review.fuel-infra Code Review - packages/trusty/rabbitmq-server.git/commitdiff
Upgrade to 3.6.6 11/28711/4
authorAlexey Lebedeff <alebedev@mirantis.com>
Thu, 10 Nov 2016 09:08:01 +0000 (12:08 +0300)
committerAlexey Lebedeff <alebedev@mirantis.com>
Tue, 22 Nov 2016 12:46:45 +0000 (15:46 +0300)
This is squash-commit of
https://review.fuel-infra.org/#/c/28448/ and
https://review.fuel-infra.org/#/c/28449/

Import new upstream release. Packaging stuff is also imported from the
same source.

Changes applied on top of upstream .deb package:
- fuel-ha-utils in 8.0 still contains its own copy of OCF script. Replace
  it with the latest version using dpkg-divert mechanism.
- Patch OCF to use policy file from old fuel-library location
- Patch OCF not to use private pacemaker attributes - pacemaker in
  8.0 doesn't support them
- Disable broker startup on installation
- Tune fd limit
- Add erlang-src dependency, build fails on ssl headers otherwise

Change-Id: I53e9df00809d744b44957eb1f5220af625c374c2

1619 files changed:
CODE_OF_CONDUCT.md [new file with mode: 0644]
CONTRIBUTING.md [moved from rabbitmq-server/plugins-src/cowboy-wrapper/CONTRIBUTING.md with 72% similarity]
LICENSE [moved from rabbitmq-server/LICENSE with 98% similarity]
LICENSE-APACHE2-ExplorerCanvas [moved from rabbitmq-server/LICENSE-APACHE2-ExplorerCanvas with 100% similarity]
LICENSE-APL2-Rebar [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-APL2-Rebar with 100% similarity]
LICENSE-APL2-Stomp-Websocket [moved from rabbitmq-server/LICENSE-APL2-Stomp-Websocket with 100% similarity]
LICENSE-BSD-base64js [moved from rabbitmq-server/LICENSE-BSD-base64js with 100% similarity]
LICENSE-BSD-glMatrix [moved from rabbitmq-server/LICENSE-BSD-glMatrix with 100% similarity]
LICENSE-EPL-OTP [new file with mode: 0644]
LICENSE-MIT-EJS10 [moved from rabbitmq-server/LICENSE-MIT-EJS10 with 100% similarity]
LICENSE-MIT-Erlware-Commons [new file with mode: 0644]
LICENSE-MIT-Flot [moved from rabbitmq-server/LICENSE-MIT-Flot with 100% similarity]
LICENSE-MIT-Mochi [moved from rabbitmq-server/LICENSE-MIT-Mochi with 100% similarity]
LICENSE-MIT-Mochiweb [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-MIT-Mochiweb with 100% similarity]
LICENSE-MIT-Sammy060 [moved from rabbitmq-server/LICENSE-MIT-Sammy060 with 100% similarity]
LICENSE-MIT-SockJS [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-MIT-SockJS with 100% similarity]
LICENSE-MIT-jQuery164 [moved from rabbitmq-server/LICENSE-MIT-jQuery164 with 100% similarity]
LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/LICENSE-MPL-RabbitMQ with 100% similarity]
LICENSE-MPL2 [new file with mode: 0644]
Makefile [new file with mode: 0644]
debian/changelog
debian/control
debian/copyright
debian/dirs [new file with mode: 0644]
debian/gbp.conf [deleted file]
debian/patches/detect-stuck-queue-on-declare.diff [deleted file]
debian/patches/erlang-18-segfault.diff [deleted file]
debian/patches/fix-management-startup-after-split.diff [deleted file]
debian/patches/fix-pmon-demonitor-function.diff [deleted file]
debian/patches/policy_location.patch [new file with mode: 0644]
debian/patches/revert_private_attributes.patch [new file with mode: 0644]
debian/patches/series
debian/postinst [moved from debian/rabbitmq-server.postinst with 95% similarity]
debian/postrm.in [moved from debian/rabbitmq-server.postrm with 70% similarity]
debian/preinst [new file with mode: 0644]
debian/rabbitmq-env.conf [deleted file]
debian/rabbitmq-server-wait [deleted file]
debian/rabbitmq-server.dirs [deleted file]
debian/rabbitmq-server.docs [new file with mode: 0644]
debian/rabbitmq-server.init
debian/rabbitmq-server.install [deleted file]
debian/rabbitmq-server.links [deleted file]
debian/rabbitmq-server.manpages [new file with mode: 0644]
debian/rabbitmq-server.service
debian/rules
debian/watch
deps/amqp_client/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/amqp_client/CONTRIBUTING.md [moved from rabbitmq-server/codegen/CONTRIBUTING.md with 72% similarity]
deps/amqp_client/Makefile [new file with mode: 0644]
deps/amqp_client/README.in [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/README.in with 100% similarity]
deps/amqp_client/ci/test.sh [new file with mode: 0755]
deps/amqp_client/ci/test.yml [new file with mode: 0644]
deps/amqp_client/erlang.mk [new file with mode: 0644]
deps/amqp_client/include/amqp_client.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client.hrl with 97% similarity]
deps/amqp_client/include/amqp_client_internal.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client_internal.hrl with 100% similarity]
deps/amqp_client/include/amqp_gen_consumer_spec.hrl [new file with mode: 0644]
deps/amqp_client/include/rabbit_routing_prefixes.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/rabbit_routing_prefixes.hrl with 100% similarity]
deps/amqp_client/rabbitmq-components.mk [new file with mode: 0644]
deps/amqp_client/src/amqp_auth_mechanisms.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_auth_mechanisms.erl with 96% similarity]
deps/amqp_client/src/amqp_channel.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel.erl with 99% similarity]
deps/amqp_client/src/amqp_channel_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup.erl with 91% similarity]
deps/amqp_client/src/amqp_channel_sup_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup_sup.erl with 96% similarity]
deps/amqp_client/src/amqp_channels_manager.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channels_manager.erl with 99% similarity]
deps/amqp_client/src/amqp_client.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/ebin/amqp_client.app.in with 72% similarity]
deps/amqp_client/src/amqp_client.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_client.erl with 92% similarity]
deps/amqp_client/src/amqp_connection.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection.erl with 85% similarity]
deps/amqp_client/src/amqp_connection_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_sup.erl with 96% similarity]
deps/amqp_client/src/amqp_connection_type_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_type_sup.erl with 89% similarity]
deps/amqp_client/src/amqp_direct_connection.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_connection.erl with 93% similarity]
deps/amqp_client/src/amqp_direct_consumer.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_consumer.erl with 96% similarity]
deps/amqp_client/src/amqp_gen_connection.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_connection.erl with 99% similarity]
deps/amqp_client/src/amqp_gen_consumer.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_consumer.erl with 100% similarity]
deps/amqp_client/src/amqp_main_reader.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_main_reader.erl with 98% similarity]
deps/amqp_client/src/amqp_network_connection.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_network_connection.erl with 97% similarity]
deps/amqp_client/src/amqp_rpc_client.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_client.erl with 99% similarity]
deps/amqp_client/src/amqp_rpc_server.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_server.erl with 98% similarity]
deps/amqp_client/src/amqp_selective_consumer.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_selective_consumer.erl with 100% similarity]
deps/amqp_client/src/amqp_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_sup.erl with 88% similarity]
deps/amqp_client/src/amqp_uri.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_uri.erl with 92% similarity]
deps/amqp_client/src/overview.edoc.in [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/overview.edoc.in with 100% similarity]
deps/amqp_client/src/rabbit_routing_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/rabbit_routing_util.erl with 99% similarity]
deps/amqp_client/src/uri_parser.erl [moved from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/uri_parser.erl with 100% similarity]
deps/cowboy/AUTHORS [new file with mode: 0644]
deps/cowboy/CHANGELOG.md [new file with mode: 0644]
deps/cowboy/CONTRIBUTING.md [new file with mode: 0644]
deps/cowboy/LICENSE [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/LICENSE with 92% similarity]
deps/cowboy/Makefile [new file with mode: 0644]
deps/cowboy/README.md [new file with mode: 0644]
deps/cowboy/ROADMAP.md [new file with mode: 0644]
deps/cowboy/all.sh [new file with mode: 0755]
deps/cowboy/circle.yml [new file with mode: 0644]
deps/cowboy/erlang.mk [new file with mode: 0644]
deps/cowboy/rebar.config [new file with mode: 0644]
deps/cowboy/src/cowboy.app.src [new file with mode: 0644]
deps/cowboy/src/cowboy.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_app.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.app.src with 71% similarity]
deps/cowboy/src/cowboy_bstr.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_bstr.erl with 54% similarity]
deps/cowboy/src/cowboy_clock.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_clock.erl with 74% similarity]
deps/cowboy/src/cowboy_handler.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_http.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http.erl with 69% similarity]
deps/cowboy/src/cowboy_http_handler.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_loop_handler.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_middleware.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_protocol.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_req.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_rest.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_rest.erl with 54% similarity]
deps/cowboy/src/cowboy_router.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_spdy.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_static.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_sub_protocol.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_sup.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_sup.erl with 75% similarity]
deps/cowboy/src/cowboy_websocket.erl [new file with mode: 0644]
deps/cowboy/src/cowboy_websocket_handler.erl [new file with mode: 0644]
deps/cowlib/AUTHORS [new file with mode: 0644]
deps/cowlib/CHANGELOG.md [new file with mode: 0644]
deps/cowlib/LICENSE [new file with mode: 0644]
deps/cowlib/Makefile [new file with mode: 0644]
deps/cowlib/README.md [new file with mode: 0644]
deps/cowlib/all.sh [new file with mode: 0755]
deps/cowlib/build.config [new file with mode: 0644]
deps/cowlib/erlang.mk [new file with mode: 0644]
deps/cowlib/include/cow_inline.hrl [new file with mode: 0644]
deps/cowlib/src/cow_cookie.erl [new file with mode: 0644]
deps/cowlib/src/cow_date.erl [new file with mode: 0644]
deps/cowlib/src/cow_http.erl [new file with mode: 0644]
deps/cowlib/src/cow_http_hd.erl [new file with mode: 0644]
deps/cowlib/src/cow_http_te.erl [new file with mode: 0644]
deps/cowlib/src/cow_mimetypes.erl [new file with mode: 0644]
deps/cowlib/src/cow_mimetypes.erl.src [new file with mode: 0644]
deps/cowlib/src/cow_multipart.erl [new file with mode: 0644]
deps/cowlib/src/cow_qs.erl [new file with mode: 0644]
deps/cowlib/src/cow_spdy.erl [new file with mode: 0644]
deps/cowlib/src/cow_spdy.hrl [new file with mode: 0644]
deps/cowlib/src/cowlib.app.src [new file with mode: 0644]
deps/licensing/LICENSE-APACHE2-ExplorerCanvas [moved from rabbitmq-server/plugins-src/licensing/LICENSE-APACHE2-ExplorerCanvas with 100% similarity]
deps/licensing/LICENSE-APL2-Rebar [moved from rabbitmq-server/LICENSE-Apache-Basho with 100% similarity]
deps/licensing/LICENSE-APL2-Stomp-Websocket [moved from rabbitmq-server/plugins-src/licensing/LICENSE-APL2-Stomp-Websocket with 100% similarity]
deps/licensing/LICENSE-BSD-base64js [moved from rabbitmq-server/plugins-src/licensing/LICENSE-BSD-base64js with 100% similarity]
deps/licensing/LICENSE-BSD-glMatrix [moved from rabbitmq-server/plugins-src/licensing/LICENSE-BSD-glMatrix with 100% similarity]
deps/licensing/LICENSE-EPL-OTP [new file with mode: 0644]
deps/licensing/LICENSE-MIT-EJS10 [moved from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-EJS10 with 100% similarity]
deps/licensing/LICENSE-MIT-Erlware-Commons [new file with mode: 0644]
deps/licensing/LICENSE-MIT-Flot [moved from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Flot with 100% similarity]
deps/licensing/LICENSE-MIT-Mochi [moved from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Mochi with 100% similarity]
deps/licensing/LICENSE-MIT-Mochiweb [new file with mode: 0644]
deps/licensing/LICENSE-MIT-Sammy060 [moved from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Sammy060 with 100% similarity]
deps/licensing/LICENSE-MIT-SockJS [moved from rabbitmq-server/LICENSE-MIT-eldap with 96% similarity]
deps/licensing/LICENSE-MIT-jQuery164 [moved from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-jQuery164 with 100% similarity]
deps/licensing/LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/codegen/LICENSE-MPL-RabbitMQ with 100% similarity]
deps/licensing/LICENSE-MPL2 [new file with mode: 0644]
deps/licensing/license_info_rabbitmq_codegen [new file with mode: 0644]
deps/licensing/license_info_rabbitmq_management [moved from rabbitmq-server/plugins-src/licensing/license_info_rabbitmq-management with 100% similarity]
deps/licensing/license_info_rabbitmq_management_visualiser [moved from rabbitmq-server/plugins-src/licensing/license_info_rabbitmq-management-visualiser with 100% similarity]
deps/mochiweb/.editorconfig [new file with mode: 0644]
deps/mochiweb/CHANGES.md [new file with mode: 0644]
deps/mochiweb/LICENSE [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/LICENSE with 100% similarity]
deps/mochiweb/Makefile [new file with mode: 0644]
deps/mochiweb/Makefile.orig.mk [new file with mode: 0644]
deps/mochiweb/README [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/README with 100% similarity]
deps/mochiweb/examples/hmac_api/README [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/README with 100% similarity]
deps/mochiweb/examples/hmac_api/hmac_api.hrl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api.hrl with 100% similarity]
deps/mochiweb/examples/hmac_api/hmac_api_client.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api_client.erl with 100% similarity]
deps/mochiweb/examples/hmac_api/hmac_api_lib.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api_lib.erl with 100% similarity]
deps/mochiweb/examples/https/https_store.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/https_store.erl with 100% similarity]
deps/mochiweb/examples/https/server_cert.pem [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/server_cert.pem with 100% similarity]
deps/mochiweb/examples/https/server_key.pem [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/server_key.pem with 100% similarity]
deps/mochiweb/examples/keepalive/keepalive.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/keepalive/keepalive.erl with 100% similarity]
deps/mochiweb/examples/websocket/index.html [new file with mode: 0644]
deps/mochiweb/examples/websocket/websocket.erl [new file with mode: 0644]
deps/mochiweb/include/internal.hrl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/include/internal.hrl with 100% similarity]
deps/mochiweb/rebar [new file with mode: 0755]
deps/mochiweb/rebar.config [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar.config with 81% similarity]
deps/mochiweb/scripts/entities.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/entities.erl with 100% similarity]
deps/mochiweb/src/mochifmt.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt.erl with 93% similarity]
deps/mochiweb/src/mochifmt_records.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_records.erl with 50% similarity]
deps/mochiweb/src/mochifmt_std.erl [new file with mode: 0644]
deps/mochiweb/src/mochiglobal.erl [new file with mode: 0644]
deps/mochiweb/src/mochihex.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochihex.erl with 65% similarity]
deps/mochiweb/src/mochijson.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochijson.erl with 94% similarity]
deps/mochiweb/src/mochijson2.erl [new file with mode: 0644]
deps/mochiweb/src/mochilists.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilists.erl with 72% similarity]
deps/mochiweb/src/mochilogfile2.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilogfile2.erl with 77% similarity]
deps/mochiweb/src/mochinum.erl [new file with mode: 0644]
deps/mochiweb/src/mochitemp.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochitemp.erl with 85% similarity]
deps/mochiweb/src/mochiutf8.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiutf8.erl with 84% similarity]
deps/mochiweb/src/mochiweb.app.src [new file with mode: 0644]
deps/mochiweb/src/mochiweb.erl [new file with mode: 0644]
deps/mochiweb/src/mochiweb_acceptor.erl [new file with mode: 0644]
deps/mochiweb/src/mochiweb_base64url.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_base64url.erl with 71% similarity]
deps/mochiweb/src/mochiweb_charref.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_charref.erl with 98% similarity]
deps/mochiweb/src/mochiweb_clock.erl [new file with mode: 0644]
deps/mochiweb/src/mochiweb_cookies.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cookies.erl with 90% similarity]
deps/mochiweb/src/mochiweb_cover.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cover.erl with 64% similarity]
deps/mochiweb/src/mochiweb_echo.erl [new file with mode: 0644]
deps/mochiweb/src/mochiweb_headers.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_headers.erl with 93% similarity]
deps/mochiweb/src/mochiweb_html.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_html.erl with 91% similarity]
deps/mochiweb/src/mochiweb_http.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_http.erl with 61% similarity]
deps/mochiweb/src/mochiweb_io.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_io.erl with 50% similarity]
deps/mochiweb/src/mochiweb_mime.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_mime.erl with 90% similarity]
deps/mochiweb/src/mochiweb_multipart.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_multipart.erl with 96% similarity]
deps/mochiweb/src/mochiweb_request.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request.erl with 76% similarity]
deps/mochiweb/src/mochiweb_response.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_response.erl with 68% similarity]
deps/mochiweb/src/mochiweb_session.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_session.erl with 69% similarity]
deps/mochiweb/src/mochiweb_socket.erl [new file with mode: 0644]
deps/mochiweb/src/mochiweb_socket_server.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket_server.erl with 76% similarity]
deps/mochiweb/src/mochiweb_util.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_util.erl with 97% similarity]
deps/mochiweb/src/mochiweb_websocket.erl [new file with mode: 0644]
deps/mochiweb/src/reloader.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/reloader.erl with 81% similarity]
deps/mochiweb/support/templates/mochiwebapp.template [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp.template with 92% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/bench.sh [new file with mode: 0755]
deps/mochiweb/support/templates/mochiwebapp_skel/priv/www/index.html [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/priv/www/index.html with 100% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/rebar.config [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/rebar.config with 100% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp.app.src [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp.app.src with 100% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp.erl with 100% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_app.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_app.erl with 100% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_deps.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_deps.erl with 100% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_sup.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_sup.erl with 100% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_web.erl [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_web.erl with 89% similarity]
deps/mochiweb/support/templates/mochiwebapp_skel/start-dev.sh [new file with mode: 0755]
deps/mochiweb/support/test-materials/test_ssl_cert.pem [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/test-materials/test_ssl_cert.pem with 100% similarity]
deps/mochiweb/support/test-materials/test_ssl_key.pem [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/test-materials/test_ssl_key.pem with 100% similarity]
deps/rabbit/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbit/CONTRIBUTING.md [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/CONTRIBUTING.md with 72% similarity]
deps/rabbit/INSTALL [moved from rabbitmq-server/INSTALL with 100% similarity]
deps/rabbit/LICENSE [new file with mode: 0644]
deps/rabbit/LICENSE-MIT-Mochi [moved from rabbitmq-server/plugins-src/mochiweb-wrapper/LICENSE-MIT-Mochi with 100% similarity]
deps/rabbit/LICENSE-MPL-RabbitMQ [new file with mode: 0644]
deps/rabbit/Makefile [new file with mode: 0644]
deps/rabbit/README [new file with mode: 0644]
deps/rabbit/README.md [new file with mode: 0644]
deps/rabbit/check_xref [new file with mode: 0755]
deps/rabbit/docs/README-for-packages [new file with mode: 0644]
deps/rabbit/docs/examples-to-end.xsl [moved from rabbitmq-server/docs/examples-to-end.xsl with 100% similarity]
deps/rabbit/docs/html-to-website-xml.xsl [moved from rabbitmq-server/docs/html-to-website-xml.xsl with 100% similarity]
deps/rabbit/docs/rabbitmq-echopid.xml [moved from rabbitmq-server/docs/rabbitmq-echopid.xml with 100% similarity]
deps/rabbit/docs/rabbitmq-env.conf.5.xml [moved from rabbitmq-server/docs/rabbitmq-env.conf.5.xml with 100% similarity]
deps/rabbit/docs/rabbitmq-plugins.1.xml [moved from rabbitmq-server/docs/rabbitmq-plugins.1.xml with 100% similarity]
deps/rabbit/docs/rabbitmq-server.1.xml [moved from rabbitmq-server/docs/rabbitmq-server.1.xml with 100% similarity]
deps/rabbit/docs/rabbitmq-server.service.example [new file with mode: 0644]
deps/rabbit/docs/rabbitmq-service.xml [moved from rabbitmq-server/docs/rabbitmq-service.xml with 100% similarity]
deps/rabbit/docs/rabbitmq.config.example [moved from rabbitmq-server/docs/rabbitmq.config.example with 87% similarity]
deps/rabbit/docs/rabbitmqctl.1.xml [moved from rabbitmq-server/docs/rabbitmqctl.1.xml with 85% similarity]
deps/rabbit/docs/remove-namespaces.xsl [moved from rabbitmq-server/docs/remove-namespaces.xsl with 100% similarity]
deps/rabbit/docs/set_rabbitmq_policy.sh.example [new file with mode: 0644]
deps/rabbit/docs/usage.xsl [moved from rabbitmq-server/docs/usage.xsl with 90% similarity]
deps/rabbit/erlang.mk [new file with mode: 0644]
deps/rabbit/include/gm_specs.hrl [moved from rabbitmq-server/include/gm_specs.hrl with 59% similarity]
deps/rabbit/include/rabbit_cli.hrl [moved from rabbitmq-server/include/rabbit_cli.hrl with 53% similarity]
deps/rabbit/quickcheck [new file with mode: 0755]
deps/rabbit/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbit/scripts/rabbitmq-defaults [moved from rabbitmq-server/scripts/rabbitmq-defaults with 86% similarity]
deps/rabbit/scripts/rabbitmq-defaults.bat [moved from rabbitmq-server/scripts/rabbitmq-defaults.bat with 58% similarity, mode: 0644]
deps/rabbit/scripts/rabbitmq-echopid.bat [moved from rabbitmq-server/scripts/rabbitmq-echopid.bat with 91% similarity, mode: 0644]
deps/rabbit/scripts/rabbitmq-env [moved from rabbitmq-server/scripts/rabbitmq-env with 52% similarity]
deps/rabbit/scripts/rabbitmq-env.bat [new file with mode: 0644]
deps/rabbit/scripts/rabbitmq-plugins [moved from rabbitmq-server/scripts/rabbitmq-plugins with 87% similarity]
deps/rabbit/scripts/rabbitmq-plugins.bat [moved from rabbitmq-server/scripts/rabbitmq-plugins.bat with 86% similarity, mode: 0644]
deps/rabbit/scripts/rabbitmq-server [moved from rabbitmq-server/scripts/rabbitmq-server with 58% similarity]
deps/rabbit/scripts/rabbitmq-server.bat [moved from rabbitmq-server/scripts/rabbitmq-server.bat with 65% similarity, mode: 0644]
deps/rabbit/scripts/rabbitmq-service.bat [moved from rabbitmq-server/scripts/rabbitmq-service.bat with 81% similarity, mode: 0644]
deps/rabbit/scripts/rabbitmqctl [moved from rabbitmq-server/scripts/rabbitmqctl with 89% similarity]
deps/rabbit/scripts/rabbitmqctl.bat [moved from rabbitmq-server/scripts/rabbitmqctl.bat with 86% similarity, mode: 0644]
deps/rabbit/src/background_gc.erl [moved from rabbitmq-server/src/background_gc.erl with 89% similarity]
deps/rabbit/src/delegate.erl [moved from rabbitmq-server/src/delegate.erl with 91% similarity]
deps/rabbit/src/delegate_sup.erl [moved from rabbitmq-server/src/delegate_sup.erl with 88% similarity]
deps/rabbit/src/dtree.erl [moved from rabbitmq-server/src/dtree.erl with 86% similarity]
deps/rabbit/src/file_handle_cache.erl [moved from rabbitmq-server/src/file_handle_cache.erl with 89% similarity]
deps/rabbit/src/file_handle_cache_stats.erl [moved from rabbitmq-server/src/file_handle_cache_stats.erl with 87% similarity]
deps/rabbit/src/gatherer.erl [moved from rabbitmq-server/src/gatherer.erl with 90% similarity]
deps/rabbit/src/gm.erl [moved from rabbitmq-server/src/gm.erl with 82% similarity]
deps/rabbit/src/lqueue.erl [moved from rabbitmq-server/src/lqueue.erl with 69% similarity]
deps/rabbit/src/mirrored_supervisor_sups.erl [moved from rabbitmq-server/src/mirrored_supervisor_sups.erl with 100% similarity]
deps/rabbit/src/mnesia_sync.erl [moved from rabbitmq-server/src/mnesia_sync.erl with 92% similarity]
deps/rabbit/src/pg_local.erl [moved from rabbitmq-server/src/pg_local.erl with 89% similarity]
deps/rabbit/src/rabbit.app.src [moved from rabbitmq-server/ebin/rabbit_app.in with 82% similarity]
deps/rabbit/src/rabbit.erl [moved from rabbitmq-server/src/rabbit.erl with 74% similarity]
deps/rabbit/src/rabbit_access_control.erl [moved from rabbitmq-server/src/rabbit_access_control.erl with 84% similarity]
deps/rabbit/src/rabbit_alarm.erl [moved from rabbitmq-server/src/rabbit_alarm.erl with 64% similarity]
deps/rabbit/src/rabbit_amqqueue_process.erl [moved from rabbitmq-server/src/rabbit_amqqueue_process.erl with 89% similarity]
deps/rabbit/src/rabbit_amqqueue_sup.erl [moved from rabbitmq-server/src/rabbit_amqqueue_sup.erl with 82% similarity]
deps/rabbit/src/rabbit_amqqueue_sup_sup.erl [moved from rabbitmq-server/src/rabbit_amqqueue_sup_sup.erl with 78% similarity]
deps/rabbit/src/rabbit_auth_mechanism_amqplain.erl [moved from rabbitmq-server/src/rabbit_auth_mechanism_amqplain.erl with 96% similarity]
deps/rabbit/src/rabbit_auth_mechanism_cr_demo.erl [moved from rabbitmq-server/src/rabbit_auth_mechanism_cr_demo.erl with 96% similarity]
deps/rabbit/src/rabbit_auth_mechanism_plain.erl [moved from rabbitmq-server/src/rabbit_auth_mechanism_plain.erl with 97% similarity]
deps/rabbit/src/rabbit_autoheal.erl [moved from rabbitmq-server/src/rabbit_autoheal.erl with 82% similarity]
deps/rabbit/src/rabbit_binding.erl [moved from rabbitmq-server/src/rabbit_binding.erl with 89% similarity]
deps/rabbit/src/rabbit_boot_steps.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_channel_sup.erl [moved from rabbitmq-server/src/rabbit_channel_sup.erl with 80% similarity]
deps/rabbit/src/rabbit_channel_sup_sup.erl [moved from rabbitmq-server/src/rabbit_channel_sup_sup.erl with 76% similarity]
deps/rabbit/src/rabbit_cli.erl [moved from rabbitmq-server/src/rabbit_cli.erl with 59% similarity]
deps/rabbit/src/rabbit_client_sup.erl [moved from rabbitmq-server/src/rabbit_client_sup.erl with 71% similarity]
deps/rabbit/src/rabbit_connection_helper_sup.erl [moved from rabbitmq-server/src/rabbit_connection_helper_sup.erl with 71% similarity]
deps/rabbit/src/rabbit_connection_sup.erl [moved from rabbitmq-server/src/rabbit_connection_sup.erl with 76% similarity]
deps/rabbit/src/rabbit_control_main.erl [moved from rabbitmq-server/src/rabbit_control_main.erl with 67% similarity]
deps/rabbit/src/rabbit_control_pbe.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_dead_letter.erl [moved from rabbitmq-server/src/rabbit_dead_letter.erl with 91% similarity]
deps/rabbit/src/rabbit_diagnostics.erl [moved from rabbitmq-server/src/rabbit_diagnostics.erl with 74% similarity]
deps/rabbit/src/rabbit_direct.erl [moved from rabbitmq-server/src/rabbit_direct.erl with 73% similarity]
deps/rabbit/src/rabbit_disk_monitor.erl [moved from rabbitmq-server/src/rabbit_disk_monitor.erl with 70% similarity]
deps/rabbit/src/rabbit_epmd_monitor.erl [moved from rabbitmq-server/src/rabbit_epmd_monitor.erl with 95% similarity]
deps/rabbit/src/rabbit_error_logger.erl [moved from rabbitmq-server/src/rabbit_error_logger.erl with 81% similarity]
deps/rabbit/src/rabbit_error_logger_file_h.erl [moved from rabbitmq-server/src/rabbit_error_logger_file_h.erl with 97% similarity]
deps/rabbit/src/rabbit_exchange.erl [moved from rabbitmq-server/src/rabbit_exchange.erl with 78% similarity]
deps/rabbit/src/rabbit_exchange_parameters.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_exchange_type_direct.erl [moved from rabbitmq-server/src/rabbit_exchange_type_direct.erl with 96% similarity]
deps/rabbit/src/rabbit_exchange_type_fanout.erl [moved from rabbitmq-server/src/rabbit_exchange_type_fanout.erl with 96% similarity]
deps/rabbit/src/rabbit_exchange_type_headers.erl [moved from rabbitmq-server/src/rabbit_exchange_type_headers.erl with 73% similarity]
deps/rabbit/src/rabbit_exchange_type_invalid.erl [moved from rabbitmq-server/src/rabbit_exchange_type_invalid.erl with 89% similarity]
deps/rabbit/src/rabbit_exchange_type_topic.erl [moved from rabbitmq-server/src/rabbit_exchange_type_topic.erl with 99% similarity]
deps/rabbit/src/rabbit_file.erl [moved from rabbitmq-server/src/rabbit_file.erl with 87% similarity]
deps/rabbit/src/rabbit_framing.erl [moved from rabbitmq-server/src/rabbit_framing.erl with 90% similarity]
deps/rabbit/src/rabbit_guid.erl [moved from rabbitmq-server/src/rabbit_guid.erl with 93% similarity]
deps/rabbit/src/rabbit_hipe.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_limiter.erl [moved from rabbitmq-server/src/rabbit_limiter.erl with 90% similarity]
deps/rabbit/src/rabbit_log.erl [moved from rabbitmq-server/src/rabbit_log.erl with 75% similarity]
deps/rabbit/src/rabbit_memory_monitor.erl [moved from rabbitmq-server/src/rabbit_memory_monitor.erl with 94% similarity]
deps/rabbit/src/rabbit_mirror_queue_coordinator.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_coordinator.erl with 91% similarity]
deps/rabbit/src/rabbit_mirror_queue_master.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_master.erl with 75% similarity]
deps/rabbit/src/rabbit_mirror_queue_misc.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_misc.erl with 79% similarity]
deps/rabbit/src/rabbit_mirror_queue_mode.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_mode.erl with 83% similarity]
deps/rabbit/src/rabbit_mirror_queue_mode_all.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_mode_all.erl with 100% similarity]
deps/rabbit/src/rabbit_mirror_queue_mode_exactly.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_mode_exactly.erl with 93% similarity]
deps/rabbit/src/rabbit_mirror_queue_mode_nodes.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_mode_nodes.erl with 65% similarity]
deps/rabbit/src/rabbit_mirror_queue_slave.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_slave.erl with 92% similarity]
deps/rabbit/src/rabbit_mirror_queue_sync.erl [moved from rabbitmq-server/src/rabbit_mirror_queue_sync.erl with 55% similarity]
deps/rabbit/src/rabbit_mnesia.erl [moved from rabbitmq-server/src/rabbit_mnesia.erl with 83% similarity]
deps/rabbit/src/rabbit_mnesia_rename.erl [moved from rabbitmq-server/src/rabbit_mnesia_rename.erl with 98% similarity]
deps/rabbit/src/rabbit_msg_file.erl [moved from rabbitmq-server/src/rabbit_msg_file.erl with 85% similarity]
deps/rabbit/src/rabbit_msg_store.erl [moved from rabbitmq-server/src/rabbit_msg_store.erl with 93% similarity]
deps/rabbit/src/rabbit_msg_store_ets_index.erl [moved from rabbitmq-server/src/rabbit_msg_store_ets_index.erl with 97% similarity]
deps/rabbit/src/rabbit_msg_store_gc.erl [moved from rabbitmq-server/src/rabbit_msg_store_gc.erl with 88% similarity]
deps/rabbit/src/rabbit_node_monitor.erl [moved from rabbitmq-server/src/rabbit_node_monitor.erl with 90% similarity]
deps/rabbit/src/rabbit_parameter_validation.erl [moved from rabbitmq-server/src/rabbit_parameter_validation.erl with 97% similarity]
deps/rabbit/src/rabbit_password.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_password_hashing_md5.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_password_hashing_sha256.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_password_hashing_sha512.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_pbe.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_plugins.erl [moved from rabbitmq-server/src/rabbit_plugins.erl with 88% similarity]
deps/rabbit/src/rabbit_plugins_main.erl [moved from rabbitmq-server/src/rabbit_plugins_main.erl with 97% similarity]
deps/rabbit/src/rabbit_policies.erl [moved from rabbitmq-server/src/rabbit_policies.erl with 87% similarity]
deps/rabbit/src/rabbit_policy.erl [moved from rabbitmq-server/src/rabbit_policy.erl with 90% similarity]
deps/rabbit/src/rabbit_prelaunch.erl [moved from rabbitmq-server/src/rabbit_prelaunch.erl with 84% similarity]
deps/rabbit/src/rabbit_prequeue.erl [moved from rabbitmq-server/src/rabbit_prequeue.erl with 94% similarity]
deps/rabbit/src/rabbit_priority_queue.erl [moved from rabbitmq-server/src/rabbit_priority_queue.erl with 80% similarity]
deps/rabbit/src/rabbit_queue_consumers.erl [moved from rabbitmq-server/src/rabbit_queue_consumers.erl with 96% similarity]
deps/rabbit/src/rabbit_queue_index.erl [moved from rabbitmq-server/src/rabbit_queue_index.erl with 94% similarity]
deps/rabbit/src/rabbit_queue_location_client_local.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_queue_location_min_masters.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_queue_location_random.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_queue_location_validator.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_queue_master_location_misc.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_recovery_terms.erl [moved from rabbitmq-server/src/rabbit_recovery_terms.erl with 90% similarity]
deps/rabbit/src/rabbit_registry.erl [moved from rabbitmq-server/src/rabbit_registry.erl with 91% similarity]
deps/rabbit/src/rabbit_resource_monitor_misc.erl [new file with mode: 0644]
deps/rabbit/src/rabbit_restartable_sup.erl [moved from rabbitmq-server/src/rabbit_restartable_sup.erl with 81% similarity]
deps/rabbit/src/rabbit_router.erl [moved from rabbitmq-server/src/rabbit_router.erl with 90% similarity]
deps/rabbit/src/rabbit_runtime_parameters.erl [moved from rabbitmq-server/src/rabbit_runtime_parameters.erl with 72% similarity]
deps/rabbit/src/rabbit_sasl_report_file_h.erl [moved from rabbitmq-server/src/rabbit_sasl_report_file_h.erl with 97% similarity]
deps/rabbit/src/rabbit_ssl.erl [moved from rabbitmq-server/src/rabbit_ssl.erl with 95% similarity]
deps/rabbit/src/rabbit_sup.erl [moved from rabbitmq-server/src/rabbit_sup.erl with 77% similarity]
deps/rabbit/src/rabbit_table.erl [moved from rabbitmq-server/src/rabbit_table.erl with 95% similarity]
deps/rabbit/src/rabbit_trace.erl [moved from rabbitmq-server/src/rabbit_trace.erl with 88% similarity]
deps/rabbit/src/rabbit_upgrade.erl [moved from rabbitmq-server/src/rabbit_upgrade.erl with 95% similarity]
deps/rabbit/src/rabbit_upgrade_functions.erl [moved from rabbitmq-server/src/rabbit_upgrade_functions.erl with 85% similarity]
deps/rabbit/src/rabbit_variable_queue.erl [moved from rabbitmq-server/src/rabbit_variable_queue.erl with 80% similarity]
deps/rabbit/src/rabbit_version.erl [moved from rabbitmq-server/src/rabbit_version.erl with 77% similarity]
deps/rabbit/src/rabbit_vhost.erl [moved from rabbitmq-server/src/rabbit_vhost.erl with 84% similarity]
deps/rabbit/src/rabbit_vm.erl [moved from rabbitmq-server/src/rabbit_vm.erl with 85% similarity]
deps/rabbit/src/supervised_lifecycle.erl [moved from rabbitmq-server/src/supervised_lifecycle.erl with 89% similarity]
deps/rabbit/src/tcp_listener.erl [new file with mode: 0644]
deps/rabbit/src/tcp_listener_sup.erl [new file with mode: 0644]
deps/rabbit/src/truncate.erl [moved from rabbitmq-server/src/truncate.erl with 66% similarity]
deps/rabbit/src/vm_memory_monitor.erl [moved from rabbitmq-server/src/vm_memory_monitor.erl with 87% similarity]
deps/rabbit/src/worker_pool.erl [moved from rabbitmq-server/src/worker_pool.erl with 77% similarity]
deps/rabbit/src/worker_pool_sup.erl [new file with mode: 0644]
deps/rabbit/src/worker_pool_worker.erl [moved from rabbitmq-server/src/worker_pool_worker.erl with 80% similarity]
deps/rabbit_common/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbit_common/CONTRIBUTING.md [moved from rabbitmq-server/plugins-src/eldap-wrapper/CONTRIBUTING.md with 72% similarity]
deps/rabbit_common/LICENSE [new file with mode: 0644]
deps/rabbit_common/LICENSE-MIT-Erlware-Commons [new file with mode: 0644]
deps/rabbit_common/LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MPL-RabbitMQ with 99% similarity]
deps/rabbit_common/Makefile [new file with mode: 0644]
deps/rabbit_common/codegen.py [moved from rabbitmq-server/codegen.py with 65% similarity, mode: 0755]
deps/rabbit_common/erlang.mk [new file with mode: 0644]
deps/rabbit_common/include/old_builtin_types.hrl [new file with mode: 0644]
deps/rabbit_common/include/rabbit.hrl [moved from rabbitmq-server/include/rabbit.hrl with 77% similarity]
deps/rabbit_common/include/rabbit_misc.hrl [new file with mode: 0644]
deps/rabbit_common/include/rabbit_msg_store.hrl [moved from rabbitmq-server/include/rabbit_msg_store.hrl with 96% similarity]
deps/rabbit_common/mk/rabbitmq-build.mk [new file with mode: 0644]
deps/rabbit_common/mk/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbit_common/mk/rabbitmq-dist.mk [new file with mode: 0644]
deps/rabbit_common/mk/rabbitmq-plugin.mk [new file with mode: 0644]
deps/rabbit_common/mk/rabbitmq-run.mk [new file with mode: 0644]
deps/rabbit_common/mk/rabbitmq-tools.mk [new file with mode: 0644]
deps/rabbit_common/src/app_utils.erl [moved from rabbitmq-server/src/app_utils.erl with 98% similarity]
deps/rabbit_common/src/code_version.erl [new file with mode: 0644]
deps/rabbit_common/src/credit_flow.erl [moved from rabbitmq-server/src/credit_flow.erl with 77% similarity]
deps/rabbit_common/src/ec_semver.erl [new file with mode: 0644]
deps/rabbit_common/src/ec_semver_parser.erl [new file with mode: 0644]
deps/rabbit_common/src/gen_server2.erl [moved from rabbitmq-server/src/gen_server2.erl with 98% similarity]
deps/rabbit_common/src/mirrored_supervisor.erl [moved from rabbitmq-server/src/mirrored_supervisor.erl with 92% similarity]
deps/rabbit_common/src/mochijson2.erl [moved from rabbitmq-server/src/mochijson2.erl with 100% similarity]
deps/rabbit_common/src/mochinum.erl [moved from rabbitmq-server/src/mochinum.erl with 100% similarity]
deps/rabbit_common/src/pmon.erl [moved from rabbitmq-server/src/pmon.erl with 84% similarity]
deps/rabbit_common/src/priority_queue.erl [moved from rabbitmq-server/src/priority_queue.erl with 89% similarity]
deps/rabbit_common/src/rabbit_amqqueue.erl [moved from rabbitmq-server/src/rabbit_amqqueue.erl with 69% similarity]
deps/rabbit_common/src/rabbit_auth_backend_dummy.erl [moved from rabbitmq-server/src/rabbit_auth_backend_dummy.erl with 91% similarity]
deps/rabbit_common/src/rabbit_auth_backend_internal.erl [moved from rabbitmq-server/src/rabbit_auth_backend_internal.erl with 61% similarity]
deps/rabbit_common/src/rabbit_auth_mechanism.erl [moved from rabbitmq-server/src/rabbit_auth_mechanism.erl with 85% similarity]
deps/rabbit_common/src/rabbit_authn_backend.erl [moved from rabbitmq-server/src/rabbit_authn_backend.erl with 84% similarity]
deps/rabbit_common/src/rabbit_authz_backend.erl [moved from rabbitmq-server/src/rabbit_authz_backend.erl with 88% similarity]
deps/rabbit_common/src/rabbit_backing_queue.erl [moved from rabbitmq-server/src/rabbit_backing_queue.erl with 86% similarity]
deps/rabbit_common/src/rabbit_basic.erl [moved from rabbitmq-server/src/rabbit_basic.erl with 82% similarity]
deps/rabbit_common/src/rabbit_binary_generator.erl [moved from rabbitmq-server/src/rabbit_binary_generator.erl with 82% similarity]
deps/rabbit_common/src/rabbit_binary_parser.erl [moved from rabbitmq-server/src/rabbit_binary_parser.erl with 87% similarity]
deps/rabbit_common/src/rabbit_channel.erl [moved from rabbitmq-server/src/rabbit_channel.erl with 90% similarity]
deps/rabbit_common/src/rabbit_channel_interceptor.erl [new file with mode: 0644]
deps/rabbit_common/src/rabbit_command_assembler.erl [moved from rabbitmq-server/src/rabbit_command_assembler.erl with 82% similarity]
deps/rabbit_common/src/rabbit_common.app.src [new file with mode: 0644]
deps/rabbit_common/src/rabbit_control_misc.erl [new file with mode: 0644]
deps/rabbit_common/src/rabbit_data_coercion.erl [new file with mode: 0644]
deps/rabbit_common/src/rabbit_error_logger_handler.erl [new file with mode: 0644]
deps/rabbit_common/src/rabbit_event.erl [moved from rabbitmq-server/src/rabbit_event.erl with 73% similarity]
deps/rabbit_common/src/rabbit_exchange_decorator.erl [moved from rabbitmq-server/src/rabbit_exchange_decorator.erl with 91% similarity]
deps/rabbit_common/src/rabbit_exchange_type.erl [moved from rabbitmq-server/src/rabbit_exchange_type.erl with 85% similarity]
deps/rabbit_common/src/rabbit_health_check.erl [new file with mode: 0644]
deps/rabbit_common/src/rabbit_heartbeat.erl [moved from rabbitmq-server/src/rabbit_heartbeat.erl with 83% similarity]
deps/rabbit_common/src/rabbit_misc.erl [moved from rabbitmq-server/src/rabbit_misc.erl with 70% similarity]
deps/rabbit_common/src/rabbit_msg_store_index.erl [moved from rabbitmq-server/src/rabbit_msg_store_index.erl with 78% similarity]
deps/rabbit_common/src/rabbit_net.erl [moved from rabbitmq-server/src/rabbit_net.erl with 60% similarity]
deps/rabbit_common/src/rabbit_networking.erl [moved from rabbitmq-server/src/rabbit_networking.erl with 63% similarity]
deps/rabbit_common/src/rabbit_nodes.erl [moved from rabbitmq-server/src/rabbit_nodes.erl with 80% similarity]
deps/rabbit_common/src/rabbit_password_hashing.erl [new file with mode: 0644]
deps/rabbit_common/src/rabbit_policy_validator.erl [moved from rabbitmq-server/src/rabbit_policy_validator.erl with 77% similarity]
deps/rabbit_common/src/rabbit_queue_collector.erl [moved from rabbitmq-server/src/rabbit_queue_collector.erl with 89% similarity]
deps/rabbit_common/src/rabbit_queue_decorator.erl [moved from rabbitmq-server/src/rabbit_queue_decorator.erl with 86% similarity]
deps/rabbit_common/src/rabbit_queue_master_locator.erl [new file with mode: 0644]
deps/rabbit_common/src/rabbit_reader.erl [moved from rabbitmq-server/src/rabbit_reader.erl with 76% similarity]
deps/rabbit_common/src/rabbit_runtime_parameter.erl [moved from rabbitmq-server/src/rabbit_runtime_parameter.erl with 78% similarity]
deps/rabbit_common/src/rabbit_types.erl [moved from rabbitmq-server/src/rabbit_types.erl with 97% similarity]
deps/rabbit_common/src/rabbit_writer.erl [moved from rabbitmq-server/src/rabbit_writer.erl with 82% similarity]
deps/rabbit_common/src/rand_compat.erl [new file with mode: 0644]
deps/rabbit_common/src/ssl_compat.erl [new file with mode: 0644]
deps/rabbit_common/src/supervisor2.erl [moved from rabbitmq-server/src/supervisor2.erl with 95% similarity]
deps/rabbit_common/src/time_compat.erl [new file with mode: 0644]
deps/rabbitmq_amqp1_0/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_amqp1_0/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_amqp1_0/Makefile [new file with mode: 0644]
deps/rabbitmq_amqp1_0/README.md [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/README.md with 100% similarity]
deps/rabbitmq_amqp1_0/codegen.py [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/codegen.py with 72% similarity]
deps/rabbitmq_amqp1_0/erlang.mk [new file with mode: 0644]
deps/rabbitmq_amqp1_0/include/rabbit_amqp1_0.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/include/rabbit_amqp1_0.hrl with 100% similarity]
deps/rabbitmq_amqp1_0/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_amqp1_0/spec/messaging.xml [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/messaging.xml with 100% similarity]
deps/rabbitmq_amqp1_0/spec/security.xml [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/security.xml with 100% similarity]
deps/rabbitmq_amqp1_0/spec/transactions.xml [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/transactions.xml with 100% similarity]
deps/rabbitmq_amqp1_0/spec/transport.xml [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/transport.xml with 100% similarity]
deps/rabbitmq_amqp1_0/spec/types.xml [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/types.xml with 100% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_generator.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_generator.erl with 79% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_parser.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_parser.erl with 97% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_channel.erl with 97% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_framing.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_framing.erl with 98% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_incoming_link.erl with 98% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_link_util.erl with 83% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_message.erl with 99% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_outgoing_link.erl with 98% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_reader.erl with 99% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session.erl with 99% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_process.erl with 99% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup.erl with 85% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup_sup.erl with 82% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_util.erl with 81% similarity]
deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_writer.erl with 93% similarity]
deps/rabbitmq_amqp1_0/src/rabbitmq_amqp1_0.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbitmq_amqp1_0.app.src with 71% similarity]
deps/rabbitmq_auth_backend_ldap/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/Makefile [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/README-authorisation.md [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/README-tests.md [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/README.md [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/Vagrantfile [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/erlang.mk [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/example/global.ldif [moved from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/global.ldif with 71% similarity]
deps/rabbitmq_auth_backend_ldap/example/memberof_init.ldif [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/example/refint_1.ldif [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/example/refint_2.ldif [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/example/seed.sh [new file with mode: 0755]
deps/rabbitmq_auth_backend_ldap/example/setup.sh [new file with mode: 0755]
deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl [moved from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap.erl with 54% similarity]
deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_app.erl with 78% similarity]
deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_util.erl with 94% similarity]
deps/rabbitmq_auth_backend_ldap/src/rabbitmq_auth_backend_ldap.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbitmq_auth_backend_ldap.app.src with 76% similarity]
deps/rabbitmq_auth_mechanism_ssl/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_auth_mechanism_ssl/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_auth_mechanism_ssl/Makefile [new file with mode: 0644]
deps/rabbitmq_auth_mechanism_ssl/README.md [moved from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/README with 75% similarity]
deps/rabbitmq_auth_mechanism_ssl/erlang.mk [new file with mode: 0644]
deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl [moved from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl.erl with 97% similarity]
deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl_app.erl with 94% similarity]
deps/rabbitmq_auth_mechanism_ssl/src/rabbitmq_auth_mechanism_ssl.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbitmq_auth_mechanism_ssl.app.src with 76% similarity]
deps/rabbitmq_codegen/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_codegen/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_codegen/LICENSE [moved from rabbitmq-server/codegen/LICENSE with 100% similarity]
deps/rabbitmq_codegen/LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/plugins-src/licensing/LICENSE-MPL-RabbitMQ with 100% similarity]
deps/rabbitmq_codegen/Makefile [moved from rabbitmq-server/codegen/Makefile with 64% similarity]
deps/rabbitmq_codegen/README.extensions.md [moved from rabbitmq-server/codegen/README.extensions.md with 100% similarity]
deps/rabbitmq_codegen/amqp-rabbitmq-0.8.json [moved from rabbitmq-server/codegen/amqp-rabbitmq-0.8.json with 99% similarity]
deps/rabbitmq_codegen/amqp-rabbitmq-0.9.1.json [moved from rabbitmq-server/codegen/amqp-rabbitmq-0.9.1.json with 99% similarity]
deps/rabbitmq_codegen/amqp_codegen.py [moved from rabbitmq-server/codegen/amqp_codegen.py with 92% similarity]
deps/rabbitmq_codegen/credit_extension.json [moved from rabbitmq-server/codegen/credit_extension.json with 97% similarity]
deps/rabbitmq_codegen/demo_extension.json [moved from rabbitmq-server/codegen/demo_extension.json with 100% similarity]
deps/rabbitmq_codegen/license_info [new file with mode: 0644]
deps/rabbitmq_consistent_hash_exchange/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_consistent_hash_exchange/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_consistent_hash_exchange/LICENSE [moved from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE with 100% similarity]
deps/rabbitmq_consistent_hash_exchange/LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE-MPL-RabbitMQ with 99% similarity]
deps/rabbitmq_consistent_hash_exchange/Makefile [new file with mode: 0644]
deps/rabbitmq_consistent_hash_exchange/README.md [new file with mode: 0644]
deps/rabbitmq_consistent_hash_exchange/erlang.mk [new file with mode: 0644]
deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl [moved from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbit_exchange_type_consistent_hash.erl with 73% similarity]
deps/rabbitmq_consistent_hash_exchange/src/rabbitmq_consistent_hash_exchange.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbitmq_consistent_hash_exchange.app.src with 65% similarity]
deps/rabbitmq_event_exchange/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_event_exchange/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_event_exchange/LICENSE [new file with mode: 0644]
deps/rabbitmq_event_exchange/LICENSE-MPL-RabbitMQ [new file with mode: 0644]
deps/rabbitmq_event_exchange/Makefile [new file with mode: 0644]
deps/rabbitmq_event_exchange/README.md [new file with mode: 0644]
deps/rabbitmq_event_exchange/erlang.mk [new file with mode: 0644]
deps/rabbitmq_event_exchange/examples/java/QueueEvents.java [new file with mode: 0644]
deps/rabbitmq_event_exchange/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl [new file with mode: 0644]
deps/rabbitmq_event_exchange/src/rabbitmq_event_exchange.app.src [new file with mode: 0644]
deps/rabbitmq_federation/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_federation/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_federation/Makefile [new file with mode: 0644]
deps/rabbitmq_federation/README-hacking [moved from rabbitmq-server/plugins-src/rabbitmq-federation/README-hacking with 100% similarity]
deps/rabbitmq_federation/README.md [moved from rabbitmq-server/plugins-src/rabbitmq-federation/README.md with 100% similarity]
deps/rabbitmq_federation/erlang.mk [new file with mode: 0644]
deps/rabbitmq_federation/etc/rabbit-test.sh [moved from rabbitmq-server/plugins-src/rabbitmq-federation/etc/rabbit-test.sh with 100% similarity]
deps/rabbitmq_federation/etc/setup-rabbit-test.sh [new file with mode: 0755]
deps/rabbitmq_federation/include/rabbit_federation.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/include/rabbit_federation.hrl with 95% similarity]
deps/rabbitmq_federation/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_federation/src/rabbit_federation_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_app.erl with 96% similarity]
deps/rabbitmq_federation/src/rabbit_federation_db.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_db.erl with 96% similarity]
deps/rabbitmq_federation/src/rabbit_federation_event.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_event.erl with 96% similarity]
deps/rabbitmq_federation/src/rabbit_federation_exchange.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange.erl with 98% similarity]
deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link.erl with 98% similarity]
deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link_sup_sup.erl with 95% similarity]
deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_sup.erl with 96% similarity]
deps/rabbitmq_federation/src/rabbit_federation_link_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_util.erl with 89% similarity]
deps/rabbitmq_federation/src/rabbit_federation_parameters.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_parameters.erl with 98% similarity]
deps/rabbitmq_federation/src/rabbit_federation_queue.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue.erl with 98% similarity]
deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link.erl with 96% similarity]
deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link_sup_sup.erl with 95% similarity]
deps/rabbitmq_federation/src/rabbit_federation_status.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_status.erl with 98% similarity]
deps/rabbitmq_federation/src/rabbit_federation_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_sup.erl with 91% similarity]
deps/rabbitmq_federation/src/rabbit_federation_upstream.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream.erl with 97% similarity]
deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream_exchange.erl with 97% similarity]
deps/rabbitmq_federation/src/rabbit_federation_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_util.erl with 97% similarity]
deps/rabbitmq_federation/src/rabbitmq_federation.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbitmq_federation.app.src with 67% similarity]
deps/rabbitmq_federation_management/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_federation_management/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_federation_management/LICENSE [new file with mode: 0644]
deps/rabbitmq_federation_management/LICENSE-APACHE2-ExplorerCanvas [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-APACHE2-ExplorerCanvas with 100% similarity]
deps/rabbitmq_federation_management/LICENSE-BSD-base64js [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-BSD-base64js with 100% similarity]
deps/rabbitmq_federation_management/LICENSE-MIT-EJS10 [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-EJS10 with 100% similarity]
deps/rabbitmq_federation_management/LICENSE-MIT-Flot [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-Flot with 100% similarity]
deps/rabbitmq_federation_management/LICENSE-MIT-Sammy060 [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-Sammy060 with 100% similarity]
deps/rabbitmq_federation_management/LICENSE-MIT-jQuery164 [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-jQuery164 with 100% similarity]
deps/rabbitmq_federation_management/LICENSE-MPL-RabbitMQ [new file with mode: 0644]
deps/rabbitmq_federation_management/Makefile [new file with mode: 0644]
deps/rabbitmq_federation_management/README.md [new file with mode: 0644]
deps/rabbitmq_federation_management/erlang.mk [new file with mode: 0644]
deps/rabbitmq_federation_management/priv/www/js/federation.js [moved from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/federation.js with 90% similarity]
deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstream.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstream.ejs with 72% similarity]
deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstreams.ejs with 81% similarity]
deps/rabbitmq_federation_management/priv/www/js/tmpl/federation.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation.ejs with 100% similarity]
deps/rabbitmq_federation_management/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl [moved from rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbit_federation_mgmt.erl with 98% similarity]
deps/rabbitmq_federation_management/src/rabbitmq_federation_management.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbitmq_federation_management.app.src with 61% similarity]
deps/rabbitmq_jms_topic_exchange/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/LICENSE [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/LICENSES.txt [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/Makefile [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/README.md [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/erlang.mk [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/include/rabbit_jms_topic_exchange.hrl [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/src/rabbitmq_jms_topic_exchange.app.src [new file with mode: 0644]
deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl [new file with mode: 0644]
deps/rabbitmq_management/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_management/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_management/LICENSE [moved from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE with 100% similarity]
deps/rabbitmq_management/LICENSE-APACHE2-ExplorerCanvas [moved from rabbitmq-server/plugins-src/webmachine-wrapper/LICENSE-Apache-Basho with 89% similarity]
deps/rabbitmq_management/LICENSE-BSD-base64js [new file with mode: 0644]
deps/rabbitmq_management/LICENSE-MIT-EJS10 [moved from rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/LICENSE with 94% similarity]
deps/rabbitmq_management/LICENSE-MIT-Flot [new file with mode: 0644]
deps/rabbitmq_management/LICENSE-MIT-Sammy060 [new file with mode: 0644]
deps/rabbitmq_management/LICENSE-MIT-jQuery164 [new file with mode: 0644]
deps/rabbitmq_management/LICENSE-MPL-RabbitMQ [new file with mode: 0644]
deps/rabbitmq_management/Makefile [new file with mode: 0644]
deps/rabbitmq_management/README.md [new file with mode: 0644]
deps/rabbitmq_management/bin/rabbitmqadmin [moved from rabbitmq-server/plugins-src/rabbitmq-management/bin/rabbitmqadmin with 87% similarity]
deps/rabbitmq_management/erlang.mk [new file with mode: 0644]
deps/rabbitmq_management/include/rabbit_mgmt.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt.hrl with 78% similarity]
deps/rabbitmq_management/include/rabbit_mgmt_event_collector.hrl [new file with mode: 0644]
deps/rabbitmq_management/include/rabbit_mgmt_metrics.hrl [new file with mode: 0644]
deps/rabbitmq_management/include/rabbit_mgmt_test.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt_test.hrl with 89% similarity]
deps/rabbitmq_management/license_info [moved from rabbitmq-server/plugins-src/rabbitmq-management/license_info with 100% similarity]
deps/rabbitmq_management/priv/www/api/index.html [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/api/index.html with 88% similarity]
deps/rabbitmq_management/priv/www/cli/index.html [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/cli/index.html with 100% similarity]
deps/rabbitmq_management/priv/www/css/evil.css [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/evil.css with 100% similarity]
deps/rabbitmq_management/priv/www/css/main.css [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/main.css with 99% similarity]
deps/rabbitmq_management/priv/www/doc/stats.html [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/doc/stats.html with 100% similarity]
deps/rabbitmq_management/priv/www/favicon.ico [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/favicon.ico with 100% similarity]
deps/rabbitmq_management/priv/www/img/bg-binary.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-binary.png with 100% similarity]
deps/rabbitmq_management/priv/www/img/bg-green-dark.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-green-dark.png with 100% similarity]
deps/rabbitmq_management/priv/www/img/bg-red-dark.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-red-dark.png with 100% similarity]
deps/rabbitmq_management/priv/www/img/bg-red.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-red.png with 100% similarity]
deps/rabbitmq_management/priv/www/img/bg-yellow-dark.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-yellow-dark.png with 100% similarity]
deps/rabbitmq_management/priv/www/img/collapse.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/collapse.png with 100% similarity]
deps/rabbitmq_management/priv/www/img/expand.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/expand.png with 100% similarity]
deps/rabbitmq_management/priv/www/img/rabbitmqlogo.png [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/rabbitmqlogo.png with 100% similarity]
deps/rabbitmq_management/priv/www/index.html [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/index.html with 90% similarity]
deps/rabbitmq_management/priv/www/js/base64.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/base64.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/charts.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/charts.js with 96% similarity]
deps/rabbitmq_management/priv/www/js/dispatcher.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/dispatcher.js with 92% similarity]
deps/rabbitmq_management/priv/www/js/ejs.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/ejs.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/ejs.min.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/ejs.min.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/excanvas.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/excanvas.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/excanvas.min.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/excanvas.min.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/formatters.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/formatters.js with 83% similarity]
deps/rabbitmq_management/priv/www/js/global.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/global.js with 96% similarity]
deps/rabbitmq_management/priv/www/js/help.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/help.js with 96% similarity]
deps/rabbitmq_management/priv/www/js/jquery-1.6.4.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery-1.6.4.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/jquery-1.6.4.min.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery-1.6.4.min.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/jquery.flot.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/jquery.flot.min.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.min.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/jquery.flot.time.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.time.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/jquery.flot.time.min.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.time.min.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/json2.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/json2.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/main.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/main.js with 82% similarity]
deps/rabbitmq_management/priv/www/js/prefs.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/prefs.js with 100% similarity]
deps/rabbitmq_management/priv/www/js/sammy.js [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.js with 62% similarity]
deps/rabbitmq_management/priv/www/js/sammy.min.js [new file with mode: 0644]
deps/rabbitmq_management/priv/www/js/tmpl/404.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/404.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/add-binding.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/add-binding.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/binary.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/binary.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/bindings.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/bindings.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/channel.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channel.ejs with 64% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/channels-list.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels-list.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/channels.ejs [new file with mode: 0644]
deps/rabbitmq_management/priv/www/js/tmpl/cluster-name.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/cluster-name.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/columns-options.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/columns-options.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/connection.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/connection.ejs with 66% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/connections.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/connections.ejs with 89% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/consumers.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/consumers.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/error-popup.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/error-popup.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/exchange.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/exchange.ejs with 97% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/exchanges.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/exchanges.ejs with 95% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/import-succeeded.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/import-succeeded.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/layout.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/layout.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/login.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/login.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/memory-bar.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/memory-bar.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/memory-table.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/memory-table.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/memory.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/memory.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/messages.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/messages.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/msg-detail-deliveries.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/msg-detail-deliveries.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/msg-detail-publishes.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/msg-detail-publishes.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/node.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/node.ejs with 91% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/overview.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/overview.ejs with 88% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/partition.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/partition.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/paths.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/paths.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/permissions.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/permissions.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/policies.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/policies.ejs with 97% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/policy.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/policy.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/publish.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/publish.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/queue.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/queue.ejs with 89% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/queues.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/queues.ejs with 98% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/rate-options.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/rate-options.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/registry.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/registry.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/status.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/status.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/user.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/user.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/users.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/users.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/vhost.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/vhost.ejs with 100% similarity]
deps/rabbitmq_management/priv/www/js/tmpl/vhosts.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/vhosts.ejs with 100% similarity]
deps/rabbitmq_management/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_app.erl with 98% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_channel_stats_collector.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_cors.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_db.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_dispatcher.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_dispatcher.erl with 80% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_event_collector.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_event_collector_utils.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_extension.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_extension.erl with 100% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_format.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_format.erl with 59% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_load_definitions.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_load_definitions.erl with 96% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_queue_stats_collector.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_reset_handler.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_reset_handler.erl with 97% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_stats.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_stats_gc.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_stats_tables.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_sup.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_sup_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup_sup.erl with 82% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_util.erl with 74% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_aliveness_test.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_aliveness_test.erl with 82% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_binding.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_binding.erl with 92% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_bindings.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_bindings.erl with 91% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_channel.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_channel.erl with 70% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_channels.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_channels.erl with 65% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_wm_cluster_name.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_cluster_name.erl with 77% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_connection.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_connection.erl with 77% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_connection_channels.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_connection_channels.erl with 71% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_connections.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_connections.erl with 65% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_wm_consumers.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_consumers.erl with 80% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_definitions.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_definitions.erl with 66% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_exchange.erl with 76% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange_publish.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_exchange_publish.erl with 92% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_exchanges.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_exchanges.erl with 71% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_extensions.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_extensions.erl with 77% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_wm_node.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_node.erl with 83% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl [new file with mode: 0644]
deps/rabbitmq_management/src/rabbit_mgmt_wm_nodes.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_nodes.erl with 71% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_overview.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_overview.erl with 67% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_parameter.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_parameter.erl with 84% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_parameters.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_parameters.erl with 61% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_permission.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permission.erl with 88% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permissions.erl with 75% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_user.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permissions_user.erl with 77% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_vhost.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permissions_vhost.erl with 77% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_policies.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_policies.erl with 76% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_policy.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_policy.erl with 87% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_queue.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue.erl with 74% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_actions.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue_actions.erl with 87% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_get.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue_get.erl with 90% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_purge.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue_purge.erl with 78% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_queues.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queues.erl with 67% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_user.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_user.erl with 57% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_users.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_users.erl with 77% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_vhost.erl with 75% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_vhosts.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_vhosts.erl with 67% similarity]
deps/rabbitmq_management/src/rabbit_mgmt_wm_whoami.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_whoami.erl with 73% similarity]
deps/rabbitmq_management/src/rabbitmq_management.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbitmq_management.app.src with 67% similarity]
deps/rabbitmq_management_agent/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_management_agent/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_management_agent/LICENSE [new file with mode: 0644]
deps/rabbitmq_management_agent/LICENSE-MPL-RabbitMQ [new file with mode: 0644]
deps/rabbitmq_management_agent/Makefile [new file with mode: 0644]
deps/rabbitmq_management_agent/erlang.mk [new file with mode: 0644]
deps/rabbitmq_management_agent/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_agent_app.erl with 92% similarity]
deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_agent_sup.erl with 94% similarity]
deps/rabbitmq_management_agent/src/rabbit_mgmt_db_handler.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_db_handler.erl with 81% similarity]
deps/rabbitmq_management_agent/src/rabbit_mgmt_external_stats.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_external_stats.erl with 92% similarity]
deps/rabbitmq_management_agent/src/rabbitmq_management_agent.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbitmq_management_agent.app.src with 68% similarity]
deps/rabbitmq_management_visualiser/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_management_visualiser/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_management_visualiser/LICENSE [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/LICENSE with 100% similarity]
deps/rabbitmq_management_visualiser/LICENSE-BSD-glMatrix [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/LICENSE-BSD-glMatrix with 100% similarity]
deps/rabbitmq_management_visualiser/LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/LICENSE-MPL-RabbitMQ with 99% similarity]
deps/rabbitmq_management_visualiser/Makefile [new file with mode: 0644]
deps/rabbitmq_management_visualiser/README [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/README with 100% similarity]
deps/rabbitmq_management_visualiser/erlang.mk [new file with mode: 0644]
deps/rabbitmq_management_visualiser/license_info [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/license_info with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/js/visualiser.js [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/js/visualiser.js with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/visualiser/index.html [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/index.html with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/visualiser/js/glMatrix-min.js [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/js/glMatrix-min.js with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/visualiser/js/glMatrix.js [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/js/glMatrix.js with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/visualiser/js/main.js [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/js/main.js with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/visualiser/js/model.js [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/js/model.js with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/visualiser/js/octtree.js [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/js/octtree.js with 100% similarity]
deps/rabbitmq_management_visualiser/priv/www/visualiser/js/physics.js [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/js/physics.js with 100% similarity]
deps/rabbitmq_management_visualiser/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_management_visualiser/src/rabbit_mgmt_wm_all.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/src/rabbit_mgmt_wm_all.erl with 96% similarity]
deps/rabbitmq_management_visualiser/src/rabbit_visualiser_mgmt.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/src/rabbit_visualiser_mgmt.erl with 92% similarity]
deps/rabbitmq_management_visualiser/src/rabbitmq_management_visualiser.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/src/rabbitmq_management_visualiser.app.src with 55% similarity]
deps/rabbitmq_mqtt/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_mqtt/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_mqtt/Makefile [new file with mode: 0644]
deps/rabbitmq_mqtt/README.md [new file with mode: 0644]
deps/rabbitmq_mqtt/erlang.mk [new file with mode: 0644]
deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/include/rabbit_mqtt.hrl with 63% similarity]
deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/include/rabbit_mqtt_frame.hrl with 82% similarity]
deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl [new file with mode: 0644]
deps/rabbitmq_mqtt/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_mqtt/src/rabbit_mqtt.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt.erl with 68% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_collector.erl with 98% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_connection_sup.erl with 74% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_frame.erl with 97% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_processor.erl with 55% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_reader.erl with 62% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl [new file with mode: 0644]
deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl [new file with mode: 0644]
deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl [new file with mode: 0644]
deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl [new file with mode: 0644]
deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl [new file with mode: 0644]
deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_sup.erl with 52% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_util.erl with 63% similarity]
deps/rabbitmq_mqtt/src/rabbit_mqtt_vhost_event_handler.erl [new file with mode: 0644]
deps/rabbitmq_mqtt/src/rabbitmq_mqtt.app.src [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/LICENSE.md [moved from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-eldap with 94% similarity]
deps/rabbitmq_recent_history_exchange/Makefile [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/README.md [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/erlang.mk [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/etc/rabbit-hare.config [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/etc/rabbit-test.config [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl [new file with mode: 0644]
deps/rabbitmq_recent_history_exchange/src/rabbitmq_recent_history_exchange.app.src [new file with mode: 0644]
deps/rabbitmq_sharding/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_sharding/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_sharding/LICENSE [new file with mode: 0644]
deps/rabbitmq_sharding/LICENSE-MPL-RabbitMQ [new file with mode: 0644]
deps/rabbitmq_sharding/LICENSE-MPL2 [new file with mode: 0644]
deps/rabbitmq_sharding/Makefile [new file with mode: 0644]
deps/rabbitmq_sharding/README.extra.md [new file with mode: 0644]
deps/rabbitmq_sharding/README.md [new file with mode: 0644]
deps/rabbitmq_sharding/docs/sharded_queues.png [new file with mode: 0644]
deps/rabbitmq_sharding/erlang.mk [new file with mode: 0644]
deps/rabbitmq_sharding/etc/rabbit-hare.config [new file with mode: 0644]
deps/rabbitmq_sharding/etc/rabbit-test.config [new file with mode: 0644]
deps/rabbitmq_sharding/etc/rkey.sh [new file with mode: 0755]
deps/rabbitmq_sharding/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl [new file with mode: 0644]
deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl [new file with mode: 0644]
deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl [new file with mode: 0644]
deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl [new file with mode: 0644]
deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl [new file with mode: 0644]
deps/rabbitmq_sharding/src/rabbit_sharding_util.erl [new file with mode: 0644]
deps/rabbitmq_sharding/src/rabbitmq_sharding.app.src [new file with mode: 0644]
deps/rabbitmq_shovel/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_shovel/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_shovel/Makefile [new file with mode: 0644]
deps/rabbitmq_shovel/README.md [new file with mode: 0644]
deps/rabbitmq_shovel/erlang.mk [new file with mode: 0644]
deps/rabbitmq_shovel/include/rabbit_shovel.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/include/rabbit_shovel.hrl with 93% similarity]
deps/rabbitmq_shovel/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_shovel/src/rabbit_shovel.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel.erl with 92% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_config.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_config.erl with 99% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_dyn_worker_sup.erl with 95% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl with 94% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_parameters.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_parameters.erl with 99% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_status.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_status.erl with 97% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_sup.erl with 98% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_util.erl with 94% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_worker.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_worker.erl with 91% similarity]
deps/rabbitmq_shovel/src/rabbit_shovel_worker_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_worker_sup.erl with 96% similarity]
deps/rabbitmq_shovel/src/rabbitmq_shovel.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbitmq_shovel.app.src with 81% similarity]
deps/rabbitmq_shovel_management/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_shovel_management/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_shovel_management/Makefile [new file with mode: 0644]
deps/rabbitmq_shovel_management/README.md [new file with mode: 0644]
deps/rabbitmq_shovel_management/erlang.mk [new file with mode: 0644]
deps/rabbitmq_shovel_management/priv/www/js/shovel.js [moved from rabbitmq-server/plugins-src/rabbitmq-shovel-management/priv/www/js/shovel.js with 100% similarity]
deps/rabbitmq_shovel_management/priv/www/js/tmpl/dynamic-shovel.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-shovel-management/priv/www/js/tmpl/dynamic-shovel.ejs with 100% similarity]
deps/rabbitmq_shovel_management/priv/www/js/tmpl/dynamic-shovels.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-shovel-management/priv/www/js/tmpl/dynamic-shovels.ejs with 100% similarity]
deps/rabbitmq_shovel_management/priv/www/js/tmpl/shovels.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-shovel-management/priv/www/js/tmpl/shovels.ejs with 100% similarity]
deps/rabbitmq_shovel_management/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_shovel_management/src/rabbit_shovel_mgmt.erl [moved from rabbitmq-server/plugins-src/rabbitmq-shovel-management/src/rabbit_shovel_mgmt.erl with 98% similarity]
deps/rabbitmq_shovel_management/src/rabbitmq_shovel_management.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-shovel-management/src/rabbitmq_shovel_management.app.src with 53% similarity]
deps/rabbitmq_stomp/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_stomp/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_stomp/Makefile [new file with mode: 0644]
deps/rabbitmq_stomp/NOTES [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/NOTES with 100% similarity]
deps/rabbitmq_stomp/README.md [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/README.md with 70% similarity]
deps/rabbitmq_stomp/erlang.mk [new file with mode: 0644]
deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_recv.pl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/perl/rabbitmq_stomp_recv.pl with 100% similarity]
deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_client.pl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/perl/rabbitmq_stomp_rpc_client.pl with 100% similarity]
deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_service.pl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/perl/rabbitmq_stomp_rpc_service.pl with 100% similarity]
deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send.pl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/perl/rabbitmq_stomp_send.pl with 100% similarity]
deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send_many.pl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/perl/rabbitmq_stomp_send_many.pl with 100% similarity]
deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_slow_recv.pl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/perl/rabbitmq_stomp_slow_recv.pl with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/cb-receiver.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/cb-receiver.rb with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/cb-sender.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/cb-sender.rb with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/cb-slow-receiver.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/cb-slow-receiver.rb with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/persistent-receiver.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/persistent-receiver.rb with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/persistent-sender.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/persistent-sender.rb with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/topic-broadcast-receiver.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/topic-broadcast-receiver.rb with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/topic-broadcast-with-unsubscribe.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/topic-broadcast-with-unsubscribe.rb with 100% similarity]
deps/rabbitmq_stomp/examples/ruby/topic-sender.rb [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/examples/ruby/topic-sender.rb with 100% similarity]
deps/rabbitmq_stomp/include/rabbit_stomp.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/include/rabbit_stomp.hrl with 87% similarity]
deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/include/rabbit_stomp_frame.hrl with 91% similarity]
deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/include/rabbit_stomp_headers.hrl with 87% similarity]
deps/rabbitmq_stomp/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_stomp/src/rabbit_stomp.erl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp.erl with 97% similarity]
deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_client_sup.erl with 68% similarity]
deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_frame.erl with 99% similarity]
deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_processor.erl with 78% similarity]
deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl [new file with mode: 0644]
deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_sup.erl with 51% similarity]
deps/rabbitmq_stomp/src/rabbit_stomp_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_util.erl with 85% similarity]
deps/rabbitmq_stomp/src/rabbitmq_stomp.app.src [new file with mode: 0644]
deps/rabbitmq_top/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_top/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_top/Makefile [new file with mode: 0644]
deps/rabbitmq_top/README.md [new file with mode: 0644]
deps/rabbitmq_top/erlang.mk [new file with mode: 0644]
deps/rabbitmq_top/priv/www/js/tmpl/ets_tables.ejs [new file with mode: 0644]
deps/rabbitmq_top/priv/www/js/tmpl/process.ejs [new file with mode: 0644]
deps/rabbitmq_top/priv/www/js/tmpl/processes.ejs [new file with mode: 0644]
deps/rabbitmq_top/priv/www/js/top.js [new file with mode: 0644]
deps/rabbitmq_top/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_top/src/rabbit_top_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_all.erl with 57% similarity]
deps/rabbitmq_top/src/rabbit_top_extension.erl [new file with mode: 0644]
deps/rabbitmq_top/src/rabbit_top_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup.erl with 53% similarity]
deps/rabbitmq_top/src/rabbit_top_util.erl [new file with mode: 0644]
deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl [new file with mode: 0644]
deps/rabbitmq_top/src/rabbit_top_wm_process.erl [new file with mode: 0644]
deps/rabbitmq_top/src/rabbit_top_wm_processes.erl [new file with mode: 0644]
deps/rabbitmq_top/src/rabbit_top_worker.erl [new file with mode: 0644]
deps/rabbitmq_top/src/rabbitmq_top.app.src [new file with mode: 0644]
deps/rabbitmq_tracing/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_tracing/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_tracing/Makefile [new file with mode: 0644]
deps/rabbitmq_tracing/README.md [new file with mode: 0644]
deps/rabbitmq_tracing/erlang.mk [new file with mode: 0644]
deps/rabbitmq_tracing/priv/www/js/tmpl/traces.ejs [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/priv/www/js/tmpl/traces.ejs with 100% similarity]
deps/rabbitmq_tracing/priv/www/js/tracing.js [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/priv/www/js/tracing.js with 100% similarity]
deps/rabbitmq_tracing/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_tracing/src/rabbit_tracing_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_app.erl with 92% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_consumer.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_consumer.erl with 92% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_consumer_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_consumer_sup.erl with 90% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_files.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_files.erl with 96% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_mgmt.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_mgmt.erl with 94% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_sup.erl with 90% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_traces.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_traces.erl with 98% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_util.erl [new file with mode: 0644]
deps/rabbitmq_tracing/src/rabbit_tracing_wm_file.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_file.erl with 96% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_wm_files.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_files.erl with 95% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_wm_trace.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_trace.erl with 98% similarity]
deps/rabbitmq_tracing/src/rabbit_tracing_wm_traces.erl [moved from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_traces.erl with 95% similarity]
deps/rabbitmq_tracing/src/rabbitmq_tracing.app.src [new file with mode: 0644]
deps/rabbitmq_trust_store/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_trust_store/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_trust_store/Makefile [new file with mode: 0644]
deps/rabbitmq_trust_store/README.md [new file with mode: 0644]
deps/rabbitmq_trust_store/erlang.mk [new file with mode: 0644]
deps/rabbitmq_trust_store/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_trust_store/src/rabbit_trust_store.erl [new file with mode: 0644]
deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl [new file with mode: 0644]
deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl [new file with mode: 0644]
deps/rabbitmq_trust_store/src/rabbitmq_trust_store.app.src [new file with mode: 0644]
deps/rabbitmq_web_dispatch/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_web_dispatch/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_web_dispatch/LICENSE [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/LICENSE with 100% similarity]
deps/rabbitmq_web_dispatch/Makefile [new file with mode: 0644]
deps/rabbitmq_web_dispatch/README.md [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/README.md with 100% similarity]
deps/rabbitmq_web_dispatch/erlang.mk [new file with mode: 0644]
deps/rabbitmq_web_dispatch/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch.erl with 96% similarity]
deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_app.erl with 93% similarity]
deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_registry.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_registry.erl with 98% similarity]
deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_sup.erl with 98% similarity]
deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_util.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_util.erl with 94% similarity]
deps/rabbitmq_web_dispatch/src/rabbit_webmachine.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_webmachine.erl with 100% similarity]
deps/rabbitmq_web_dispatch/src/rabbit_webmachine_error_handler.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_webmachine_error_handler.erl with 75% similarity]
deps/rabbitmq_web_dispatch/src/rabbitmq_web_dispatch.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbitmq_web_dispatch.app.src with 62% similarity]
deps/rabbitmq_web_stomp/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_web_stomp/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_web_stomp/LICENSE [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp/LICENSE with 100% similarity]
deps/rabbitmq_web_stomp/LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/LICENSE-MPL-RabbitMQ with 100% similarity]
deps/rabbitmq_web_stomp/Makefile [new file with mode: 0644]
deps/rabbitmq_web_stomp/README.md [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp/README.md with 98% similarity]
deps/rabbitmq_web_stomp/erlang.mk [new file with mode: 0644]
deps/rabbitmq_web_stomp/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_web_stomp/src/rabbit_ws_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_app.erl with 92% similarity]
deps/rabbitmq_web_stomp/src/rabbit_ws_client.erl [new file with mode: 0644]
deps/rabbitmq_web_stomp/src/rabbit_ws_client_sup.erl [new file with mode: 0644]
deps/rabbitmq_web_stomp/src/rabbit_ws_handler.erl [new file with mode: 0644]
deps/rabbitmq_web_stomp/src/rabbit_ws_sockjs.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_sockjs.erl with 53% similarity]
deps/rabbitmq_web_stomp/src/rabbit_ws_sup.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_sup.erl with 94% similarity]
deps/rabbitmq_web_stomp/src/rabbitmq_web_stomp.app.src [new file with mode: 0644]
deps/rabbitmq_web_stomp_examples/CODE_OF_CONDUCT.md [new file with mode: 0644]
deps/rabbitmq_web_stomp_examples/CONTRIBUTING.md [new file with mode: 0644]
deps/rabbitmq_web_stomp_examples/LICENSE [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/LICENSE with 100% similarity]
deps/rabbitmq_web_stomp_examples/LICENSE-APL2-Stomp-Websocket [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/LICENSE-APL2-Stomp-Websocket with 100% similarity]
deps/rabbitmq_web_stomp_examples/LICENSE-MPL-RabbitMQ [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp/LICENSE-MPL-RabbitMQ with 100% similarity]
deps/rabbitmq_web_stomp_examples/Makefile [new file with mode: 0644]
deps/rabbitmq_web_stomp_examples/README.md [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/README.md with 100% similarity]
deps/rabbitmq_web_stomp_examples/erlang.mk [new file with mode: 0644]
deps/rabbitmq_web_stomp_examples/priv/bunny.html [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/bunny.html with 92% similarity]
deps/rabbitmq_web_stomp_examples/priv/bunny.png [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/bunny.png with 100% similarity]
deps/rabbitmq_web_stomp_examples/priv/echo.html [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/echo.html with 88% similarity]
deps/rabbitmq_web_stomp_examples/priv/index.html [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/index.html with 62% similarity]
deps/rabbitmq_web_stomp_examples/priv/main.css [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/main.css with 100% similarity]
deps/rabbitmq_web_stomp_examples/priv/pencil.cur [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/pencil.cur with 100% similarity]
deps/rabbitmq_web_stomp_examples/priv/stomp.js [new file with mode: 0644]
deps/rabbitmq_web_stomp_examples/priv/temp-queue.html [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/temp-queue.html with 91% similarity]
deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk [new file with mode: 0644]
deps/rabbitmq_web_stomp_examples/src/rabbit_web_stomp_examples_app.erl [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/src/rabbit_web_stomp_examples_app.erl with 95% similarity]
deps/rabbitmq_web_stomp_examples/src/rabbitmq_web_stomp_examples.app.src [moved from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/src/rabbitmq_web_stomp_examples.app.src with 63% similarity]
deps/ranch/AUTHORS [new file with mode: 0644]
deps/ranch/CHANGELOG.asciidoc [new file with mode: 0644]
deps/ranch/LICENSE [new file with mode: 0644]
deps/ranch/Makefile [new file with mode: 0644]
deps/ranch/README.asciidoc [new file with mode: 0644]
deps/ranch/appveyor.yml [new file with mode: 0644]
deps/ranch/circle.yml [new file with mode: 0644]
deps/ranch/doc/src/guide/book.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/embedded.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/internals.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/introduction.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/listeners.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/parsers.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/protocols.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/ssl_auth.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/guide/transports.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/manual/ranch.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/manual/ranch_app.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/manual/ranch_protocol.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/manual/ranch_ssl.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/manual/ranch_tcp.asciidoc [new file with mode: 0644]
deps/ranch/doc/src/manual/ranch_transport.asciidoc [new file with mode: 0644]
deps/ranch/erlang.mk [new file with mode: 0644]
deps/ranch/examples/tcp_echo/Makefile [new file with mode: 0644]
deps/ranch/examples/tcp_echo/README.md [new file with mode: 0644]
deps/ranch/examples/tcp_echo/relx.config [new file with mode: 0644]
deps/ranch/examples/tcp_echo/src/echo_protocol.erl [new file with mode: 0644]
deps/ranch/examples/tcp_echo/src/tcp_echo.app.src [new file with mode: 0644]
deps/ranch/examples/tcp_echo/src/tcp_echo_app.erl [new file with mode: 0644]
deps/ranch/examples/tcp_echo/src/tcp_echo_sup.erl [new file with mode: 0644]
deps/ranch/examples/tcp_reverse/Makefile [new file with mode: 0644]
deps/ranch/examples/tcp_reverse/README.md [new file with mode: 0644]
deps/ranch/examples/tcp_reverse/relx.config [new file with mode: 0644]
deps/ranch/examples/tcp_reverse/src/reverse_protocol.erl [new file with mode: 0644]
deps/ranch/examples/tcp_reverse/src/tcp_reverse.app.src [new file with mode: 0644]
deps/ranch/examples/tcp_reverse/src/tcp_reverse_app.erl [new file with mode: 0644]
deps/ranch/examples/tcp_reverse/src/tcp_reverse_sup.erl [new file with mode: 0644]
deps/ranch/src/ranch.app.src [new file with mode: 0644]
deps/ranch/src/ranch.erl [new file with mode: 0644]
deps/ranch/src/ranch_acceptor.erl [new file with mode: 0644]
deps/ranch/src/ranch_acceptors_sup.erl [new file with mode: 0644]
deps/ranch/src/ranch_app.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_app.erl with 69% similarity]
deps/ranch/src/ranch_conns_sup.erl [new file with mode: 0644]
deps/ranch/src/ranch_listener_sup.erl [new file with mode: 0644]
deps/ranch/src/ranch_protocol.erl [new file with mode: 0644]
deps/ranch/src/ranch_server.erl [new file with mode: 0644]
deps/ranch/src/ranch_ssl.erl [new file with mode: 0644]
deps/ranch/src/ranch_sup.erl [moved from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/proper_SUITE.erl with 56% similarity]
deps/ranch/src/ranch_tcp.erl [new file with mode: 0644]
deps/ranch/src/ranch_transport.erl [new file with mode: 0644]
deps/sockjs/COPYING [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/COPYING with 73% similarity]
deps/sockjs/Changelog [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/Changelog with 100% similarity]
deps/sockjs/LICENSE-APL2-Rebar [moved from rabbitmq-server/plugins-src/licensing/LICENSE-Apache-Basho with 100% similarity]
deps/sockjs/LICENSE-EPL-OTP [new file with mode: 0644]
deps/sockjs/LICENSE-MIT-Mochiweb [new file with mode: 0644]
deps/sockjs/LICENSE-MIT-SockJS [moved from rabbitmq-server/plugins-src/eldap-wrapper/LICENSE-MIT-eldap with 96% similarity]
deps/sockjs/Makefile [new file with mode: 0644]
deps/sockjs/Makefile.orig.mk [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/Makefile with 100% similarity]
deps/sockjs/README.md [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/README.md with 69% similarity]
deps/sockjs/examples/cowboy_echo.erl [new file with mode: 0755]
deps/sockjs/examples/cowboy_echo_authen_callback.erl [new file with mode: 0755]
deps/sockjs/examples/cowboy_test_server.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/cowboy_test_server.erl with 73% similarity]
deps/sockjs/examples/echo.html [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/echo.html with 93% similarity]
deps/sockjs/examples/echo_authen_callback.html [new file with mode: 0644]
deps/sockjs/examples/multiplex/cowboy_multiplex.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/cowboy_multiplex.erl with 58% similarity]
deps/sockjs/examples/multiplex/cowboy_multiplex_authen_callback.erl [new file with mode: 0755]
deps/sockjs/examples/multiplex/index.html [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/index.html with 92% similarity]
deps/sockjs/examples/multiplex/index_authen_callback.html [new file with mode: 0644]
deps/sockjs/rebar [new file with mode: 0755]
deps/sockjs/rebar.config [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar.config with 53% similarity]
deps/sockjs/src/mochijson2_fork.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/mochijson2_fork.erl with 100% similarity]
deps/sockjs/src/mochinum_fork.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/mochinum_fork.erl with 100% similarity]
deps/sockjs/src/sockjs.app.src [new file with mode: 0644]
deps/sockjs/src/sockjs.erl [new file with mode: 0644]
deps/sockjs/src/sockjs_action.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_action.erl with 90% similarity]
deps/sockjs/src/sockjs_app.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_app.erl with 76% similarity]
deps/sockjs/src/sockjs_cowboy_handler.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_cowboy_handler.erl with 91% similarity]
deps/sockjs/src/sockjs_filters.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_filters.erl with 79% similarity]
deps/sockjs/src/sockjs_handler.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_handler.erl with 85% similarity]
deps/sockjs/src/sockjs_http.erl [new file with mode: 0644]
deps/sockjs/src/sockjs_internal.hrl [new file with mode: 0644]
deps/sockjs/src/sockjs_json.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_json.erl with 76% similarity]
deps/sockjs/src/sockjs_multiplex.erl [new file with mode: 0644]
deps/sockjs/src/sockjs_multiplex_channel.erl [new file with mode: 0644]
deps/sockjs/src/sockjs_service.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_service.erl with 100% similarity]
deps/sockjs/src/sockjs_session.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session.erl with 86% similarity]
deps/sockjs/src/sockjs_session_sup.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session_sup.erl with 88% similarity]
deps/sockjs/src/sockjs_util.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_util.erl with 88% similarity]
deps/sockjs/src/sockjs_ws_handler.erl [moved from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_ws_handler.erl with 88% similarity]
deps/webmachine/Emakefile [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/Emakefile with 100% similarity]
deps/webmachine/LICENSE [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/LICENSE with 100% similarity]
deps/webmachine/Makefile [new file with mode: 0644]
deps/webmachine/Makefile.orig.mk [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/Makefile with 100% similarity]
deps/webmachine/README.org [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/README.org with 100% similarity]
deps/webmachine/THANKS [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/THANKS with 100% similarity]
deps/webmachine/demo/Makefile [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/Makefile with 100% similarity]
deps/webmachine/demo/README [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/README with 100% similarity]
deps/webmachine/demo/priv/dispatch.conf [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/priv/dispatch.conf with 100% similarity]
deps/webmachine/demo/rebar.config [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/rebar.config with 100% similarity]
deps/webmachine/demo/src/webmachine_demo.app.src [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo.app.src with 100% similarity]
deps/webmachine/demo/src/webmachine_demo.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo.erl with 100% similarity]
deps/webmachine/demo/src/webmachine_demo_app.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_app.erl with 100% similarity]
deps/webmachine/demo/src/webmachine_demo_fs_resource.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_fs_resource.erl with 100% similarity]
deps/webmachine/demo/src/webmachine_demo_resource.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_resource.erl with 100% similarity]
deps/webmachine/demo/src/webmachine_demo_sup.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_sup.erl with 100% similarity]
deps/webmachine/demo/start.sh [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/start.sh with 100% similarity]
deps/webmachine/docs/http-headers-status-v3.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/docs/http-headers-status-v3.png with 100% similarity]
deps/webmachine/include/webmachine.hrl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/webmachine.hrl with 100% similarity]
deps/webmachine/include/webmachine_logger.hrl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/webmachine_logger.hrl with 100% similarity]
deps/webmachine/include/wm_reqdata.hrl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_reqdata.hrl with 100% similarity]
deps/webmachine/include/wm_reqstate.hrl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_reqstate.hrl with 100% similarity]
deps/webmachine/include/wm_resource.hrl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_resource.hrl with 100% similarity]
deps/webmachine/priv/templates/Makefile [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/Makefile with 100% similarity]
deps/webmachine/priv/templates/README [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/README with 100% similarity]
deps/webmachine/priv/templates/priv/dispatch.conf [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/priv/dispatch.conf with 100% similarity]
deps/webmachine/priv/templates/rebar.config [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/rebar.config with 100% similarity]
deps/webmachine/priv/templates/src/wmskel.app.src [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel.app.src with 100% similarity]
deps/webmachine/priv/templates/src/wmskel.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel.erl with 100% similarity]
deps/webmachine/priv/templates/src/wmskel_app.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_app.erl with 100% similarity]
deps/webmachine/priv/templates/src/wmskel_resource.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_resource.erl with 100% similarity]
deps/webmachine/priv/templates/src/wmskel_sup.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_sup.erl with 100% similarity]
deps/webmachine/priv/templates/start.sh [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/start.sh with 100% similarity]
deps/webmachine/priv/templates/wmskel.template [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/wmskel.template with 100% similarity]
deps/webmachine/priv/trace/http-headers-status-v3.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/http-headers-status-v3.png with 100% similarity]
deps/webmachine/priv/trace/wmtrace.css [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/wmtrace.css with 100% similarity]
deps/webmachine/priv/trace/wmtrace.js [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/wmtrace.js with 100% similarity]
deps/webmachine/priv/www/index.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/www/index.html with 100% similarity]
deps/webmachine/rebar [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar with 100% similarity]
deps/webmachine/rebar.config [new file with mode: 0644]
deps/webmachine/rebar.config.script [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar.config.script with 100% similarity]
deps/webmachine/scripts/new_webmachine.sh [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/scripts/new_webmachine.sh with 100% similarity]
deps/webmachine/src/webmachine.app.src [new file with mode: 0644]
deps/webmachine/src/webmachine.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine.erl with 100% similarity]
deps/webmachine/src/webmachine_app.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_app.erl with 100% similarity]
deps/webmachine/src/webmachine_decision_core.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_decision_core.erl with 100% similarity]
deps/webmachine/src/webmachine_deps.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_deps.erl with 100% similarity]
deps/webmachine/src/webmachine_dispatcher.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_dispatcher.erl with 100% similarity]
deps/webmachine/src/webmachine_error.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_error.erl with 100% similarity]
deps/webmachine/src/webmachine_error_handler.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_error_handler.erl with 100% similarity]
deps/webmachine/src/webmachine_log.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_log.erl with 100% similarity]
deps/webmachine/src/webmachine_log_handler.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_log_handler.erl with 100% similarity]
deps/webmachine/src/webmachine_logger_watcher.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_logger_watcher.erl with 100% similarity]
deps/webmachine/src/webmachine_logger_watcher_sup.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_logger_watcher_sup.erl with 100% similarity]
deps/webmachine/src/webmachine_mochiweb.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_mochiweb.erl with 100% similarity]
deps/webmachine/src/webmachine_multipart.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_multipart.erl with 100% similarity]
deps/webmachine/src/webmachine_perf_log_handler.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_perf_log_handler.erl with 100% similarity]
deps/webmachine/src/webmachine_request.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_request.erl with 100% similarity]
deps/webmachine/src/webmachine_resource.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_resource.erl with 100% similarity]
deps/webmachine/src/webmachine_router.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_router.erl with 100% similarity]
deps/webmachine/src/webmachine_sup.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_sup.erl with 100% similarity]
deps/webmachine/src/webmachine_util.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_util.erl with 100% similarity]
deps/webmachine/src/wmtrace_resource.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/wmtrace_resource.erl with 100% similarity]
deps/webmachine/src/wrq.erl [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/wrq.erl with 100% similarity]
deps/webmachine/start-dev.sh [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/start-dev.sh with 100% similarity]
deps/webmachine/start.sh [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/start.sh with 100% similarity]
deps/webmachine/www/blogs.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/blogs.html with 100% similarity]
deps/webmachine/www/contact.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/contact.html with 100% similarity]
deps/webmachine/www/css/style-1c.css [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/css/style-1c.css with 100% similarity]
deps/webmachine/www/css/style.css [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/css/style.css with 100% similarity]
deps/webmachine/www/debugging.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/debugging.html with 100% similarity]
deps/webmachine/www/diagram.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/diagram.html with 100% similarity]
deps/webmachine/www/dispatcher.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/dispatcher.html with 100% similarity]
deps/webmachine/www/docs.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/docs.html with 100% similarity]
deps/webmachine/www/example_resources.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/example_resources.html with 100% similarity]
deps/webmachine/www/favicon.ico [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/favicon.ico with 100% similarity]
deps/webmachine/www/images/WM200-crop.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/WM200-crop.png with 100% similarity]
deps/webmachine/www/images/basho-landscape.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basho-landscape.gif with 100% similarity]
deps/webmachine/www/images/basic-trace-decision-tab.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-decision-tab.png with 100% similarity]
deps/webmachine/www/images/basic-trace-labeled.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-labeled.png with 100% similarity]
deps/webmachine/www/images/basic-trace-request-tab.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-request-tab.png with 100% similarity]
deps/webmachine/www/images/basic-trace-response-tab.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-response-tab.png with 100% similarity]
deps/webmachine/www/images/bg.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/bg.gif with 100% similarity]
deps/webmachine/www/images/blankbox.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/blankbox.gif with 100% similarity]
deps/webmachine/www/images/chash.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/chash.gif with 100% similarity]
deps/webmachine/www/images/easy-ops.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/easy-ops.gif with 100% similarity]
deps/webmachine/www/images/gossip4.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/gossip4.gif with 100% similarity]
deps/webmachine/www/images/halfblankbox.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/halfblankbox.gif with 100% similarity]
deps/webmachine/www/images/http-headers-status-v3.png [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/http-headers-status-v3.png with 100% similarity]
deps/webmachine/www/images/more.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/more.gif with 100% similarity]
deps/webmachine/www/images/site.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/site.gif with 100% similarity]
deps/webmachine/www/images/splash250.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/splash250.gif with 100% similarity]
deps/webmachine/www/images/vclock.gif [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/vclock.gif with 100% similarity]
deps/webmachine/www/index.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/index.html with 100% similarity]
deps/webmachine/www/intros.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/intros.html with 100% similarity]
deps/webmachine/www/mechanics.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/mechanics.html with 100% similarity]
deps/webmachine/www/quickstart.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/quickstart.html with 100% similarity]
deps/webmachine/www/reftrans.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/reftrans.html with 100% similarity]
deps/webmachine/www/reqdata.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/reqdata.html with 100% similarity]
deps/webmachine/www/resources.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/resources.html with 100% similarity]
deps/webmachine/www/streambody.html [moved from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/streambody.html with 100% similarity]
erlang.mk [new file with mode: 0644]
git-revisions.txt [new file with mode: 0644]
rabbitmq-components.mk [new file with mode: 0644]
rabbitmq-server/Makefile [deleted file]
rabbitmq-server/README [deleted file]
rabbitmq-server/calculate-relative [deleted file]
rabbitmq-server/codegen/license_info [deleted file]
rabbitmq-server/generate_app [deleted file]
rabbitmq-server/generate_deps [deleted file]
rabbitmq-server/plugins-src/Makefile [deleted file]
rabbitmq-server/plugins-src/README [deleted file]
rabbitmq-server/plugins-src/all-packages.mk [deleted file]
rabbitmq-server/plugins-src/common.mk [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0001-R12-fake-iodata-type.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0002-R12-drop-all-references-to-boolean-type.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0003-R12-drop-all-references-to-reference-type.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0004-R12-drop-references-to-iodata-type.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0005-R12-drop-references-to-Default-any-type.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0007-R12-type-definitions-must-be-ordered.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/0008-sec-websocket-protocol.patch [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/Makefile [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/README.md [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.done [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.travis.yml [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/AUTHORS [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/CHANGELOG.md [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/Makefile [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/README.md [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/cover.spec [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/doc/overview.edoc [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/include/http.hrl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/rebar.config [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptor.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptors_sup.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_cookies.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_dispatcher.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_handler.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_protocol.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_req.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_static.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket_handler.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener_sup.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_multipart.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_protocol.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_requests_sup.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_ssl_transport.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_tcp_transport.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/chunked_handler.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/dispatcher_prop.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/cert.pem [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/key.pem [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_errors.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_init_shutdown.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_long_polling.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_multipart.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_set_resp.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_stream_body.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_forbidden_resource.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_simple_resource.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler_init_shutdown.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_SUITE.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_timeout_hibernate_handler.erl [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/hash.mk [deleted file]
rabbitmq-server/plugins-src/cowboy-wrapper/package.mk [deleted file]
rabbitmq-server/plugins-src/do-package.mk [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/Makefile [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-appify.patch [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/.done [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/Makefile [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/README [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/README.example [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/short-desc [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/ebin/eldap.app [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/include/eldap.hrl [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/ELDAPv3.asn [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/Makefile [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/eldap.erl [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/README.test [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bill.ldif [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bluetail.ldif [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/crl.ldif [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/eldap_test.erl [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/ldap.rc [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/people.ldif [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/post_danmark.ldif [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/server1.crl [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/slapd.conf [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/tobbe.ldif [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/eldap-no-ssl-seed.patch [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/hash.mk [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/license_info [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/package.mk [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/remove-eldap-fsm.patch [deleted file]
rabbitmq-server/plugins-src/eldap-wrapper/remove-ietf-doc.patch [deleted file]
rabbitmq-server/plugins-src/generate_app [deleted file]
rabbitmq-server/plugins-src/generate_deps [deleted file]
rabbitmq-server/plugins-src/licensing/license_info_eldap-wrapper [deleted file]
rabbitmq-server/plugins-src/licensing/license_info_mochiweb-wrapper [deleted file]
rabbitmq-server/plugins-src/licensing/license_info_webmachine-wrapper [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/10-build-on-R12B-5.patch [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/20-MAX_RECV_BODY.patch [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/30-remove-crypto-ssl-dependencies.patch [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/40-remove-compiler-syntax_tools-dependencies.patch [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/50-remove-json.patch [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/Makefile [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/hash.mk [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/license_info [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.done [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.travis.yml [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/CHANGES.md [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/Makefile [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/new_mochiweb.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_std.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.app.src [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_acceptor.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_echo.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request_tests.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/start-dev.sh [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_base64url_tests.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_html_tests.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_http_tests.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_tests.erl [deleted file]
rabbitmq-server/plugins-src/mochiweb-wrapper/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/lib-java/junit.jar [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/build.xml [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/src/rabbit_amqp1_0_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/build.xml [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/run-tests.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.travis.yml [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-authorisation [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-tests [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/etc/rabbit-test.config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/groups.ldif [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/people.ldif [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/rabbit.ldif [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/setup.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.travis.yml [deleted file]
rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/README.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile.in [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/common.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_gen_consumer_spec.hrl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/rabbit_common.app.in [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/test.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_client_SUITE.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_dbg.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/negative_test_util.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/test_util.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation-management/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation-management/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation-management/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation-management/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation-management/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/etc/setup-rabbit-test.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_exchange_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_queue_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_test_util.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_unit_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-agent/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-agent/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-agent/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-agent/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-visualiser/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-visualiser/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-visualiser/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management-visualiser/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/.travis.yml [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/etc/bunny.config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/etc/hare.config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/etc/rabbit-test.config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.min.js [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels.ejs [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_db.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_stats.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/default-config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_clustering.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_db.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_db_unit.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_http.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_unit.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_util.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbitmqadmin-test-wrapper.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbitmqadmin-test.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-management/test/src/test-config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/README.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbitmq_mqtt.app.src [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/build.properties [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/build.xml [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/lib/junit.jar [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/rabbit-test.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/setup-rabbit-test.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/MqttTest.java [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/rabbit-test.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/tls/MqttSSLTest.java [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/tls/MutualAuth.java [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/rabbit_mqtt_util_tests.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/rabbitmq_mqtt_standalone.app.src [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/test.config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-mqtt/test/test.sh [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/etc/rabbit-test.config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/test/src/rabbit_shovel_mgmt_test_all.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel-management/test/src/rabbit_shovel_mgmt_test_http.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/generate_deps [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test_all.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test_dyn.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/.travis.yml [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/deps/pika/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/deps/stomppy/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_reader.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbitmq_stomp.app.src [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/ack.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/base.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/connect_options.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/destinations.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/errors.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/lifecycle.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/parsing.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/queue_properties.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_amqqueue_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_client.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_publish_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test_frame.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test_util.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/redelivered.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/reliability.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/ssl_lifecycle.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test.config [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_connect_options.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_runner.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_ssl.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_util.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/transactions.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/certs/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/certs/openssl.cnf [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/qpid_config.py [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/qpid_patch [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/rabbit_failing.txt [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/inet_proxy_dist.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy_manager.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_consumer.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_producer.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_configs.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_runner.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_util.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/src/rabbitmq_test.app.src [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/cluster_rename.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/clustering_management.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/crashing_queues.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/dynamic_ha.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/eager_sync.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/many_node_ha.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/partitions.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/rabbit_priority_queue_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/simple_ha.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-test/test/src/sync_detection.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-tracing/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-tracing/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-tracing/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-tracing/README [deleted file]
rabbitmq-server/plugins-src/rabbitmq-tracing/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbitmq_tracing.app.src [deleted file]
rabbitmq-server/plugins-src/rabbitmq-tracing/test/src/rabbit_tracing_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-dispatch/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-dispatch/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-dispatch/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-dispatch/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/priv/www/index.html [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/src/rabbit_web_dispatch_test.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/src/rabbit_web_dispatch_test_unit.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/sockjs-0.3.js [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/stomp.js [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/Makefile [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/package.mk [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client_sup.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbitmq_web_stomp.app.src [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_raw_websocket.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_sockjs_websocket.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rfc6455_client.erl [deleted file]
rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/stomp.erl [deleted file]
rabbitmq-server/plugins-src/release.mk [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0000-remove-spec-patch.diff [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0001-a2b-b2a.diff [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0002-parameterised-modules-r16a.diff [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0003-websocket-subprotocol [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/Makefile [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/generate-0000-remove-spec-patch.sh [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/hash.mk [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/package.mk [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/.done [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/cowboy_echo.erl [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/multiplex.js [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/pmod_pt.erl [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.app.src [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.erl [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_http.erl [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_internal.hrl [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex.erl [deleted file]
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex_channel.erl [deleted file]
rabbitmq-server/plugins-src/umbrella.mk [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/.srcdist_done [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/10-remove-crypto-dependency.patch [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/CONTRIBUTING.md [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/Makefile [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/hash.mk [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/license_info [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/package.mk [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.done [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.travis.yml [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar.config [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine.app.src [deleted file]
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/test/etag_test.erl [deleted file]
rabbitmq-server/scripts/rabbitmq-env.bat [deleted file]
rabbitmq-server/src/pg2_fixed.erl [deleted file]
rabbitmq-server/src/rabbit_channel_interceptor.erl [deleted file]
rabbitmq-server/src/ssl_compat.erl [deleted file]
rabbitmq-server/src/tcp_acceptor.erl [deleted file]
rabbitmq-server/src/tcp_acceptor_sup.erl [deleted file]
rabbitmq-server/src/tcp_listener.erl [deleted file]
rabbitmq-server/src/tcp_listener_sup.erl [deleted file]
rabbitmq-server/src/time_compat.erl [deleted file]
rabbitmq-server/src/worker_pool_sup.erl [deleted file]
rabbitmq-server/version.mk [deleted file]
scripts/rabbitmq-script-wrapper [moved from debian/rabbitmq-script-wrapper with 63% similarity, mode: 0644]
scripts/rabbitmq-server-ha.ocf [new file with mode: 0755]
scripts/rabbitmq-server.ocf [moved from debian/ocf/rabbitmq-server with 98% similarity]
scripts/travis_test_ocf_ra.sh [new file with mode: 0644]
upgrade/Makefile [new file with mode: 0644]
upgrade/README.md [new file with mode: 0644]
upgrade/config/enabled_plugins [new file with mode: 0644]
upgrade/config/rabbitmq.config [new file with mode: 0644]
upgrade/scripts/upgrade-from-3.5-helpers.sh [new file with mode: 0644]
upgrade/scripts/upgrade-from.sh [new file with mode: 0755]
upgrade/scripts/upgrade-helpers.sh [new file with mode: 0644]
upgrade/scripts/upgrade-to-3.6-helpers.sh [new file with mode: 0644]
upgrade/scripts/upgrade-to-3.7-helpers.sh [new file with mode: 0644]
upgrade/scripts/upgrade-to.sh [new file with mode: 0755]

diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
similarity index 72%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/CONTRIBUTING.md
rename to CONTRIBUTING.md
index 69a4b4a437fdf25c45c200610d780c7a009146be..45bbcbe62e74c1a8682d2097db8eec955d177b9c 100644 (file)
@@ -20,22 +20,9 @@ If what you are going to work on is a substantial change, please first ask the c
 of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
 
 
-## (Brief) Code of Conduct
+## Code of Conduct
 
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
 
 
 ## Contributor Agreement
similarity index 98%
rename from rabbitmq-server/LICENSE
rename to LICENSE
index 9deeb23c0dcfa4e26223ed4ff3c907de8e98c695..1834aa55012de407364aa1cdbf054acc1c2b97c7 100644 (file)
+++ b/LICENSE
@@ -4,7 +4,7 @@ If you have any questions regarding licensing, please contact us at
 info@rabbitmq.com.
 
 The files amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are
-"Copyright (C) 2008-2013 GoPivotal", Inc. and are covered by the MIT
+"Copyright (C) 2008-2016 Pivotal Software, Inc." and are covered by the MIT
 license.
 
 jQuery is "Copyright (c) 2010 John Resig" and is covered by the MIT
@@ -24,16 +24,6 @@ http://code.google.com/p/explorercanvas/
 Flot is "Copyright (c) 2007-2013 IOLA and Ole Laursen" and is covered
 by the MIT license. It was downloaded from
 http://www.flotcharts.org/
-Webmachine is Copyright (c) Basho Technologies and is covered by the
-Apache License 2.0.  It was downloaded from http://webmachine.basho.com/
-
-Eldap is "Copyright (c) 2010, Torbjorn Tornkvist" and is covered by
-the MIT license.  It was downloaded from https://github.com/etnt/eldap
-
-Mochiweb is "Copyright (c) 2007 Mochi Media, Inc." and is covered by
-the MIT license.  It was downloaded from
-http://github.com/mochi/mochiweb/
-
 glMatrix is "Copyright (c) 2011, Brandon Jones" and is covered by the
 BSD 2-Clause license.  It was downloaded from
 http://code.google.com/p/glmatrix/
diff --git a/LICENSE-EPL-OTP b/LICENSE-EPL-OTP
new file mode 100644 (file)
index 0000000..2257751
--- /dev/null
@@ -0,0 +1,286 @@
+ERLANG PUBLIC LICENSE
+Version 1.1
+
+1. Definitions.
+
+1.1. ``Contributor'' means each entity that creates or contributes to
+the creation of Modifications.
+
+1.2. ``Contributor Version'' means the combination of the Original
+Code, prior Modifications used by a Contributor, and the Modifications
+made by that particular Contributor.
+
+1.3. ``Covered Code'' means the Original Code or Modifications or the
+combination of the Original Code and Modifications, in each case
+including portions thereof.
+
+1.4. ``Electronic Distribution Mechanism'' means a mechanism generally
+accepted in the software development community for the electronic
+transfer of data.
+
+1.5. ``Executable'' means Covered Code in any form other than Source
+Code.
+
+1.6. ``Initial Developer'' means the individual or entity identified
+as the Initial Developer in the Source Code notice required by Exhibit
+A.
+
+1.7. ``Larger Work'' means a work which combines Covered Code or
+portions thereof with code not governed by the terms of this License.
+
+1.8. ``License'' means this document.
+
+1.9. ``Modifications'' means any addition to or deletion from the
+substance or structure of either the Original Code or any previous
+Modifications. When Covered Code is released as a series of files, a
+Modification is:
+
+A. Any addition to or deletion from the contents of a file containing
+   Original Code or previous Modifications. 
+
+B. Any new file that contains any part of the Original Code or
+   previous Modifications. 
+
+1.10. ``Original Code'' means Source Code of computer software code
+which is described in the Source Code notice required by Exhibit A as
+Original Code, and which, at the time of its release under this
+License is not already Covered Code governed by this License.
+
+1.11. ``Source Code'' means the preferred form of the Covered Code for
+making modifications to it, including all modules it contains, plus
+any associated interface definition files, scripts used to control
+compilation and installation of an Executable, or a list of source
+code differential comparisons against either the Original Code or
+another well known, available Covered Code of the Contributor's
+choice. The Source Code can be in a compressed or archival form,
+provided the appropriate decompression or de-archiving software is
+widely available for no charge.
+
+1.12. ``You'' means an individual or a legal entity exercising rights
+under, and complying with all of the terms of, this License. For legal
+entities,``You'' includes any entity which controls, is controlled by,
+or is under common control with You. For purposes of this definition,
+``control'' means (a) the power, direct or indirect, to cause the
+direction or management of such entity, whether by contract or
+otherwise, or (b) ownership of fifty percent (50%) or more of the
+outstanding shares or beneficial ownership of such entity.
+
+2. Source Code License.
+
+2.1. The Initial Developer Grant.
+The Initial Developer hereby grants You a world-wide, royalty-free,
+non-exclusive license, subject to third party intellectual property
+claims:
+
+(a) to use, reproduce, modify, display, perform, sublicense and
+    distribute the Original Code (or portions thereof) with or without
+    Modifications, or as part of a Larger Work; and 
+
+(b) under patents now or hereafter owned or controlled by Initial
+    Developer, to make, have made, use and sell (``Utilize'') the
+    Original Code (or portions thereof), but solely to the extent that
+    any such patent is reasonably necessary to enable You to Utilize
+    the Original Code (or portions thereof) and not to any greater
+    extent that may be necessary to Utilize further Modifications or
+    combinations. 
+
+2.2. Contributor Grant.
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license, subject to third party intellectual property
+claims:
+
+(a) to use, reproduce, modify, display, perform, sublicense and
+    distribute the Modifications created by such Contributor (or
+    portions thereof) either on an unmodified basis, with other
+    Modifications, as Covered Code or as part of a Larger Work; and 
+
+(b) under patents now or hereafter owned or controlled by Contributor,
+    to Utilize the Contributor Version (or portions thereof), but
+    solely to the extent that any such patent is reasonably necessary
+    to enable You to Utilize the Contributor Version (or portions
+    thereof), and not to any greater extent that may be necessary to
+    Utilize further Modifications or combinations. 
+
+3. Distribution Obligations.
+
+3.1. Application of License.
+The Modifications which You contribute are governed by the terms of
+this License, including without limitation Section 2.2. The Source
+Code version of Covered Code may be distributed only under the terms
+of this License, and You must include a copy of this License with
+every copy of the Source Code You distribute. You may not offer or
+impose any terms on any Source Code version that alters or restricts
+the applicable version of this License or the recipients' rights
+hereunder. However, You may include an additional document offering
+the additional rights described in Section 3.5. 
+
+3.2. Availability of Source Code.
+Any Modification which You contribute must be made available in Source
+Code form under the terms of this License either on the same media as
+an Executable version or via an accepted Electronic Distribution
+Mechanism to anyone to whom you made an Executable version available;
+and if made available via Electronic Distribution Mechanism, must
+remain available for at least twelve (12) months after the date it
+initially became available, or at least six (6) months after a
+subsequent version of that particular Modification has been made
+available to such recipients. You are responsible for ensuring that
+the Source Code version remains available even if the Electronic
+Distribution Mechanism is maintained by a third party.
+
+3.3. Description of Modifications.
+You must cause all Covered Code to which you contribute to contain a
+file documenting the changes You made to create that Covered Code and
+the date of any change. You must include a prominent statement that
+the Modification is derived, directly or indirectly, from Original
+Code provided by the Initial Developer and including the name of the
+Initial Developer in (a) the Source Code, and (b) in any notice in an
+Executable version or related documentation in which You describe the
+origin or ownership of the Covered Code.
+
+3.4. Intellectual Property Matters
+
+(a) Third Party Claims.
+    If You have knowledge that a party claims an intellectual property
+    right in particular functionality or code (or its utilization
+    under this License), you must include a text file with the source
+    code distribution titled ``LEGAL'' which describes the claim and
+    the party making the claim in sufficient detail that a recipient
+    will know whom to contact. If you obtain such knowledge after You
+    make Your Modification available as described in Section 3.2, You
+    shall promptly modify the LEGAL file in all copies You make
+    available thereafter and shall take other steps (such as notifying
+    appropriate mailing lists or newsgroups) reasonably calculated to
+    inform those who received the Covered Code that new knowledge has
+    been obtained. 
+
+(b) Contributor APIs.
+    If Your Modification is an application programming interface and
+    You own or control patents which are reasonably necessary to
+    implement that API, you must also include this information in the
+    LEGAL file. 
+
+3.5. Required Notices.
+You must duplicate the notice in Exhibit A in each file of the Source
+Code, and this License in any documentation for the Source Code, where
+You describe recipients' rights relating to Covered Code. If You
+created one or more Modification(s), You may add your name as a
+Contributor to the notice described in Exhibit A. If it is not
+possible to put such notice in a particular Source Code file due to
+its structure, then you must include such notice in a location (such
+as a relevant directory file) where a user would be likely to look for
+such a notice. You may choose to offer, and to charge a fee for,
+warranty, support, indemnity or liability obligations to one or more
+recipients of Covered Code. However, You may do so only on Your own
+behalf, and not on behalf of the Initial Developer or any
+Contributor. You must make it absolutely clear than any such warranty,
+support, indemnity or liability obligation is offered by You alone,
+and You hereby agree to indemnify the Initial Developer and every
+Contributor for any liability incurred by the Initial Developer or
+such Contributor as a result of warranty, support, indemnity or
+liability terms You offer.
+
+3.6. Distribution of Executable Versions.
+You may distribute Covered Code in Executable form only if the
+requirements of Section 3.1-3.5 have been met for that Covered Code,
+and if You include a notice stating that the Source Code version of
+the Covered Code is available under the terms of this License,
+including a description of how and where You have fulfilled the
+obligations of Section 3.2. The notice must be conspicuously included
+in any notice in an Executable version, related documentation or
+collateral in which You describe recipients' rights relating to the
+Covered Code. You may distribute the Executable version of Covered
+Code under a license of Your choice, which may contain terms different
+from this License, provided that You are in compliance with the terms
+of this License and that the license for the Executable version does
+not attempt to limit or alter the recipient's rights in the Source
+Code version from the rights set forth in this License. If You
+distribute the Executable version under a different license You must
+make it absolutely clear that any terms which differ from this License
+are offered by You alone, not by the Initial Developer or any
+Contributor. You hereby agree to indemnify the Initial Developer and
+every Contributor for any liability incurred by the Initial Developer
+or such Contributor as a result of any such terms You offer.
+
+3.7. Larger Works.
+You may create a Larger Work by combining Covered Code with other code
+not governed by the terms of this License and distribute the Larger
+Work as a single product. In such a case, You must make sure the
+requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Code due to statute
+or regulation then You must: (a) comply with the terms of this License
+to the maximum extent possible; and (b) describe the limitations and
+the code they affect. Such description must be included in the LEGAL
+file described in Section 3.4 and must be included with all
+distributions of the Source Code. Except to the extent prohibited by
+statute or regulation, such description must be sufficiently detailed
+for a recipient of ordinary skill to be able to understand it.
+
+5. Application of this License.
+
+This License applies to code to which the Initial Developer has
+attached the notice in Exhibit A, and to related Covered Code.
+
+6. CONNECTION TO MOZILLA PUBLIC LICENSE
+
+This Erlang License is a derivative work of the Mozilla Public
+License, Version 1.0. It contains terms which differ from the Mozilla
+Public License, Version 1.0.
+
+7. DISCLAIMER OF WARRANTY.
+
+COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN ``AS IS'' BASIS,
+WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR
+NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF
+THE COVERED CODE IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE
+IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER
+CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR
+CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART
+OF THIS LICENSE. NO USE OF ANY COVERED CODE IS AUTHORIZED HEREUNDER
+EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+This License and the rights granted hereunder will terminate
+automatically if You fail to comply with terms herein and fail to cure
+such breach within 30 days of becoming aware of the breach. All
+sublicenses to the Covered Code which are properly granted shall
+survive any termination of this License. Provisions which, by their
+nature, must remain in effect beyond the termination of this License
+shall survive.
+
+9. DISCLAIMER OF LIABILITY
+Any utilization of Covered Code shall not cause the Initial Developer
+or any Contributor to be liable for any damages (neither direct nor
+indirect).
+
+10. MISCELLANEOUS
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision is held to be unenforceable, such
+provision shall be reformed only to the extent necessary to make it
+enforceable. This License shall be construed by and in accordance with
+the substantive laws of Sweden. Any dispute, controversy or claim
+arising out of or relating to this License, or the breach, termination
+or invalidity thereof, shall be subject to the exclusive jurisdiction
+of Swedish courts, with the Stockholm City Court as the first
+instance.
+       
+EXHIBIT A.
+
+``The contents of this file are subject to the Erlang Public License,
+Version 1.1, (the "License"); you may not use this file except in
+compliance with the License. You should have received a copy of the
+Erlang Public License along with this software. If not, it can be
+retrieved via the world wide web at http://www.erlang.org/.
+
+Software distributed under the License is distributed on an "AS IS"
+basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+the License for the specific language governing rights and limitations
+under the License.
+
+The Initial Developer of the Original Code is Ericsson AB.
+Portions created by Ericsson are Copyright 2013, Ericsson AB.
+All Rights Reserved.''
diff --git a/LICENSE-MIT-Erlware-Commons b/LICENSE-MIT-Erlware-Commons
new file mode 100644 (file)
index 0000000..fc89c02
--- /dev/null
@@ -0,0 +1,21 @@
+Copyright (c) 2011 Erlware, LLC
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/LICENSE-MPL2 b/LICENSE-MPL2
new file mode 100644 (file)
index 0000000..14e2f77
--- /dev/null
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+    means each individual or legal entity that creates, contributes to
+    the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+    means the combination of the Contributions of others (if any) used
+    by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+    means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+    means Source Code Form to which the initial Contributor has attached
+    the notice in Exhibit A, the Executable Form of such Source Code
+    Form, and Modifications of such Source Code Form, in each case
+    including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+    means
+
+    (a) that the initial Contributor has attached the notice described
+        in Exhibit B to the Covered Software; or
+
+    (b) that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the
+        terms of a Secondary License.
+
+1.6. "Executable Form"
+    means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+    means a work that combines Covered Software with other material, in 
+    a separate file or files, that is not Covered Software.
+
+1.8. "License"
+    means this document.
+
+1.9. "Licensable"
+    means having the right to grant, to the maximum extent possible,
+    whether at the time of the initial grant or subsequently, any and
+    all of the rights conveyed by this License.
+
+1.10. "Modifications"
+    means any of the following:
+
+    (a) any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered
+        Software; or
+
+    (b) any new file in Source Code Form that contains any Covered
+        Software.
+
+1.11. "Patent Claims" of a Contributor
+    means any patent claim(s), including without limitation, method,
+    process, and apparatus claims, in any patent Licensable by such
+    Contributor that would be infringed, but for the grant of the
+    License, by the making, using, selling, offering for sale, having
+    made, import, or transfer of either its Contributions or its
+    Contributor Version.
+
+1.12. "Secondary License"
+    means either the GNU General Public License, Version 2.0, the GNU
+    Lesser General Public License, Version 2.1, the GNU Affero General
+    Public License, Version 3.0, or any later versions of those
+    licenses.
+
+1.13. "Source Code Form"
+    means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+    means an individual or a legal entity exercising rights under this
+    License. For legal entities, "You" includes any entity that
+    controls, is controlled by, or is under common control with You. For
+    purposes of this definition, "control" means (a) the power, direct
+    or indirect, to cause the direction or management of such entity,
+    whether by contract or otherwise, or (b) ownership of more than
+    fifty percent (50%) of the outstanding shares or beneficial
+    ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+    Licensable by such Contributor to use, reproduce, make available,
+    modify, display, perform, distribute, and otherwise exploit its
+    Contributions, either on an unmodified basis, with Modifications, or
+    as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+    for sale, have made, import, and otherwise transfer either its
+    Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+    or
+
+(b) for infringements caused by: (i) Your and any other third party's
+    modifications of Covered Software, or (ii) the combination of its
+    Contributions with other software (except as part of its Contributor
+    Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+    its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+    Form, as described in Section 3.1, and You must inform recipients of
+    the Executable Form how they can obtain a copy of such Source Code
+    Form by reasonable means in a timely manner, at a charge no more
+    than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+    License, or sublicense it under different terms, provided that the
+    license for the Executable Form does not attempt to limit or alter
+    the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+*                                                                      *
+*  6. Disclaimer of Warranty                                           *
+*  -------------------------                                           *
+*                                                                      *
+*  Covered Software is provided under this License on an "as is"       *
+*  basis, without warranty of any kind, either expressed, implied, or  *
+*  statutory, including, without limitation, warranties that the       *
+*  Covered Software is free of defects, merchantable, fit for a        *
+*  particular purpose or non-infringing. The entire risk as to the     *
+*  quality and performance of the Covered Software is with You.        *
+*  Should any Covered Software prove defective in any respect, You     *
+*  (not any Contributor) assume the cost of any necessary servicing,   *
+*  repair, or correction. This disclaimer of warranty constitutes an   *
+*  essential part of this License. No use of any Covered Software is   *
+*  authorized under this License except under this disclaimer.         *
+*                                                                      *
+************************************************************************
+
+************************************************************************
+*                                                                      *
+*  7. Limitation of Liability                                          *
+*  --------------------------                                          *
+*                                                                      *
+*  Under no circumstances and under no legal theory, whether tort      *
+*  (including negligence), contract, or otherwise, shall any           *
+*  Contributor, or anyone who distributes Covered Software as          *
+*  permitted above, be liable to You for any direct, indirect,         *
+*  special, incidental, or consequential damages of any character      *
+*  including, without limitation, damages for lost profits, loss of    *
+*  goodwill, work stoppage, computer failure or malfunction, or any    *
+*  and all other commercial damages or losses, even if such party      *
+*  shall have been informed of the possibility of such damages. This   *
+*  limitation of liability shall not apply to liability for death or   *
+*  personal injury resulting from such party's negligence to the       *
+*  extent applicable law prohibits such limitation. Some               *
+*  jurisdictions do not allow the exclusion or limitation of           *
+*  incidental or consequential damages, so this exclusion and          *
+*  limitation may not apply to You.                                    *
+*                                                                      *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+  This Source Code Form is subject to the terms of the Mozilla Public
+  License, v. 2.0. If a copy of the MPL was not distributed with this
+  file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+  This Source Code Form is "Incompatible With Secondary Licenses", as
+  defined by the Mozilla Public License, v. 2.0.
diff --git a/Makefile b/Makefile
new file mode 100644 (file)
index 0000000..5558f48
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,376 @@
+PROJECT = rabbitmq_server_release
+VERSION ?= 0.0.0
+
+# Release artifacts are put in $(PACKAGES_DIR).
+PACKAGES_DIR ?= $(abspath PACKAGES)
+
+DEPS = rabbit_common rabbit $(PLUGINS)
+
+# List of plugins to include in a RabbitMQ release.
+PLUGINS := rabbitmq_amqp1_0 \
+          rabbitmq_auth_backend_ldap \
+          rabbitmq_auth_mechanism_ssl \
+          rabbitmq_consistent_hash_exchange \
+          rabbitmq_event_exchange \
+          rabbitmq_federation \
+          rabbitmq_federation_management \
+          rabbitmq_jms_topic_exchange \
+          rabbitmq_management \
+          rabbitmq_management_agent \
+          rabbitmq_management_visualiser \
+          rabbitmq_mqtt \
+          rabbitmq_recent_history_exchange \
+          rabbitmq_sharding \
+          rabbitmq_shovel \
+          rabbitmq_shovel_management \
+          rabbitmq_stomp \
+          rabbitmq_top \
+          rabbitmq_tracing \
+          rabbitmq_trust_store \
+          rabbitmq_web_dispatch \
+          rabbitmq_web_stomp \
+          rabbitmq_web_stomp_examples
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-run.mk \
+             rabbit_common/mk/rabbitmq-dist.mk \
+             rabbit_common/mk/rabbitmq-tools.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
+
+# --------------------------------------------------------------------
+# Distribution.
+# --------------------------------------------------------------------
+
+.PHONY: source-dist clean-source-dist
+
+SOURCE_DIST_BASE ?= rabbitmq-server
+SOURCE_DIST_SUFFIXES ?= tar.xz zip
+SOURCE_DIST ?= $(PACKAGES_DIR)/$(SOURCE_DIST_BASE)-$(VERSION)
+
+# The first source distribution file is used by packages: if the archive
+# type changes, you must update all packages' Makefile.
+SOURCE_DIST_FILES = $(addprefix $(SOURCE_DIST).,$(SOURCE_DIST_SUFFIXES))
+
+.PHONY: $(SOURCE_DIST_FILES)
+
+source-dist: $(SOURCE_DIST_FILES)
+       @:
+
+RSYNC ?= rsync
+RSYNC_V_0 =
+RSYNC_V_1 = -v
+RSYNC_V_2 = -v
+RSYNC_V = $(RSYNC_V_$(V))
+RSYNC_FLAGS += -a $(RSYNC_V)           \
+              --exclude '.sw?' --exclude '.*.sw?'      \
+              --exclude '*.beam'                       \
+              --exclude '*.d'                          \
+              --exclude '*.pyc'                        \
+              --exclude '.git*'                        \
+              --exclude '.hg*'                         \
+              --exclude '.travis.yml'                  \
+              --exclude '.*.plt'                       \
+              --exclude '$(notdir $(ERLANG_MK_TMP))'   \
+              --exclude 'ebin'                         \
+              --exclude 'packaging'                    \
+              --exclude 'erl_crash.dump'               \
+              --exclude 'MnesiaCore.*'                 \
+              --exclude 'cover/'                       \
+              --exclude 'deps/'                        \
+              --exclude 'ebin/'                        \
+              --exclude '$(notdir $(DEPS_DIR))/'       \
+              --exclude 'logs/'                        \
+              --exclude 'plugins/'                     \
+              --exclude '$(notdir $(DIST_DIR))/'       \
+              --exclude 'test'                         \
+              --exclude 'xrefr'                        \
+              --exclude '/$(notdir $(PACKAGES_DIR))/'  \
+              --exclude '/PACKAGES/'                   \
+              --exclude '/cowboy/doc/'                 \
+              --exclude '/cowboy/examples/'            \
+              --exclude '/rabbitmq_amqp1_0/test/swiftmq/build/'\
+              --exclude '/rabbitmq_amqp1_0/test/swiftmq/swiftmq*'\
+              --exclude '/rabbitmq_mqtt/test/build/'   \
+              --exclude '/rabbitmq_mqtt/test/test_client/'\
+              --delete                                 \
+              --delete-excluded
+
+TAR ?= tar
+TAR_V_0 =
+TAR_V_1 = -v
+TAR_V_2 = -v
+TAR_V = $(TAR_V_$(V))
+
+GZIP ?= gzip
+BZIP2 ?= bzip2
+XZ ?= xz
+
+ZIP ?= zip
+ZIP_V_0 = -q
+ZIP_V_1 =
+ZIP_V_2 =
+ZIP_V = $(ZIP_V_$(V))
+
+.PHONY: $(SOURCE_DIST)
+.PHONY: clean-source-dist distclean-packages clean-unpacked-source-dist
+
+$(SOURCE_DIST): $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+       $(verbose) mkdir -p $(dir $@)
+       $(gen_verbose) $(RSYNC) $(RSYNC_FLAGS) ./ $@/
+       $(verbose) echo "$(PROJECT) $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)" > $@/git-revisions.txt
+       $(verbose) cat packaging/common/LICENSE.head > $@/LICENSE
+       $(verbose) mkdir -p $@/deps/licensing
+       $(verbose) for dep in $$(cat $(ERLANG_MK_RECURSIVE_DEPS_LIST) | LC_COLLATE=C sort); do \
+               $(RSYNC) $(RSYNC_FLAGS) \
+                $$dep \
+                $@/deps; \
+               if test -f $@/deps/$$(basename $$dep)/erlang.mk && \
+                  test "$$(wc -l $@/deps/$$(basename $$dep)/erlang.mk | awk '{print $$1;}')" = "1" && \
+                  grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" $@/deps/$$(basename $$dep)/erlang.mk; then \
+                       echo "include ../../erlang.mk" > $@/deps/$$(basename $$dep)/erlang.mk; \
+               fi; \
+               sed -E -i.bak "s|^[[:blank:]]*include[[:blank:]]+\.\./.*erlang.mk$$|include ../../erlang.mk|" \
+                $@/deps/$$(basename $$dep)/Makefile && \
+               rm $@/deps/$$(basename $$dep)/Makefile.bak; \
+               if test -f "$$dep/license_info"; then \
+                       cp "$$dep/license_info" "$@/deps/licensing/license_info_$$(basename "$$dep")"; \
+                       cat "$$dep/license_info" >> $@/LICENSE; \
+               fi; \
+               find "$$dep" -maxdepth 1 -name 'LICENSE-*' -exec cp '{}' $@/deps/licensing \; ; \
+               (cd $$dep; echo "$$(basename "$$dep") $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)") >> $@/git-revisions.txt; \
+       done
+       $(verbose) cat packaging/common/LICENSE.tail >> $@/LICENSE
+       $(verbose) find $@/deps/licensing -name 'LICENSE-*' -exec cp '{}' $@ \;
+       $(verbose) for file in $$(find $@ -name '*.app.src'); do \
+               sed -E -i.bak -e 's/[{]vsn[[:blank:]]*,[[:blank:]]*(""|"0.0.0")[[:blank:]]*}/{vsn, "$(VERSION)"}/' $$file; \
+               rm $$file.bak; \
+       done
+
+# TODO: Fix file timestamps to have reproducible source archives.
+# $(verbose) find $@ -not -name 'git-revisions.txt' -print0 | xargs -0 touch -r $@/git-revisions.txt
+
+$(SOURCE_DIST).tar.gz: $(SOURCE_DIST)
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \
+               $(GZIP) --best > $@
+
+$(SOURCE_DIST).tar.bz2: $(SOURCE_DIST)
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \
+               $(BZIP2) > $@
+
+$(SOURCE_DIST).tar.xz: $(SOURCE_DIST)
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \
+               $(XZ) > $@
+
+$(SOURCE_DIST).zip: $(SOURCE_DIST)
+       $(verbose) rm -f $@
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(ZIP) $(ZIP_V) $@
+
+clean:: clean-source-dist clean-upgrade
+
+clean-upgrade:
+       cd upgrade && make clean
+
+clean-source-dist:
+       $(gen_verbose) rm -rf -- $(SOURCE_DIST_BASE)-*
+
+distclean:: distclean-packages distclean-upgrade
+
+distclean-upgrade:
+       cd upgrade && make distclean
+
+distclean-packages:
+       $(gen_verbose) rm -rf -- $(PACKAGES_DIR)
+
+clean-unpacked-source-dist:
+       for d in deps/*; do \
+               if test -f $$d/Makefile; then \
+                       make -C $$d clean || exit $$?; \
+               fi; \
+       done
+
+# --------------------------------------------------------------------
+# Packaging.
+# --------------------------------------------------------------------
+
+.PHONY: packages package-deb \
+       package-rpm package-rpm-fedora package-rpm-suse \
+       package-windows package-standalone-macosx \
+       package-generic-unix
+
+# This variable is exported so sub-make instances know where to find the
+# archive.
+PACKAGES_SOURCE_DIST_FILE ?= $(firstword $(SOURCE_DIST_FILES))
+
+packages package-deb package-rpm package-rpm-fedora \
+package-rpm-suse package-windows package-standalone-macosx \
+package-generic-unix: $(PACKAGES_SOURCE_DIST_FILE)
+       $(verbose) $(MAKE) -C packaging $@ \
+               SOURCE_DIST_FILE=$(abspath $(PACKAGES_SOURCE_DIST_FILE))
+
+# --------------------------------------------------------------------
+# Installation.
+# --------------------------------------------------------------------
+
+.PHONY: manpages web-manpages distclean-manpages
+
+manpages web-manpages distclean-manpages:
+       $(MAKE) -C $(DEPS_DIR)/rabbit $@ DEPS_DIR=$(DEPS_DIR)
+
+.PHONY: install install-erlapp install-scripts install-bin install-man \
+       install-windows install-windows-erlapp install-windows-scripts \
+       install-windows-docs
+
+DESTDIR ?=
+
+PREFIX ?= /usr/local
+WINDOWS_PREFIX ?= rabbitmq-server-windows-$(VERSION)
+
+MANDIR ?= $(PREFIX)/share/man
+RMQ_ROOTDIR ?= $(PREFIX)/lib/erlang
+RMQ_BINDIR ?= $(RMQ_ROOTDIR)/bin
+RMQ_LIBDIR ?= $(RMQ_ROOTDIR)/lib
+RMQ_ERLAPP_DIR ?= $(RMQ_LIBDIR)/rabbitmq_server-$(VERSION)
+
+SCRIPTS = rabbitmq-defaults \
+         rabbitmq-env \
+         rabbitmq-server \
+         rabbitmqctl \
+         rabbitmq-plugins
+
+WINDOWS_SCRIPTS = rabbitmq-defaults.bat \
+                 rabbitmq-echopid.bat \
+                 rabbitmq-env.bat \
+                 rabbitmq-plugins.bat \
+                 rabbitmq-server.bat \
+                 rabbitmq-service.bat \
+                 rabbitmqctl.bat
+
+UNIX_TO_DOS ?= todos
+
+inst_verbose_0 = @echo " INST  " $@;
+inst_verbose = $(inst_verbose_$(V))
+
+install: install-erlapp install-scripts
+
+install-erlapp: dist
+       $(verbose) mkdir -p $(DESTDIR)$(RMQ_ERLAPP_DIR)
+       $(inst_verbose) cp -r \
+               LICENSE* \
+               $(DEPS_DIR)/rabbit/ebin \
+               $(DEPS_DIR)/rabbit/INSTALL \
+               $(DIST_DIR) \
+               $(DESTDIR)$(RMQ_ERLAPP_DIR)
+       $(verbose) echo "Put your EZs here and use rabbitmq-plugins to enable them." \
+               > $(DESTDIR)$(RMQ_ERLAPP_DIR)/$(notdir $(DIST_DIR))/README
+
+       @# FIXME: Why do we copy headers?
+       $(verbose) cp -r \
+               $(DEPS_DIR)/rabbit/include \
+               $(DESTDIR)$(RMQ_ERLAPP_DIR)
+       @# rabbitmq-common provides headers too: copy them to
+       @# rabbitmq_server/include.
+       $(verbose) cp -r \
+               $(DEPS_DIR)/rabbit_common/include \
+               $(DESTDIR)$(RMQ_ERLAPP_DIR)
+
+install-scripts:
+       $(verbose) mkdir -p $(DESTDIR)$(RMQ_ERLAPP_DIR)/sbin
+       $(inst_verbose) for script in $(SCRIPTS); do \
+               cp "$(DEPS_DIR)/rabbit/scripts/$$script" \
+                       "$(DESTDIR)$(RMQ_ERLAPP_DIR)/sbin"; \
+               chmod 0755 "$(DESTDIR)$(RMQ_ERLAPP_DIR)/sbin/$$script"; \
+       done
+
+# FIXME: We do symlinks to scripts in $(RMQ_ERLAPP_DIR))/sbin but this
+# code assumes a certain hierarchy to make relative symlinks.
+install-bin: install-scripts
+       $(verbose) mkdir -p $(DESTDIR)$(RMQ_BINDIR)
+       $(inst_verbose) for script in $(SCRIPTS); do \
+               test -e $(DESTDIR)$(RMQ_BINDIR)/$$script || \
+                       ln -sf ../lib/$(notdir $(RMQ_ERLAPP_DIR))/sbin/$$script \
+                        $(DESTDIR)$(RMQ_BINDIR)/$$script; \
+       done
+
+install-man: manpages
+       $(inst_verbose) sections=$$(ls -1 $(DEPS_DIR)/rabbit/docs/*.[1-9] \
+               | sed -E 's/.*\.([1-9])$$/\1/' | uniq | sort); \
+       for section in $$sections; do \
+               mkdir -p $(DESTDIR)$(MANDIR)/man$$section; \
+               for manpage in $(DEPS_DIR)/rabbit/docs/*.$$section; do \
+                       gzip < $$manpage \
+                        > $(DESTDIR)$(MANDIR)/man$$section/$$(basename $$manpage).gz; \
+               done; \
+       done
+
+install-windows: install-windows-erlapp install-windows-scripts install-windows-docs
+
+install-windows-erlapp: dist
+       $(verbose) mkdir -p $(DESTDIR)$(WINDOWS_PREFIX)
+       $(inst_verbose) cp -r \
+               LICENSE* \
+               $(DEPS_DIR)/rabbit/ebin \
+               $(DEPS_DIR)/rabbit/INSTALL \
+               $(DIST_DIR) \
+               $(DESTDIR)$(WINDOWS_PREFIX)
+       $(verbose) echo "Put your EZs here and use rabbitmq-plugins.bat to enable them." \
+               > $(DESTDIR)$(WINDOWS_PREFIX)/$(notdir $(DIST_DIR))/README.txt
+       $(verbose) $(UNIX_TO_DOS) $(DESTDIR)$(WINDOWS_PREFIX)/plugins/README.txt
+
+       @# FIXME: Why do we copy headers?
+       $(verbose) cp -r \
+               $(DEPS_DIR)/rabbit/include \
+               $(DESTDIR)$(WINDOWS_PREFIX)
+       @# rabbitmq-common provides headers too: copy them to
+       @# rabbitmq_server/include.
+       $(verbose) cp -r \
+               $(DEPS_DIR)/rabbit_common/include \
+               $(DESTDIR)$(WINDOWS_PREFIX)
+
+install-windows-scripts:
+       $(verbose) mkdir -p $(DESTDIR)$(WINDOWS_PREFIX)/sbin
+       $(inst_verbose) for script in $(WINDOWS_SCRIPTS); do \
+               cp "$(DEPS_DIR)/rabbit/scripts/$$script" \
+                       "$(DESTDIR)$(WINDOWS_PREFIX)/sbin"; \
+               chmod 0755 "$(DESTDIR)$(WINDOWS_PREFIX)/sbin/$$script"; \
+       done
+
+install-windows-docs: install-windows-erlapp
+       $(verbose) mkdir -p $(DESTDIR)$(WINDOWS_PREFIX)/etc
+       $(inst_verbose) xmlto -o . xhtml-nochunks \
+               $(DEPS_DIR)/rabbit/docs/rabbitmq-service.xml
+       $(verbose) elinks -dump -no-references -no-numbering \
+               rabbitmq-service.html \
+               > $(DESTDIR)$(WINDOWS_PREFIX)/readme-service.txt
+       $(verbose) rm rabbitmq-service.html
+       $(verbose) cp $(DEPS_DIR)/rabbit/docs/rabbitmq.config.example \
+               $(DESTDIR)$(WINDOWS_PREFIX)/etc
+       $(verbose) for file in \
+        $(DESTDIR)$(WINDOWS_PREFIX)/readme-service.txt \
+        $(DESTDIR)$(WINDOWS_PREFIX)/LICENSE* \
+        $(DESTDIR)$(WINDOWS_PREFIX)/INSTALL \
+        $(DESTDIR)$(WINDOWS_PREFIX)/etc/rabbitmq.config.example; do \
+               $(UNIX_TO_DOS) "$$file"; \
+               case "$$file" in \
+               *.txt) ;; \
+               *.example) ;; \
+               *) mv "$$file" "$$file.txt" ;; \
+               esac; \
+       done
+
+test-upgrade:
+       $(MAKE) -C upgrade
index b7f34327978a7a32b9663a30eaeb7ffe00ad8783..00e4b7f6316aa5d23de5bb1a0450884de920c57e 100644 (file)
-rabbitmq-server (3.5.6-1~u14.04+mos5) mos8.0; urgency=medium
+rabbitmq-server (3.6.6-1~u14.04+mos1) mos8.0; urgency=medium
 
-  * Backport https://github.com/rabbitmq/rabbitmq-common/pull/54
+  * new upstream release
 
- -- Alexey Lebedeff <alebedev@mirantis.com>  Mon, 15 Feb 2016 19:17:40 +0300
+ -- Alexey Lebedeff <alebedev@mirantis.com>  Fri, 11 Nov 2016 15:06:56 +0000
 
-rabbitmq-server (3.5.6-1~u14.04+mos4) mos8.0; urgency=medium
+rabbitmq-server (3.6.5.907-1) unstable; urgency=low
 
-  * Backport https://github.com/rabbitmq/rabbitmq-common/pull/26
-
- -- Alexey Lebedeff <alebedev@mirantis.com>  Thu, 24 Dec 2015 14:28:56 +0300
-
-rabbitmq-server (3.5.6-1~u14.04+mos3) mos8.0; urgency=medium
+  * New Upstream Release
 
-  * Backport https://github.com/rabbitmq/rabbitmq-management/pull/84
+ -- Michael Klishin <michael@rabbitmq.com>  Mon, 21 Nov 2016 10:36:28 +0000
 
- -- Alexey Lebedeff <alebedev@mirantis.com>  Fri, 11 Dec 2015 14:31:08 +0300
+rabbitmq-server (3.6.5-1) unstable; urgency=low
 
-rabbitmq-server (3.5.6-1~u14.04+mos2) mos8.0; urgency=medium
+  * New Upstream Release
 
-  * Backport https://github.com/rabbitmq/rabbitmq-common/pull/18
+ -- Michael Klishin <michael@rabbitmq.com>  Fri, 05 Aug 2016 14:20:47 +0100
 
- -- Alexey Lebedeff <alebedev@mirantis.com>  Thu, 10 Dec 2015 14:54:05 +0300
+rabbitmq-server (3.6.4-1) unstable; urgency=low
 
-rabbitmq-server (3.5.6-1~u14.04+mos1) mos8.0; urgency=medium
+  * New Upstream Release
 
-  * New upstream release.
-  * Disable auto-start on package install.
-  * Increase ulimit on number of file descriptors.
+ -- Michael Klishin <michael@rabbitmq.com>  Fri, 29 Jul 2016 11:40:53 +0100
 
- -- Alexey Lebedeff <alebedev@mirantis.com>  Fri, 16 Oct 2015 13:59:11 +0300
+rabbitmq-server (3.6.3-1) unstable; urgency=low
 
-rabbitmq-server (3.5.4-1) unstable; urgency=medium
+  * New Upstream Release
 
-  * New upstream release.
+ -- Michael Klishin <michael@rabbitmq.com>  Wed, 06 Jul 2016 19:19:21 +0100
 
- -- James Page <james.page@ubuntu.com>  Tue, 04 Aug 2015 14:52:31 +0200
+rabbitmq-server (3.6.2-1) unstable; urgency=low
 
-rabbitmq-server (3.5.1-2) unstable; urgency=medium
+  * New Upstream Release
 
-  [ Tony Breeds ]
-  * systemd: Ensure that rabbitmq has started before marking service as
-    running (LP: #1449056).
+ -- Michael Klishin <michael@rabbitmq.com>  Thu, 19 May 2016 09:20:06 +0100
 
-  [ James Page ]
-  * systemd: Drop use of /etc/default/rabbitmq-server.
+rabbitmq-server (3.6.1-1) unstable; urgency=low
 
- -- James Page <james.page@ubuntu.com>  Tue, 02 Jun 2015 11:40:59 +0100
+  * New Upstream Release
 
-rabbitmq-server (3.5.1-1) unstable; urgency=medium
+ -- Michael Klishin <michael@rabbitmq.com>  Tue, 01 Mar 2016 13:19:57 +0000
 
-  * New upstream release.
+rabbitmq-server (3.6.0-1) unstable; urgency=low
 
- -- James Page <james.page@ubuntu.com>  Wed, 13 May 2015 21:35:52 +0100
+  * New Upstream Release
 
-rabbitmq-server (3.4.3-2) unstable; urgency=medium
+ -- Michael Klishin <michael@rabbitmq.com>  Tue, 22 Dec 2015 13:21:56 +0000
 
-  * Restore missing changes from 3.4.2-4.
+rabbitmq-server (3.5.7-1) unstable; urgency=low
 
- -- James Page <james.page@ubuntu.com>  Mon, 02 Feb 2015 07:44:33 +0200
+  * New Upstream Release
 
-rabbitmq-server (3.4.3-1) unstable; urgency=medium
+ -- Michael Klishin <michael@rabbitmq.com>  Tue, 15 Dec 2015 10:10:46 +0000
 
-  * New upstream point release.
+rabbitmq-server (3.5.6-1) unstable; urgency=low
 
- -- James Page <james.page@ubuntu.com>  Wed, 28 Jan 2015 16:12:32 +0000
+  * New Upstream Release
 
-rabbitmq-server (3.4.2-4) unstable; urgency=medium
+ -- Michael Klishin <michael@rabbitmq.com>  Wed, 07 Oct 2015 13:31:24 +0100
 
-  * Re-added /usr/lib/erlang/lib /var/lib/rabbitmq/mnesia and
-    /var/log/rabbitmq which I removed form the package by mistake on the last
-    upload.
+rabbitmq-server (3.5.5-3) unstable; urgency=low
 
- -- Thomas Goirand <zigo@debian.org>  Wed, 28 Jan 2015 13:11:02 +0000
+  * Fix bashism in rabbitmq-script-wrapper
 
-rabbitmq-server (3.4.2-3) unstable; urgency=medium
+ -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>  Thu, 24 Sep 2015 19:18:17 +0100
 
-  * Removes debian/README which is useless (Closes: #703021).
-  * Provides a default /etc/rabbitmq/rabbitmq-env.conf (Closes: #543638).
+rabbitmq-server (3.5.5-1) unstable; urgency=low
 
- -- Thomas Goirand <zigo@debian.org>  Tue, 27 Jan 2015 15:08:08 +0100
+  * New Upstream Release
 
-rabbitmq-server (3.4.2-2) unstable; urgency=medium
+ -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>  Thu, 24 Sep 2015 10:57:25 +0100
 
-  * d/rabbitmq-server.dirs: Restore missing /etc/rabbitmq directory
-    (LP: #1410155).
+rabbitmq-server (3.5.4-1) unstable; urgency=low
 
- -- James Page <james.page@ubuntu.com>  Tue, 13 Jan 2015 09:53:47 +0000
+  * New Upstream Release
 
-rabbitmq-server (3.4.2-1) unstable; urgency=medium
+ -- Michael Klishin <michael@rabbitmq.com>  Tue, 21 Jul 2015 20:25:48 +0100
 
-  [ James Page ]
-  * New upstream point release.
-  * d/control: Update for new maintainer information, add VCS repository
-    locations.
-  * d/source/format: Switch packaging to source format 3.0 (quilt).
-  * d/compat,control: Bump debhelper compat level to 9.
-  * d/*: wrap-and-sort.
-  * d/*: Move to standard debhelper, drop use of cdbs.
-  * d/rules,control,rabbitmq-server.service: Add systemd service
-    configuration.
-  * d/control: Bumped Standards-Version 3.9.6, no changes.
+rabbitmq-server (3.5.3-1) unstable; urgency=low
 
-  [ Thomas Goirand ]
-  * d/copyright: Rewrote as format 1.0.
+  * New Upstream Release
 
- -- James Page <james.page@ubuntu.com>  Fri, 19 Dec 2014 11:09:20 +0000
+ -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>  Fri, 22 May 2015 11:04:17 +0100
 
-rabbitmq-server (3.4.1-1) unstable; urgency=high
+rabbitmq-server (3.5.2-1) unstable; urgency=low
 
-  * New upstream release.
+  * New Upstream Release
 
- -- Blair Hester <bhester@gopivotal.com>  Tue, 04 Nov 2014 07:33:44 +0100
+ -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>  Tue, 12 May 2015 16:21:44 +0100
 
-rabbitmq-server (3.3.5-1) unstable; urgency=low
+rabbitmq-server (3.5.1-1) unstable; urgency=low
 
-  * New upstream release:
-    - Provides unminimized versions of all bundled Javascript
-      libraries (Closes: #736781).
-  * d/control: Added Blair Hester to Uploaders, dropped Emile Joubert 
-    (thanks for all your work Emile!).
+  * New Upstream Release
 
- -- Blair Hester <bhester@gopivotal.com>  Tue, 12 Aug 2014 11:47:14 +0100
+ -- Michael Klishin <michael@rabbitmq.com>  Thu, 02 Apr 2015 10:17:30 +0100
 
-rabbitmq-server (3.3.4-1) unstable; urgency=low
+rabbitmq-server (3.5.0-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 24 Jun 2014 18:00:48 +0100
+ -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>  Wed, 11 Mar 2015 13:56:19 +0000
 
-rabbitmq-server (3.3.3-1) unstable; urgency=low
+rabbitmq-server (3.4.4-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 17 Jun 2014 16:59:14 +0100
+ -- Michael Klishin <michael@rabbitmq.com>  Wed, 11 Feb 2015 12:05:01 +0000
 
-rabbitmq-server (3.3.1-1) unstable; urgency=low
+rabbitmq-server (3.4.3-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 29 Apr 2014 21:05:49 +0100
+ -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>  Tue, 06 Jan 2015 15:58:45 +0000
 
-rabbitmq-server (3.3.0-1) unstable; urgency=low
+rabbitmq-server (3.4.2-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Wed, 02 Apr 2014 16:23:08 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Wed, 26 Nov 2014 12:11:12 +0000
 
-rabbitmq-server (3.2.4-1.1) unstable; urgency=high
+rabbitmq-server (3.4.1-1) unstable; urgency=low
 
-  * Non-maintainer upload.
-  * Bind on 127.0.0.1 by default, to avoid listening on all ipv6 interface with
-    guest/guest as default configured user. Note that this only fixes *new*
-    installation, and that any already existing setup will have to edit the
-    /etc/rabbitmq/rabbitmq-env.conf manually if affected. (Closes: #727607)
-  * Removed useless and deprecated DM-Upload field.
-  * Cleans plugins-src/rabbitmq-server to be able to build twice. Also cleans
-    debian/postrm which is generated from debian/postrm.in and plugins/README.
+  * New Upstream Release
 
- -- Thomas Goirand <zigo@debian.org>  Mon, 31 Mar 2014 06:11:46 +0000
+ -- Simon MacMullen <simon@rabbitmq.com>  Wed, 29 Oct 2014 13:31:10 +0000
 
-rabbitmq-server (3.2.4-1) unstable; urgency=low
+rabbitmq-server (3.4.0-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 04 Mar 2014 13:21:45 +0000
+ -- Simon MacMullen <simon@rabbitmq.com>  Tue, 21 Oct 2014 14:21:36 +0100
 
-rabbitmq-server (3.2.3-1) unstable; urgency=low
+rabbitmq-server (3.3.5-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
+  * Changed Uploaders from Emile Joubert to Blair Hester
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 23 Jan 2014 16:49:45 +0000
+ -- Simon MacMullen <simon@rabbitmq.com>  Mon, 11 Aug 2014 12:23:31 +0100
 
-rabbitmq-server (3.2.2-1) unstable; urgency=low
+rabbitmq-server (3.3.4-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Wed, 11 Dec 2013 17:31:14 +0000
+ -- Simon MacMullen <simon@rabbitmq.com>  Tue, 24 Jun 2014 12:50:29 +0100
 
-rabbitmq-server (3.2.1-1) unstable; urgency=low
+rabbitmq-server (3.3.3-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Mon, 11 Nov 2013 09:49:42 +0000
+ -- Simon MacMullen <simon@rabbitmq.com>  Mon, 16 Jun 2014 13:00:00 +0100
 
-rabbitmq-server (3.2.0-1) unstable; urgency=low
+rabbitmq-server (3.3.2-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Wed, 23 Oct 2013 15:42:19 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Mon, 09 Jun 2014 10:25:22 +0100
 
-rabbitmq-server (3.1.4-1) unstable; urgency=low
+rabbitmq-server (3.3.1-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 07 Aug 2013 15:16:28 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Tue, 29 Apr 2014 11:49:23 +0100
 
-rabbitmq-server (3.1.3-1) unstable; urgency=low
+rabbitmq-server (3.3.0-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 27 Jun 2013 14:06:11 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Wed, 02 Apr 2014 14:23:14 +0100
 
-rabbitmq-server (3.1.2-1) unstable; urgency=low
+rabbitmq-server (3.2.4-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 25 Jun 2013 11:28:52 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Mon, 03 Mar 2014 14:50:18 +0000
 
-rabbitmq-server (3.1.1-1) unstable; urgency=low
+rabbitmq-server (3.2.3-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 28 May 2013 11:15:13 +0100
+ -- Emile Joubert <emile@rabbitmq.com>  Thu, 23 Jan 2014 14:46:37 +0000
 
-rabbitmq-server (3.1.0-1) unstable; urgency=low
+rabbitmq-server (3.2.2-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 02 May 2013 11:19:31 +0100
+ -- Emile Joubert <emile@rabbitmq.com>  Tue, 10 Dec 2013 16:08:08 +0000
 
-rabbitmq-server (3.0.4-1) unstable; urgency=low
+rabbitmq-server (3.2.0-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Wed, 13 Mar 2013 10:53:18 +0000
+ -- Emile Joubert <emile@rabbitmq.com>  Wed, 23 Oct 2013 12:44:10 +0100
 
-rabbitmq-server (3.0.4-1) unstable; urgency=low
+rabbitmq-server (3.1.5-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Wed, 13 Mar 2013 10:53:18 +0000
+ -- Simon MacMullen <simon@rabbitmq.com>  Thu, 15 Aug 2013 11:03:13 +0100
 
-rabbitmq-server (3.0.3-1) unstable; urgency=low
+rabbitmq-server (3.1.3-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 07 Mar 2013 10:03:31 +0000
+ -- Tim Watson <tim@rabbitmq.com>  Tue, 25 Jun 2013 15:01:12 +0100
 
-rabbitmq-server (3.0.2-1) unstable; urgency=low
+rabbitmq-server (3.1.2-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 31 Jan 2013 15:28:12 +0000
+ -- Tim Watson <tim@rabbitmq.com>  Mon, 24 Jun 2013 11:16:41 +0100
 
-rabbitmq-server (3.0.1-1) unstable; urgency=low
+rabbitmq-server (3.1.1-1) unstable; urgency=low
 
-  * New upstream release
+  * Test release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 11 Dec 2012 15:47:52 +0000
+ -- Tim Watson <tim@rabbitmq.com>  Mon, 20 May 2013 16:21:20 +0100
 
-rabbitmq-server (3.0.0-1) unstable; urgency=low
+rabbitmq-server (3.1.0-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Mon, 19 Nov 2012 11:42:31 +0000
+ -- Simon MacMullen <simon@rabbitmq.com>  Wed, 01 May 2013 11:57:58 +0100
 
-rabbitmq-server (2.8.7-1) unstable; urgency=low
+rabbitmq-server (3.0.1-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 27 Sep 2012 16:28:21 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Tue, 11 Dec 2012 11:29:55 +0000
 
-rabbitmq-server (2.8.6-1) unstable; urgency=low
+rabbitmq-server (3.0.0-1) unstable; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Wed, 22 Aug 2012 13:28:21 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Fri, 16 Nov 2012 14:15:29 +0000
 
-rabbitmq-server (2.8.5-1) unstable; urgency=low
+rabbitmq-server (2.7.1-1) natty; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 02 Aug 2012 16:12:21 +0100
+ -- Steve Powell <steve@rabbitmq.com>  Fri, 16 Dec 2011 12:12:36 +0000
 
-rabbitmq-server (2.8.4-1) unstable; urgency=low
+rabbitmq-server (2.7.0-1) natty; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Fri, 22 Jun 2012 17:48:28 +0100
+ -- Steve Powell <steve@rabbitmq.com>  Tue, 08 Nov 2011 16:47:50 +0000
 
-rabbitmq-server (2.8.3-1) unstable; urgency=low
+rabbitmq-server (2.6.1-1) natty; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Thu, 21 Jun 2012 13:38:57 +0100
+ -- Tim <tim@rabbitmq.com>  Fri, 09 Sep 2011 14:38:45 +0100
 
-rabbitmq-server (2.8.2-2) unstable; urgency=low
+rabbitmq-server (2.6.0-1) natty; urgency=low
 
-  * Add version numbers to plugins
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 01 May 2012 10:48:57 +0100
+ -- Tim <tim@rabbitmq.com>  Fri, 26 Aug 2011 16:29:40 +0100
 
-rabbitmq-server (2.8.2-1) unstable; urgency=low
+rabbitmq-server (2.5.1-1) lucid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Mon, 30 Apr 2012 14:07:32 +0100
+ -- Simon MacMullen <simon@rabbitmq.com>  Mon, 27 Jun 2011 11:21:49 +0100
 
-rabbitmq-server (2.8.1-1) unstable; urgency=low
+rabbitmq-server (2.5.0-1) lucid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Fri, 23 Mar 2012 10:05:24 +0000
+ --  <jerryk@vmware.com>  Thu, 09 Jun 2011 07:20:29 -0700
 
-rabbitmq-server (2.8.0-1) unstable; urgency=low
+rabbitmq-server (2.4.1-1) lucid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- Emile Joubert <emile@rabbitmq.com>  Tue, 20 Mar 2012 11:55:10 +0000
+ -- Alexandru Scvortov <alexandru@rabbitmq.com>  Thu, 07 Apr 2011 16:49:22 +0100
 
-rabbitmq-server (2.6.1-2) unstable; urgency=low
+rabbitmq-server (2.4.0-1) lucid; urgency=low
 
-  * Add DM-Upload-Allowed flag to control file to allow Maintainer uploads
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Mon, 19 Mar 2012 21:13:54 +0200
+ -- Alexandru Scvortov <alexandru@rabbitmq.com>  Tue, 22 Mar 2011 17:34:31 +0000
 
-rabbitmq-server (2.6.1-1) unstable; urgency=low
+rabbitmq-server (2.3.1-1) lucid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Tue, 27 Sep 2011 17:53:57 +0200
+ -- Simon MacMullen <simon@rabbitmq.com>  Thu, 03 Feb 2011 12:43:56 +0000
 
-rabbitmq-server (2.5.0-1) unstable; urgency=low
+rabbitmq-server (2.3.0-1) lucid; urgency=low
 
-  * New upstream release 
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Thu, 16 Jun 2011 09:55:40 +0200
+ -- Simon MacMullen <simon@rabbitmq.com>  Tue, 01 Feb 2011 12:52:16 +0000
 
-rabbitmq-server (2.4.1-1) unstable; urgency=low
+rabbitmq-server (2.2.0-1) lucid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Sat, 09 Apr 2011 09:34:06 +0200
+ -- Rob Harrop <rob@rabbitmq.com>  Mon, 29 Nov 2010 12:24:48 +0000
 
-rabbitmq-server (2.4.0-1) unstable; urgency=low
+rabbitmq-server (2.1.1-1) lucid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Wed, 23 Mar 2011 21:11:17 +0200
+ -- Vlad Alexandru Ionescu <vlad@rabbitmq.com>  Tue, 19 Oct 2010 17:20:10 +0100
 
-rabbitmq-server (2.3.1-1) unstable; urgency=low
+rabbitmq-server (2.1.0-1) lucid; urgency=low
 
-  * New upstream release, closes: #611253
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Sat, 05 Feb 2011 10:21:16 +0200
+ -- Marek Majkowski <marek@rabbitmq.com>  Tue, 14 Sep 2010 14:20:17 +0100
 
-rabbitmq-server (2.2.0-1) unstable; urgency=low
+rabbitmq-server (2.0.0-1) karmic; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Thu, 02 Dec 2010 20:41:53 +0200
+ -- Michael Bridgen <mikeb@rabbitmq.com>  Mon, 23 Aug 2010 14:55:39 +0100
 
-rabbitmq-server (2.1.0-1) unstable; urgency=low
+rabbitmq-server (1.8.1-1) lucid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Mon, 27 Sep 2010 20:28:06 +0200
+ -- Emile Joubert <emile@rabbitmq.com>  Wed, 14 Jul 2010 15:05:24 +0100
 
-rabbitmq-server (2.0.0-2) unstable; urgency=low
+rabbitmq-server (1.8.0-1) intrepid; urgency=low
 
-  * Fix various scripts that were not updated correctly in
-    - the 2.0.0-1 package, closes: #594724
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Thu, 02 Sep 2010 18:01:37 +0200
+ -- Matthew Sackman <matthew@rabbitmq.com>  Tue, 15 Jun 2010 12:48:48 +0100
 
-rabbitmq-server (2.0.0-1) unstable; urgency=low
+rabbitmq-server (1.7.2-1) intrepid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Sat, 28 Aug 2010 11:21:48 +0200
+ -- Matthew Sackman <matthew@lshift.net>  Mon, 15 Feb 2010 15:54:47 +0000
 
-rabbitmq-server (1.8.1-1) unstable; urgency=low
+rabbitmq-server (1.7.1-1) intrepid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Sun, 01 Aug 2010 15:47:46 +0200
+ -- Matthew Sackman <matthew@lshift.net>  Fri, 22 Jan 2010 14:14:29 +0000
 
-rabbitmq-server (1.8.0-1) unstable; urgency=low
+rabbitmq-server (1.7.0-1) intrepid; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Thu, 24 Jun 2010 18:43:04 +0200
+ -- David Wragg <dpw@lshift.net>  Mon, 05 Oct 2009 13:44:41 +0100
 
-rabbitmq-server (1.7.0-3) unstable; urgency=low
+rabbitmq-server (1.6.0-1) hardy; urgency=low
 
-  * Add missing entries in rabbitmq-server.init
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Sun, 25 Oct 2009 10:21:25 +0200
+ -- Matthias Radestock <matthias@lshift.net>  Tue, 16 Jun 2009 15:02:58 +0100
 
-rabbitmq-server (1.7.0-2) unstable; urgency=low
+rabbitmq-server (1.5.5-1) hardy; urgency=low
 
-  * moved debian/init.d to rabbitmq-server.init
-  * included fixes to rabbitmq-script-wrapper
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Wed, 14 Oct 2009 12:23:52 +0200
+ -- Matthias Radestock <matthias@lshift.net>  Tue, 19 May 2009 09:57:54 +0100
 
-rabbitmq-server (1.7.0-1) unstable; urgency=low
+rabbitmq-server (1.5.4-1) hardy; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Sat, 10 Oct 2009 13:28:39 +0200
+ -- Matthias Radestock <matthias@lshift.net>  Mon, 06 Apr 2009 09:19:32 +0100
 
-rabbitmq-server (1.6.0-1) unstable; urgency=low
+rabbitmq-server (1.5.3-1) hardy; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Sat, 20 Jun 2009 12:22:17 +0200
+ -- Tony Garnock-Jones <tonyg@lshift.net>  Tue, 24 Feb 2009 18:23:33 +0000
 
-rabbitmq-server (1.5.5-3) unstable; urgency=low
+rabbitmq-server (1.5.2-1) hardy; urgency=low
 
-  *  debian/control: Reduce Erlang dependencies to just
-     erlang-mnesia and erlang-os-mon, closes: #532867
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Tue, 16 Jun 2009 14:15:20 +0200
+ -- Tony Garnock-Jones <tonyg@lshift.net>  Mon, 23 Feb 2009 16:03:38 +0000
 
-rabbitmq-server (1.5.5-2) unstable; urgency=low
+rabbitmq-server (1.5.1-1) hardy; urgency=low
 
-  * Include updates to debian package by rabbit team:
-    - quiet log rotate 
-    - update build-depends and depends for new erlang packages
-    - debian/watch file
-    - add rabbitmq-script-wrapper
-    - update init.d scripts
-    - clean /etc/rabbitmq in postrm
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Wed, 03 Jun 2009 12:23:50 +0200
+ -- Simon MacMullen <simon@lshift.net>  Mon, 19 Jan 2009 15:46:13 +0000
 
-rabbitmq-server (1.5.5-1) unstable; urgency=low
+rabbitmq-server (1.5.0-1) testing; urgency=low
 
-  * New upstream release
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Tue, 02 Jun 2009 12:53:32 +0200
+ -- Matthias Radestock <matthias@lshift.net>  Wed, 17 Dec 2008 18:23:47 +0000
 
-rabbitmq-server (1.5.4-4) unstable; urgency=low
+rabbitmq-server (1.4.0-1) testing; urgency=low
 
-  * Add new dependency on erlang-os-mon to work with new erlang packages in
-    debian
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Mon, 11 May 2009 21:20:04 +0200
+ -- Tony Garnock-Jones <tonyg@lshift.net>  Thu, 24 Jul 2008 13:21:48 +0100
 
-rabbitmq-server (1.5.4-3) unstable; urgency=low
+rabbitmq-server (1.3.0-1) testing; urgency=low
 
-  * Previous partial upload failed
+  * New Upstream Release
 
- -- John Leuner <jewel@debian.org>  Fri, 10 Apr 2009 20:25:51 +0200
+ -- Adrien Pierard <adrien@lshift.net>  Mon, 03 Mar 2008 15:34:38 +0000
 
-rabbitmq-server (1.5.4-2) unstable; urgency=low
+rabbitmq-server (1.2.0-2) testing; urgency=low
 
-  * Fix incorrect changed-by field in .changes file
+  * Fixed rabbitmqctl wrapper script
 
- -- John Leuner <jewel@debian.org>  Fri, 10 Apr 2009 19:40:04 +0200
+ -- Simon MacMullen <simon@lshift.net>  Fri, 05 Oct 2007 11:55:00 +0100
 
-rabbitmq-server (1.5.4-1) unstable; urgency=low
+rabbitmq-server (1.2.0-1) testing; urgency=low
 
   * New upstream release
 
- -- John Leuner <jewel@debian.org>  Fri, 10 Apr 2009 17:54:21 +0200
-
-rabbitmq-server (1.5.0-5) unstable; urgency=low
-
-  * Include the full license for amqp-0.8.json in debian/copyright
+ -- Simon MacMullen <simon@lshift.net>  Wed, 26 Sep 2007 11:49:26 +0100
 
- -- John Leuner <jewel@debian.org>  Fri, 27 Feb 2009 16:16:54 +0200
+rabbitmq-server (1.1.1-1) testing; urgency=low
 
-rabbitmq-server (1.5.0-4) unstable; urgency=low
+  * New upstream release
 
-  * Clarify and explicitly list the license and copyright for
-    codegen/amqp-0.8.json
-       
-  * Explicitly list the authors and copyright for the rest of the codebase
-    at the top of debian/copyright
-       
-  * Clarify the copyright of the files in src/tcp_* . The rabbitmq authors have
-    confirmed that they are the original authors of this code and that the
-    files at http://code.google.com/p/cacherl/ were taken without their
-    knowledge from the rabbitmq project. (Comparing the commit dates in version
-    control shows that cacherl is newer)
+ -- Simon MacMullen <simon@lshift.net>  Wed, 29 Aug 2007 12:03:15 +0100
 
- -- John Leuner <jewel@debian.org>  Wed, 25 Feb 2009 13:10:15 +0200
+rabbitmq-server (1.1.0-alpha-2) testing; urgency=low
 
-rabbitmq-server (1.5.0-3) unstable; urgency=low
+  * Fixed erlang-nox dependency
 
-  * Previous changelog entry had an incorrect Maintainer name
+ -- Simon MacMullen <simon@lshift.net>  Thu, 02 Aug 2007 11:27:13 +0100
 
- -- John Leuner <jewel@debian.org>  Wed, 28 Jan 2009 16:45:33 +0200
+rabbitmq-server (1.1.0-alpha-1) testing; urgency=low
 
-rabbitmq-server (1.5.0-2) unstable; urgency=low
+  * New upstream release
 
-  * Reupload package to unstable. Mistakenly uploaded to testing last time, closes: #507902
+ -- Simon MacMullen <simon@lshift.net>  Fri, 20 Jul 2007 18:17:33 +0100
 
- -- John Leuner <jewel@debian.org>  Mon, 19 Jan 2009 17:38:43 +0200
+rabbitmq-server (1.0.0-alpha-1) unstable; urgency=low
 
-rabbitmq-server (1.5.0-1) testing; urgency=low
+  * Initial release
 
-  * New Upstream Release
-  * First Debian upload, closes: #507902
+ -- Tony Garnock-Jones <tonyg@shortstop.lshift.net>  Wed, 31 Jan 2007 19:06:33 +0000
 
- -- John Leuner <jewel@debian.org>  Wed, 17 Dec 2008 18:23:47 +0000
index dd615458646f13ac9e6dc801a0e4af22f3925b76..a962c7020412d10a1834dff6773278fb83a5cd46 100644 (file)
@@ -1,30 +1,24 @@
 Source: rabbitmq-server
 Section: net
 Priority: extra
-Maintainer: PKG OpenStack <openstack-devel@lists.alioth.debian.org>
-Uploaders: James Page <james.page@ubuntu.com>, Thomas Goirand <zigo@debian.org>
-Build-Depends: debhelper (>= 9~),
-               dh-systemd (>= 1.5),
-               erlang-dev,
-               erlang-nox (>= 1:13.b.3),
-               erlang-src (>= 1:13.b.3),
-               python-simplejson,
-               unzip,
-               xmlto,
-               xsltproc,
-               zip
-Standards-Version: 3.9.6
-Vcs-Browser: http://anonscm.debian.org/gitweb/?p=openstack/rabbitmq-server.git
-Vcs-Git: git://anonscm.debian.org/openstack/rabbitmq-server.git
-Homepage: http://www.rabbitmq.com/
+Maintainer: RabbitMQ Team <info@rabbitmq.com>
+Uploaders: Michael Klishin <michael@rabbitmq.com>,
+ Karl Nilsson <knilsson@rabbitmq.com>,
+ Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+Build-Depends: debhelper (>= 9),
+ dh-systemd (>= 1.5),
+ erlang-dev, erlang-src,
+ python-simplejson,
+ xmlto,
+ xsltproc,
+ erlang-nox (>= 1:16.b.3) | esl-erlang,
+ zip,
+ rsync
+Standards-Version: 3.9.4
 
 Package: rabbitmq-server
 Architecture: all
-Depends: adduser,
-         erlang-nox (>= 1:13.b.3) | esl-erlang,
-         logrotate,
-         ${misc:Depends}
-Description: AMQP server written in Erlang
- RabbitMQ is an implementation of AMQP, the emerging standard for high
- performance enterprise messaging. The RabbitMQ server is a robust and
- scalable implementation of an AMQP broker.
+Depends: erlang-nox (>= 1:16.b.3) | esl-erlang, adduser, logrotate, socat, init-system-helpers (>= 1.13~)
+Description: Multi-protocol messaging broker
+ RabbitMQ is an open source multi-protocol messaging broker.
+Homepage: http://www.rabbitmq.com/
index 3ce58ffa780c25bfb974d8bcb8063b0bfecb2531..521b903754223817744df3ad838273a55868444e 100644 (file)
 Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: RabbitMQ
-Source: http://www.rabbitmq.com/
-
-Files: debian/*
-Copyright: (c) 2007-2013, GoPivotal, Inc.
-           (c) 2007, Tony Garnock-Jones <tonyg@rabbitmq.com>
-           (c) 2014, Blair Hester <bhester@gopivotal.com>
-           (c) 2012-2014, Emile Joubert <emile@rabbitmq.com>
-           (c) 2008-2012, John Leuner <jewel@debian.org>
-           (c) 2014, James Page <james.page@canonical.com>
-           (c) 2014, Thomas Goirand <zigo@debian.org>
-License: MPL-1.1
-
-Files: codegen/amqp-rabbitmq-*.json
-Copyright: (c) 2008-2013, GoPivotal Inc.
-License: Expat
-
-Files: plugins-src/rabbitmq-management/priv/www/js/jquery*.js
-Copyright: (c) 2010 John Resig
-License: Expat
-Comments: Downloaded from http://jquery.com/
-
-Files: plugins-src/rabbitmq-management/priv/www/js/ejs*
- plugins-src/rabbitmq-management/priv/www/js/tmpl
-Copyright: (c) 2007, Edward Benson
-License: Expat
-Comments: downloaded from http://embeddedjs.com/
-
-Files: plugins-src/rabbitmq-management/priv/www/js/sammy*.js
-Copyright: (c) 2008 Aaron Quint, Quirkey NYC, LLC
-License: Expat
-Comments: Downloaded from http://code.quirkey.com/sammy/
-
-Files: plugins-src/rabbitmq-management/priv/www/js/excanvas*.js
-Copyright: (c) 2006, Google Inc
-License: Apache-2.0
-Comments: Downloaded from http://code.google.com/p/explorercanvas/
-
-Files: plugins-src/rabbitmq-management/priv/www/js/jquery.flot*.js
-Copyright: (c) 2007-2013, IOLA and Ole Laursen
-License: Expat
-Comments: Downloaded from http://www.flotcharts.org/
-
-Files: plugins-src/webmachine-wrapper/*
-Copyright: (c) Basho Technologies
-License: Apache-2.0
-Comments: Downloaded from http://webmachine.basho.com/
-
-Files: plugins-src/eldap-wrapper/*
-Copyright: (c) 2010, Torbjorn Tornkvist
-License: Expat
-Comments: Downloaded from https://github.com/etnt/eldap
-
-Files: plugins-src/mochiweb-wrapper/mochiweb-git/*
-Copyright: (c) 2007, Mochi Media, Inc.
-License: Expat
-Comments: Downloaded from http://github.com/mochi/mochiweb/
-
-Files: 
- plugins-src/rabbitmq-management-visualiser/priv/www/visualiser/js/glMatrix*.js
-Copyright: (c) 2011, Brandon Jones
-License: BSD-2-Clause
-Comments: Downloaded from http://code.google.com/p/glmatrix/
+Upstream-Name: rabbitmq-server
+Upstream-Contact: Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+Source: https://github.com/rabbitmq/rabbitmq-server
 
 Files: *
-Copyright: (c) 2007-2014 GoPivotal, Inc.
+Copyright: 2007-2015 Pivotal Software, Inc.
 License: MPL-1.1
 
-License: Expat
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this file (the Software), to deal in the Software without restriction,
- including without limitation the rights to use, copy, modify, merge, publish,
- distribute, sublicense, and/or sell copies of the Software, and to permit
- persons to whom the Software is furnished to do so, subject to the following
- conditions:
- .
- The above copyright notice and this permission notice shall be included in
- all copies or substantial portions of the Software.
- .
- THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
- IN THE SOFTWARE."
-
-License: BSD-2-Clause
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are met:
- .
-  1. Redistributions of source code must retain the above copyright notice,
-     this list of conditions and the following disclaimer.
- .
-  2. Redistributions in binary form must reproduce the above copyright
-     notice, this list of conditions and the following disclaimer in the
-     documentation and/or other materials provided with the distribution.
- .
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- POSSIBILITY OF SUCH DAMAGE."
+Files: src/mochinum.erl deps/rabbit_common/src/mochijson2.erl
+Copyright: 2007 Mochi Media, Inc.
+License: MIT
 
 License: MPL-1.1
- MOZILLA PUBLIC LICENSE Version 1.1
- 1. Definitions.
- .
- 1.0.1. "Commercial Use" means distribution or otherwise making the Covered
- Code available to a third party.
- .
- 1.1. "Contributor" means each entity that creates or contributes to the
- creation of Modifications.
- .
- 1.2. "Contributor Version" means the combination of the Original Code, prior
- Modifications used by a Contributor, and the Modifications made by that
- particular Contributor.
- .
- 1.3. "Covered Code" means the Original Code or Modifications or the
- combination of the Original Code and Modifications, in each case including
- portions thereof.
- .
- 1.4. "Electronic Distribution Mechanism" means a mechanism generally accepted
- in the software development community for the electronic transfer of data.
- .
- 1.5. "Executable" means Covered Code in any form other than Source Code.
- .
- 1.6. "Initial Developer" means the individual or entity identified as the
- Initial Developer in the Source Code notice required by Exhibit A.
- .
- 1.7. "Larger Work" means a work which combines Covered Code or portions
- thereof with code not governed by the terms of this License.
- .
- 1.8. "License" means this document.
- .
- 1.8.1. "Licensable" means having the right to grant, to the maximum extent
- possible, whether at the time of the initial grant or subsequently acquired,
- any and all of the rights conveyed herein.
- .
- 1.9. "Modifications" means any addition to or deletion from the substance or
- structure of either the Original Code or any previous Modifications. When
- Covered Code is released as a series of files, a Modification is:
- .
- A. Any addition to or deletion from the contents of a file containing
-    Original Code or previous Modifications.
- .
- B. Any new file that contains any part of the Original Code or previous
-    Modifications.
- .
- 1.10. "Original Code" means Source Code of computer software code which is
- described in the Source Code notice required by Exhibit A as Original Code,
- and which, at the time of its release under this License is not already
- Covered Code governed by this License.
- .
- 1.10.1. "Patent Claims" means any patent claim(s), now owned or hereafter
- acquired, including without limitation,  method, process, and apparatus
- claims, in any patent Licensable by grantor.
- .
- 1.11. "Source Code" means the preferred form of the Covered Code for making
- modifications to it, including all modules it contains, plus any associated
- interface definition files, scripts used to control compilation and
- installation of an Executable, or source code differential comparisons
- against either the Original Code or another well known, available Covered
- Code of the Contributor's choice. The Source Code can be in a compressed or
- archival form, provided the appropriate decompression or de-archiving
- software is widely available for no charge.
- .
- 1.12. "You" (or "Your")  means an individual or a legal entity exercising
- rights under, and complying with all of the terms of, this License or a
- future version of this License issued under Section 6.1. For legal entities,
- "You" includes any entity which controls, is controlled by, or is under
- common control with You. For purposes of this definition, "control" means (a)
- the power, direct or indirect, to cause the direction or management of such
- entity, whether by contract or otherwise, or (b) ownership of more than fifty
- percent (50%) of the outstanding shares or beneficial ownership of such
- entity.
- .
- 2. Source Code License.
- .
- 2.1. The Initial Developer Grant.
- The Initial Developer hereby grants You a world-wide, royalty-free,
- non-exclusive license, subject to third party intellectual property claims:
- .
- (a) under intellectual property rights (other than patent or
-     trademark) Licensable by Initial Developer to use, reproduce,
-     modify, display, perform, sublicense and distribute the Original
-     Code (or portions thereof) with or without Modifications, and/or
-     as part of a Larger Work; and
- .
- (b) under Patents Claims infringed by the making, using or
-     selling of Original Code, to make, have made, use, practice,
-     sell, and offer for sale, and/or otherwise dispose of the
-     Original Code (or portions thereof).
- .
- (c) the licenses granted in this Section 2.1(a) and (b) are
-     effective on the date Initial Developer first distributes
-     Original Code under the terms of this License.
- .
- (d) Notwithstanding Section 2.1(b) above, no patent license is
-     granted: 1) for code that You delete from the Original Code; 2)
-     separate from the Original Code;  or 3) for infringements caused
-     by: i) the modification of the Original Code or ii) the
-     combination of the Original Code with other software or devices.
- .
- 2.2. Contributor Grant.
- Subject to third party intellectual property claims, each Contributor hereby
- grants You a world-wide, royalty-free, non-exclusive license
- .
- (a) under intellectual property rights (other than patent or
-     trademark) Licensable by Contributor, to use, reproduce, modify,
-     display, perform, sublicense and distribute the Modifications
-     created by such Contributor (or portions thereof) either on an
-     unmodified basis, with other Modifications, as Covered Code
-     and/or as part of a Larger Work; and
- .
- (b) under Patent Claims infringed by the making, using, or
-     selling of  Modifications made by that Contributor either alone
-     and/or in combination with its Contributor Version (or portions
-     of such combination), to make, use, sell, offer for sale, have
-     made, and/or otherwise dispose of: 1) Modifications made by that
-     Contributor (or portions thereof); and 2) the combination of
-     Modifications made by that Contributor with its Contributor
-     Version (or portions of such combination).
- .
- (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
-     effective on the date Contributor first makes Commercial Use of
-     the Covered Code.
- .
- (d) Notwithstanding Section 2.2(b) above, no patent license is
-     granted: 1) for any code that Contributor has deleted from the
-     Contributor Version; 2)  separate from the Contributor Version;
-     3)  for infringements caused by: i) third party modifications of
-     Contributor Version or ii)  the combination of Modifications made
-     by that Contributor with other software  (except as part of the
-     Contributor Version) or other devices; or 4) under Patent Claims
-     infringed by Covered Code in the absence of Modifications made by
-     that Contributor.
- .
- 3. Distribution Obligations.
- .
- 3.1. Application of License.
- .
- The Modifications which You create or to which You contribute are governed by
- the terms of this License, including without limitation Section 2.2. The
- Source Code version of Covered Code may be distributed only under the terms of
- this License or a future version of this License released under Section 6.1,
- and You must include a copy of this License with every copy of the Source Code
- You distribute. You may not offer or impose any terms on any Source Code
- version that alters or restricts the applicable version of this License or the
- recipients' rights hereunder. However, You may include an additional document
- offering the additional rights described in Section 3.5.
- .
- 3.2. Availability of Source Code.
- .
- Any Modification which You create or to which You contribute must be made
- available in Source Code form under the terms of this License either on the
- same media as an Executable version or via an accepted Electronic Distribution
- Mechanism to anyone to whom you made an Executable version available; and if
- made available via Electronic Distribution Mechanism, must remain available
- for at least twelve (12) months after the date it initially became available,
- or at least six (6) months after a subsequent version of that particular
- Modification has been made available to such recipients. You are responsible
- for ensuring that the Source Code version remains available even if the
- Electronic Distribution Mechanism is maintained by a third party.
- .
- 3.3. Description of Modifications.
- .
- You must cause all Covered Code to which You contribute to contain a file
- documenting the changes You made to create that Covered Code and the date of
- any change. You must include a prominent statement that the Modification is
- derived, directly or indirectly, from Original Code provided by the Initial
- Developer and including the name of the Initial Developer in (a) the Source
- Code, and (b) in any notice in an Executable version or related documentation
- in which You describe the origin or ownership of the Covered Code.
- .
- 3.4. Intellectual Property Matters
- .
- (a) Third Party Claims.
- .
- If Contributor has knowledge that a license under a third party's
- intellectual property rights is required to exercise the rights
- granted by such Contributor under Sections 2.1 or 2.2,
- Contributor must include a text file with the Source Code
- distribution titled "LEGAL" which describes the claim and the
- party making the claim in sufficient detail that a recipient will
- know whom to contact. If Contributor obtains such knowledge after
- the Modification is made available as described in Section 3.2,
- Contributor shall promptly modify the LEGAL file in all copies
- Contributor makes available thereafter and shall take other steps
- (such as notifying appropriate mailing lists or newsgroups)
- reasonably calculated to inform those who received the Covered
- Code that new knowledge has been obtained.
- .
- (b) Contributor APIs.
- .
- If Contributor's Modifications include an application programming
- interface and Contributor has knowledge of patent licenses which
- are reasonably necessary to implement that API, Contributor must
- also include this information in the LEGAL file.
- .
- (c) Representations.
- .
- Contributor represents that, except as disclosed pursuant to
- Section 3.4(a) above, Contributor believes that Contributor's
- Modifications are Contributor's original creation(s) and/or
- Contributor has sufficient rights to grant the rights conveyed by
- this License.
- .
- 3.5. Required Notices.
- .
- You must duplicate the notice in Exhibit A in each file of the Source
- Code.  If it is not possible to put such notice in a particular Source
- Code file due to its structure, then You must include such notice in a
- location (such as a relevant directory) where a user would be likely
- to look for such a notice.  If You created one or more Modification(s)
- You may add your name as a Contributor to the notice described in
- Exhibit A.  You must also duplicate this License in any documentation
- for the Source Code where You describe recipients' rights or ownership
- rights relating to Covered Code.  You may choose to offer, and to
- charge a fee for, warranty, support, indemnity or liability
- obligations to one or more recipients of Covered Code. However, You
- may do so only on Your own behalf, and not on behalf of the Initial
- Developer or any Contributor. You must make it absolutely clear than
- any such warranty, support, indemnity or liability obligation is
- offered by You alone, and You hereby agree to indemnify the Initial
- Developer and every Contributor for any liability incurred by the
- Initial Developer or such Contributor as a result of warranty,
- support, indemnity or liability terms You offer.
- .
- 3.6. Distribution of Executable Versions.
- .
- You may distribute Covered Code in Executable form only if the
- requirements of Section 3.1-3.5 have been met for that Covered Code,
- and if You include a notice stating that the Source Code version of
- the Covered Code is available under the terms of this License,
- including a description of how and where You have fulfilled the
- obligations of Section 3.2. The notice must be conspicuously included
- in any notice in an Executable version, related documentation or
- collateral in which You describe recipients' rights relating to the
- Covered Code. You may distribute the Executable version of Covered
- Code or ownership rights under a license of Your choice, which may
- contain terms different from this License, provided that You are in
- compliance with the terms of this License and that the license for the
- Executable version does not attempt to limit or alter the recipient's
- rights in the Source Code version from the rights set forth in this
- License. If You distribute the Executable version under a different
- license You must make it absolutely clear that any terms which differ
- from this License are offered by You alone, not by the Initial
- Developer or any Contributor. You hereby agree to indemnify the
- Initial Developer and every Contributor for any liability incurred by
- the Initial Developer or such Contributor as a result of any such
- terms You offer.
- .
- 3.7. Larger Works.
- .
- You may create a Larger Work by combining Covered Code with other code
- not governed by the terms of this License and distribute the Larger
- Work as a single product. In such a case, You must make sure the
- requirements of this License are fulfilled for the Covered Code.
- .
- 4. Inability to Comply Due to Statute or Regulation.
- .
- If it is impossible for You to comply with any of the terms of this
- License with respect to some or all of the Covered Code due to
- statute, judicial order, or regulation then You must: (a) comply with
- the terms of this License to the maximum extent possible; and (b)
- describe the limitations and the code they affect. Such description
- must be included in the LEGAL file described in Section 3.4 and must
- be included with all distributions of the Source Code. Except to the
- extent prohibited by statute or regulation, such description must be
- sufficiently detailed for a recipient of ordinary skill to be able to
- understand it.
- .
- 5. Application of this License.
- .
- This License applies to code to which the Initial Developer has
- attached the notice in Exhibit A and to related Covered Code.
- .
- 6. Versions of the License.
- .
- 6.1. New Versions.
- .
- Netscape Communications Corporation ("Netscape") may publish revised
- and/or new versions of the License from time to time. Each version
- will be given a distinguishing version number.
- .
- 6.2. Effect of New Versions.
- .
- Once Covered Code has been published under a particular version of the
- License, You may always continue to use it under the terms of that
- version. You may also choose to use such Covered Code under the terms
- of any subsequent version of the License published by Netscape. No one
- other than Netscape has the right to modify the terms applicable to
- Covered Code created under this License.
- .
- 6.3. Derivative Works.
- .
- If You create or use a modified version of this License (which you may
- only do in order to apply it to code which is not already Covered Code
- governed by this License), You must (a) rename Your license so that
- the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
- "MPL", "NPL" or any confusingly similar phrase do not appear in your
- license (except to note that your license differs from this License)
- and (b) otherwise make it clear that Your version of the license
- contains terms which differ from the Mozilla Public License and
- Netscape Public License. (Filling in the name of the Initial
- Developer, Original Code or Contributor in the notice described in
- Exhibit A shall not of themselves be deemed to be modifications of
- this License.)
- .
- 7. DISCLAIMER OF WARRANTY.
- .
- COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
- WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
- WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
- DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
- THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
- IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
- YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
- COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
- OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
- ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
- .
- 8. TERMINATION.
- .
- 8.1.  This License and the rights granted hereunder will terminate
- automatically if You fail to comply with terms herein and fail to cure
- such breach within 30 days of becoming aware of the breach. All
- sublicenses to the Covered Code which are properly granted shall
- survive any termination of this License. Provisions which, by their
- nature, must remain in effect beyond the termination of this License
- shall survive.
- .
- 8.2.  If You initiate litigation by asserting a patent infringement
- claim (excluding declatory judgment actions) against Initial Developer
- or a Contributor (the Initial Developer or Contributor against whom
- You file such action is referred to as "Participant")  alleging that:
- .
- (a)  such Participant's Contributor Version directly or indirectly
- infringes any patent, then any and all rights granted by such
- Participant to You under Sections 2.1 and/or 2.2 of this License
- shall, upon 60 days notice from Participant terminate prospectively,
- unless if within 60 days after receipt of notice You either: (i)
- agree in writing to pay Participant a mutually agreeable reasonable
- royalty for Your past and future use of Modifications made by such
- Participant, or (ii) withdraw Your litigation claim with respect to
- the Contributor Version against such Participant.  If within 60 days
- of notice, a reasonable royalty and payment arrangement are not
- mutually agreed upon in writing by the parties or the litigation claim
- is not withdrawn, the rights granted by Participant to You under
- Sections 2.1 and/or 2.2 automatically terminate at the expiration of
- the 60 day notice period specified above.
- .
- (b)  any software, hardware, or device, other than such Participant's
- Contributor Version, directly or indirectly infringes any patent, then
- any rights granted to You by such Participant under Sections 2.1(b)
- and 2.2(b) are revoked effective as of the date You first made, used,
- sold, distributed, or had made, Modifications made by that
- Participant.
- .
- 8.3.  If You assert a patent infringement claim against Participant
- alleging that such Participant's Contributor Version directly or
- indirectly infringes any patent where such claim is resolved (such as
- by license or settlement) prior to the initiation of patent
- infringement litigation, then the reasonable value of the licenses
- granted by such Participant under Sections 2.1 or 2.2 shall be taken
- into account in determining the amount or value of any payment or
- license.
- .
- 8.4.  In the event of termination under Sections 8.1 or 8.2 above,
- all end user license agreements (excluding distributors and resellers)
- which have been validly granted by You or any distributor hereunder
- prior to termination shall survive termination.
- .
- 9. LIMITATION OF LIABILITY.
- .
- UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
- (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
- DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
- OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
- ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
- CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
- WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
- COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
- INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
- LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
- RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
- PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
- EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
- THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
- .
- 10. U.S. GOVERNMENT END USERS.
- .
- The Covered Code is a "commercial item," as that term is defined in
- 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
- software" and "commercial computer software documentation," as such
- terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
- C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
- all U.S. Government End Users acquire Covered Code with only those
- rights set forth herein.
- .
- 11. MISCELLANEOUS.
- .
- This License represents the complete agreement concerning subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. This License shall be governed by
- California law provisions (except to the extent applicable law, if
- any, provides otherwise), excluding its conflict-of-law provisions.
- With respect to disputes in which at least one party is a citizen of,
- or an entity chartered or registered to do business in the United
- States of America, any litigation relating to this License shall be
- subject to the jurisdiction of the Federal Courts of the Northern
- District of California, with venue lying in Santa Clara County,
- California, with the losing party responsible for costs, including
- without limitation, court costs and reasonable attorneys' fees and
- expenses. The application of the United Nations Convention on
- Contracts for the International Sale of Goods is expressly excluded.
- Any law or regulation which provides that the language of a contract
- shall be construed against the drafter shall not apply to this
- License.
- .
- 12. RESPONSIBILITY FOR CLAIMS.
- .
- As between Initial Developer and the Contributors, each party is
- responsible for claims and damages arising, directly or indirectly,
- out of its utilization of rights under this License and You agree to
- work with Initial Developer and Contributors to distribute such
- responsibility on an equitable basis. Nothing herein is intended or
- shall be deemed to constitute any admission of liability.
- .
- 13. MULTIPLE-LICENSED CODE.
- .
- Initial Developer may designate portions of the Covered Code as
- "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
- Developer permits you to utilize portions of the Covered Code under
- Your choice of the NPL or the alternative licenses, if any, specified
- by the Initial Developer in the file described in Exhibit A.
- .
- EXHIBIT A -Mozilla Public License.
- .
- The contents of this file are subject to the Mozilla Public License Version
- 1.1 (the "License"); you may not use this file except in compliance with the
- License. You may obtain a copy of the License at http://www.mozilla.org/MPL/
- .
- Software distributed under the License is distributed on an "AS IS" basis,
- WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
- the specific language governing rights and limitations under the License.
- .
- The Original Code is RabbitMQ.
- .
- The Initial Developer of the Original Code is GoPivotal, Inc. Copyright (c)
- 2007-2014 GoPivotal, Inc. All rights reserved.
-
-License: Apache-2.0
-  On Debian GNU/Linux system you can find the complete text of the
-  Apache-2.0 license in '/usr/share/common-licenses/Apache-2.0'
+ The contents of this file are subject to the Mozilla Public License
+ Version 1.1 (the "License"); you may not use this file except in
+ compliance with the License. You may obtain a copy of the License at
+ http://www.mozilla.org/MPL/
+ .
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License
+ .
+ The Original Code is RabbitMQ
+ .
+ The Initial Developer of the Original Code is Pivotal Software, Inc.
+ Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+
+License: MIT
+ This is the MIT license
+ .
+ Copyright (c) 2007 Mochi Media, Inc
+ .
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions
+ :
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Software
+ .
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/debian/dirs b/debian/dirs
new file mode 100644 (file)
index 0000000..625b7d4
--- /dev/null
@@ -0,0 +1,9 @@
+usr/lib/rabbitmq/bin
+usr/lib/erlang/lib
+usr/sbin
+usr/share/man
+var/lib/rabbitmq/mnesia
+var/log/rabbitmq
+etc/logrotate.d
+etc/rabbitmq
+
diff --git a/debian/gbp.conf b/debian/gbp.conf
deleted file mode 100644 (file)
index ee339ed..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-[DEFAULT]
-debian-branch = master
-pristine-tar = True
-
-[buildpackage]
-export-dir = ../build-area/
diff --git a/debian/patches/detect-stuck-queue-on-declare.diff b/debian/patches/detect-stuck-queue-on-declare.diff
deleted file mode 100644 (file)
index 00c43f3..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-Description: Detect infinite loop in AMQP channel code
-    Sudden death of cluster node could result in a stuck queue process - this will result in
-    redeclare attempts to hang. With this patch such condition will be detected - AMQP channel will
-    be closed and error will be logged. And probably it could help us to discover underlying bug, by
-    localizing the event in time.
-Author: Alexey Lebedeff <alebedev@mirantis.com>
-Origin: upstream, https://github.com/rabbitmq/rabbitmq-common/pull/26
-Bug:https://github.com/rabbitmq/rabbitmq-server/issues/349
----
-This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
---- a/src/rabbit_amqqueue.erl
-+++ b/src/rabbit_amqqueue.erl
-@@ -385,6 +385,11 @@ not_found_or_absent_dirty(Name) ->
-     end.
- with(Name, F, E) ->
-+    with(Name, F, E, 2000).
-+
-+with(Name, _F, E, 0) ->
-+    E(not_found_or_absent_dirty(Name));
-+with(Name, F, E, RetriesLeft) ->
-     case lookup(Name) of
-         {ok, Q = #amqqueue{state = crashed}} ->
-             E({absent, Q, crashed});
-@@ -397,8 +402,8 @@ with(Name, F, E) ->
-             %% the retry loop.
-             rabbit_misc:with_exit_handler(
-               fun () -> false = rabbit_mnesia:is_process_alive(QPid),
--                        timer:sleep(25),
--                        with(Name, F, E)
-+                        timer:sleep(30),
-+                        with(Name, F, E, RetriesLeft - 1)
-               end, fun () -> F(Q) end);
-         {error, not_found} ->
-             E(not_found_or_absent_dirty(Name))
diff --git a/debian/patches/erlang-18-segfault.diff b/debian/patches/erlang-18-segfault.diff
deleted file mode 100644 (file)
index b9eaa03..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-Description: Fix rabbitmqctl segfault
-  Rabbitmq contains code that is actually undefined behaviour. While it worked in R16, 17 and will
-  work in 19, it was causing segfaults in 18.X
-Author: Alexey Lebedeff <alebedev@mirantis.com>
-Origin: upstream
-Bug: https://github.com/rabbitmq/rabbitmq-common/issues/53
----
-This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
---- a/src/rabbit_misc.erl
-+++ b/src/rabbit_misc.erl
-@@ -652,19 +652,7 @@ format_many(List) ->
-     lists:flatten([io_lib:format(F ++ "~n", A) || {F, A} <- List]).
- format_stderr(Fmt, Args) ->
--    case os:type() of
--        {unix, _} ->
--            Port = open_port({fd, 0, 2}, [out]),
--            port_command(Port, io_lib:format(Fmt, Args)),
--            port_close(Port);
--        {win32, _} ->
--            %% stderr on Windows is buffered and I can't figure out a
--            %% way to trigger a fflush(stderr) in Erlang. So rather
--            %% than risk losing output we write to stdout instead,
--            %% which appears to be unbuffered.
--            io:format(Fmt, Args)
--    end,
--    ok.
-+    io:format(standard_error, Fmt, Args).
- unfold(Fun, Init) ->
-     unfold(Fun, [], Init).
diff --git a/debian/patches/fix-management-startup-after-split.diff b/debian/patches/fix-management-startup-after-split.diff
deleted file mode 100644 (file)
index 252e893..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-Description: Backport management plugin fix
- Backport management plugin fix for bug that prevented node from starting after network split.
-Author: Alexey Lebedeff <alebedev@mirantis.com>
-Origin: backport, https://github.com/rabbitmq/rabbitmq-management/pull/84
-Bug: https://github.com/rabbitmq/rabbitmq-management/issues/81
-Applied-Upstream: 3.5.7
----
-This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
---- a/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup_sup.erl
-+++ b/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup_sup.erl
-@@ -57,7 +57,8 @@ start_child() -> supervisor2:start_child
- %%----------------------------------------------------------------------------
- init([]) ->
--    {ok, {{one_for_one, 0, 1}, [sup()]}}.
-+    timer:apply_after(0, ?MODULE, start_child, []),
-+    {ok, {{one_for_one, 0, 1}, []}}.
- sup() ->
-     {rabbit_mgmt_sup, {rabbit_mgmt_sup, start_link, []},
diff --git a/debian/patches/fix-pmon-demonitor-function.diff b/debian/patches/fix-pmon-demonitor-function.diff
deleted file mode 100644 (file)
index 5f114b3..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-Description: Fix internal state corruption introduced by pmon:demonitor/2
-Author: Alexey Lebedeff <alebedev@mirantis.com>
-Origin: backport, https://github.com/rabbitmq/rabbitmq-common/pull/18
-Applied-Upstream: 3.5.7, 3.6.0
----
-This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
-Index: rabbitmq-server/src/pmon.erl
-===================================================================
---- rabbitmq-server.orig/src/pmon.erl
-+++ rabbitmq-server/src/pmon.erl
-@@ -84,7 +84,7 @@ demonitor(Item, S = #state{dict = M, mod
-     case dict:find(Item, M) of
-         {ok, MRef} -> Module:demonitor(MRef),
-                       S#state{dict = dict:erase(Item, M)};
--        error      -> M
-+        error      -> S
-     end.
- is_monitored(Item, #state{dict = M}) -> dict:is_key(Item, M).
diff --git a/debian/patches/policy_location.patch b/debian/patches/policy_location.patch
new file mode 100644 (file)
index 0000000..f18c459
--- /dev/null
@@ -0,0 +1,11 @@
+--- a/scripts/rabbitmq-server-ha.ocf
++++ b/scripts/rabbitmq-server-ha.ocf
+@@ -47,7 +47,7 @@ OCF_RESKEY_erlang_cookie_file_default="/
+ OCF_RESKEY_use_fqdn_default=false
+ OCF_RESKEY_fqdn_prefix_default=""
+ OCF_RESKEY_max_rabbitmqctl_timeouts_default=3
+-OCF_RESKEY_policy_file_default="/usr/local/sbin/set_rabbitmq_policy"
++OCF_RESKEY_policy_file_default="/usr/lib/ocf/resource.d/fuel/set_rabbitmq_policy.sh"
+ OCF_RESKEY_rmq_feature_health_check_default=true
+ OCF_RESKEY_rmq_feature_local_list_queues_default=true
diff --git a/debian/patches/revert_private_attributes.patch b/debian/patches/revert_private_attributes.patch
new file mode 100644 (file)
index 0000000..545c859
--- /dev/null
@@ -0,0 +1,20 @@
+--- a/scripts/rabbitmq-server-ha.ocf
++++ b/scripts/rabbitmq-server-ha.ocf
+@@ -1811,7 +1811,7 @@ ocf_get_private_attr() {
+     local attr_default_value="${2:?}"
+     local nodename="${3:-$THIS_PCMK_NODE}"
+     local count
+-    count=$(attrd_updater -p --name "$attr_name" --node "$nodename" --query)
++    count=$(crm_attribute -N "$nodename" -l reboot --name "$attr_name" --query)
+     if [ $? -ne 0 ]; then
+         echo $attr_default_value
+     else
+@@ -1823,7 +1823,7 @@ ocf_update_private_attr() {
+     local attr_name="${1:?}"
+     local attr_value="${2:?}"
+     local nodename="${3:-$THIS_PCMK_NODE}"
+-    ocf_run attrd_updater -p --name "$attr_name" --node "$nodename" --update "$attr_value"
++    ocf_run crm_attribute -N "$nodename" -l reboot --name "$attr_name" --update "$attr_value"
+ }
+ rabbitmqctl_with_timeout_check() {
index c1b8dbe36e8c22246a3978cb172955e83c598819..f99b7b1c21d3ef6d7f6f2667feedb3a7eb75af40 100644 (file)
@@ -1,4 +1,2 @@
-fix-pmon-demonitor-function.diff
-fix-management-startup-after-split.diff
-detect-stuck-queue-on-declare.diff
-erlang-18-segfault.diff
+revert_private_attributes.patch
+policy_location.patch
similarity index 95%
rename from debian/rabbitmq-server.postinst
rename to debian/postinst
index b11340ef8a296dc93fbbf9f21b3b7c7008822d26..c83881e6bad4f0fe86141f7ac7fde34c7839a824 100644 (file)
@@ -32,6 +32,8 @@ fi
 
 chown -R rabbitmq:rabbitmq /var/lib/rabbitmq
 chown -R rabbitmq:rabbitmq /var/log/rabbitmq
+chmod 750 /var/lib/rabbitmq/mnesia
+chmod -R o-rwx,g-w /var/lib/rabbitmq/mnesia
 
 case "$1" in
     configure)
similarity index 70%
rename from debian/rabbitmq-server.postrm
rename to debian/postrm.in
index c2e9bbfedcae838045572dffa89871dd9654e548..e62325a496e8dfe77768680cbd09d586c61d9863 100644 (file)
@@ -60,6 +60,29 @@ esac
 
 #DEBHELPER#
 
-exit 0
 
 
+this_version=3.6.6-1~u14.04+mos1
+diversion_added_version=3.6.6-1~u14.04+mos1
+
+losing_diversion=n
+
+if test "$1" = failed-upgrade
+then
+        dpkg --compare-versions "$2" le-nl "$this_version" ||
+        # An upgrade from a newer version failed.
+        # There is no way for us to know enough to take over from here,
+        # so abort the upgrade.
+        exit 1
+elif dpkg --compare-versions "$2" lt-nl "$diversion_added_version"
+then
+        losing_diversion=y
+fi
+
+case "$1,$losing_diversion" in
+    remove,*|abort-install,*|disappear,*|*,y)
+        dpkg-divert --remove --rename /usr/lib/ocf/resource.d/fuel/rabbitmq-server-upstream
+        ;;
+esac
+
+exit 0
diff --git a/debian/preinst b/debian/preinst
new file mode 100644 (file)
index 0000000..9e2e22a
--- /dev/null
@@ -0,0 +1,11 @@
+#!/bin/sh
+this_version=3.6.6-1~u14.04+mos1
+diversion_added_version=3.6.6-1~u14.04+mos1
+
+if
+        test "$1" = install ||
+        dpkg --compare-versions "$2" lt "$diversion_added_version" ||
+        dpkg --compare-versions "$this_version" lt "$2"
+then
+    dpkg-divert --add --rename /usr/lib/ocf/resource.d/fuel/rabbitmq-server-upstream
+fi
diff --git a/debian/rabbitmq-env.conf b/debian/rabbitmq-env.conf
deleted file mode 100644 (file)
index bebe2ab..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-# Defaults to rabbit. This can be useful if you want to run more than one node
-# per machine - RABBITMQ_NODENAME should be unique per erlang-node-and-machine
-# combination. See the clustering on a single machine guide for details:
-# http://www.rabbitmq.com/clustering.html#single-machine
-#NODENAME=rabbit
-
-# By default RabbitMQ will bind to all interfaces, on IPv4 and IPv6 if
-# available. Set this if you only want to bind to one network interface or#
-# address family.
-#NODE_IP_ADDRESS=127.0.0.1
-
-# Defaults to 5672.
-#NODE_PORT=5672
diff --git a/debian/rabbitmq-server-wait b/debian/rabbitmq-server-wait
deleted file mode 100755 (executable)
index cdf53e5..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh -e
-##  The contents of this file are subject to the Mozilla Public License
-##  Version 1.1 (the "License"); you may not use this file except in
-##  compliance with the License. You may obtain a copy of the License
-##  at http://www.mozilla.org/MPL/
-##
-##  Software distributed under the License is distributed on an "AS IS"
-##  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-##  the License for the specific language governing rights and
-##  limitations under the License.
-##
-##  The Original Code is RabbitMQ.
-##
-##  The Initial Developer of the Original Code is GoPivotal, Inc.
-##  Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-##
-
-# Get default settings with user overrides for (RABBITMQ_)<var_name>
-# Non-empty defaults should be set in rabbitmq-env
-. `dirname $0`/rabbitmq-env
-
-/usr/lib/rabbitmq/bin/rabbitmqctl wait $RABBITMQ_PID_FILE
diff --git a/debian/rabbitmq-server.dirs b/debian/rabbitmq-server.dirs
deleted file mode 100644 (file)
index e6127a0..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-usr/lib/erlang/lib
-var/lib/rabbitmq/mnesia
-var/log/rabbitmq
diff --git a/debian/rabbitmq-server.docs b/debian/rabbitmq-server.docs
new file mode 100644 (file)
index 0000000..6055402
--- /dev/null
@@ -0,0 +1,2 @@
+deps/rabbit/docs/rabbitmq.config.example
+deps/rabbit/docs/set_rabbitmq_policy.sh.example
index b2d3f86ab3035116e182722bf74bde99eab1de05..fce2d1640156be910ca31063da2da2f9c5099eef 100644 (file)
@@ -3,7 +3,7 @@
 # rabbitmq-server RabbitMQ broker
 #
 # chkconfig: - 80 05
-# description: Enable AMQP service provided by RabbitMQ
+# description: Manages RabbitMQ server
 #
 
 ### BEGIN INIT INFO
@@ -13,7 +13,7 @@
 # Default-Start: 2 3 4 5
 # Default-Stop: 0 1 6
 # Description:       RabbitMQ broker
-# Short-Description: Enable AMQP service provided by RabbitMQ broker
+# Short-Description: Manages RabbitMQ server
 ### END INIT INFO
 
 PATH=/sbin:/usr/sbin:/bin:/usr/bin
@@ -23,9 +23,8 @@ CONTROL=/usr/sbin/rabbitmqctl
 DESC="message broker"
 USER=rabbitmq
 ROTATE_SUFFIX=
-INIT_LOG_DIR=/var/log/rabbitmq
 PID_FILE=/var/run/rabbitmq/pid
-
+RABBITMQ_ENV=/usr/lib/rabbitmq/bin/rabbitmq-env
 
 test -x $DAEMON || exit 0
 test -x $CONTROL || exit 0
@@ -35,6 +34,9 @@ set -e
 
 [ -f /etc/default/${NAME} ] && . /etc/default/${NAME}
 
+RABBITMQ_SCRIPTS_DIR=$(dirname "$RABBITMQ_ENV")
+. "$RABBITMQ_ENV"
+
 . /lib/lsb/init-functions
 . /lib/init/vars.sh
 
@@ -60,7 +62,7 @@ start_rabbitmq () {
         set +e
         RABBITMQ_PID_FILE=$PID_FILE start-stop-daemon --quiet \
             --chuid rabbitmq --start --exec $DAEMON \
-            --pidfile "$RABBITMQ_PID_FILE" --background
+            --pidfile "$PID_FILE" --background
         $CONTROL wait $PID_FILE >/dev/null 2>&1
         RETVAL=$?
         set -e
@@ -76,7 +78,9 @@ stop_rabbitmq () {
     status_rabbitmq quiet
     if [ $RETVAL = 0 ] ; then
         set +e
-        $CONTROL stop ${PID_FILE} > ${INIT_LOG_DIR}/shutdown_log 2> ${INIT_LOG_DIR}/shutdown_err
+        $CONTROL stop ${PID_FILE} \
+            > ${RABBITMQ_LOG_BASE}/shutdown_log \
+            2> ${RABBITMQ_LOG_BASE}/shutdown_err
         RETVAL=$?
         set -e
         if [ $RETVAL = 0 ] ; then
@@ -143,7 +147,7 @@ start_stop_end() {
             RETVAL=0
             ;;
         *)
-            log_warning_msg "FAILED - check ${INIT_LOG_DIR}/startup_\{log, _err\}"
+            log_warning_msg "FAILED - check ${RABBITMQ_LOG_BASE}/startup_\{log, _err\}"
             log_end_msg 1
             ;;
     esac
diff --git a/debian/rabbitmq-server.install b/debian/rabbitmq-server.install
deleted file mode 100644 (file)
index 902f3dd..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-debian/ocf/rabbitmq-server /usr/lib/ocf/resource.d/rabbitmq/
-debian/rabbitmq-server-wait /usr/lib/rabbitmq/bin
-debian/rabbitmq-script-wrapper /usr/lib/rabbitmq/bin
-debian/rabbitmq-env.conf       /etc/rabbitmq
diff --git a/debian/rabbitmq-server.links b/debian/rabbitmq-server.links
deleted file mode 100644 (file)
index 0bfa1c5..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-/usr/lib/rabbitmq/bin/rabbitmq-script-wrapper /usr/sbin/rabbitmqctl
-/usr/lib/rabbitmq/bin/rabbitmq-script-wrapper /usr/sbin/rabbitmq-server
-/usr/lib/rabbitmq/bin/rabbitmq-script-wrapper /usr/sbin/rabbitmq-plugins
diff --git a/debian/rabbitmq-server.manpages b/debian/rabbitmq-server.manpages
new file mode 100644 (file)
index 0000000..f2ca632
--- /dev/null
@@ -0,0 +1,4 @@
+deps/rabbit/docs/rabbitmq-env.conf.5
+deps/rabbit/docs/rabbitmq-plugins.1
+deps/rabbit/docs/rabbitmq-server.1
+deps/rabbit/docs/rabbitmqctl.1
index faa73c1b4ff4db072910c291f9e5176f786b11c0..1aa6549b64b722e330b1210753f5b2687ed187ef 100644 (file)
@@ -1,15 +1,18 @@
+# systemd unit example
 [Unit]
-Description=RabbitMQ Messaging Server
-After=network.target
+Description=RabbitMQ broker
+After=network.target epmd@0.0.0.0.socket
+Wants=network.target epmd@0.0.0.0.socket
 
 [Service]
-Type=simple
+Type=notify
 User=rabbitmq
-SyslogIdentifier=rabbitmq
-LimitNOFILE=65536
-ExecStart=/usr/sbin/rabbitmq-server
-ExecStartPost=/usr/lib/rabbitmq/bin/rabbitmq-server-wait
-ExecStop=/usr/sbin/rabbitmqctl stop
+Group=rabbitmq
+NotifyAccess=all
+TimeoutStartSec=3600
+WorkingDirectory=/var/lib/rabbitmq
+ExecStart=/usr/lib/rabbitmq/bin/rabbitmq-server
+ExecStop=/usr/lib/rabbitmq/bin/rabbitmqctl stop
 
 [Install]
 WantedBy=multi-user.target
index bee4e81aaf2756e92e4d4bc3f6b3c70d28dc6839..f79dc5e3add8a9eb07091e35f026804a8ce8ee2c 100755 (executable)
@@ -1,10 +1,63 @@
 #!/usr/bin/make -f
 # -*- makefile -*-
+
+# Uncomment this to turn on verbose mode.
 #export DH_VERBOSE=1
 
+DEB_DESTDIR = debian/rabbitmq-server
+VERSION = $(shell dpkg-parsechangelog | awk '/^Version:/ {version=$$0; sub(/Version: /, "", version); sub(/-.*/, "", version); print version;}')
+
+unexport DEPS_DIR
+
 %:
        dh $@ --parallel --with systemd
 
+override_dh_auto_clean:
+       $(MAKE) clean clean-unpacked-source-dist distclean-manpages
+       rm -rf .erlang.mk
+
+override_dh_auto_build:
+       $(MAKE) dist manpages
+
+override_dh_auto_test:
+       @:
+
+export PREFIX RMQ_ROOTDIR
+
+override_dh_auto_install: PREFIX = /usr
+override_dh_auto_install: RMQ_ROOTDIR = $(PREFIX)/lib/rabbitmq
+override_dh_auto_install: RMQ_ERLAPP_DIR = $(RMQ_ROOTDIR)/lib/rabbitmq_server-$(VERSION)
+override_dh_auto_install:
+       dh_auto_install -- VERSION=$(VERSION)
+
+       $(MAKE) install-bin DESTDIR=$(DEB_DESTDIR) VERSION=$(VERSION)
+
+       sed -e 's|@RABBIT_LIB@|$(RMQ_ERLAPP_DIR)|g' \
+               < debian/postrm.in > debian/postrm
+
+       sed -e 's|@SU_RABBITMQ_SH_C@|su rabbitmq -s /bin/sh -c|' \
+               -e 's|@STDOUT_STDERR_REDIRECTION@|> "$$RABBITMQ_LOG_BASE/startup_log" 2> "$$RABBITMQ_LOG_BASE/startup_err"|' \
+               < scripts/rabbitmq-script-wrapper \
+               > $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl
+       chmod 0755 $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl
+       for script in rabbitmq-server rabbitmq-plugins; do \
+               cp -a $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl \
+                $(DEB_DESTDIR)$(PREFIX)/sbin/$$script; \
+       done
+
+       install -p -D -m 0644 debian/rabbitmq-server.default \
+               $(DEB_DESTDIR)/etc/default/rabbitmq-server
+
+       install -p -D -m 0755 scripts/rabbitmq-server.ocf \
+               $(DEB_DESTDIR)$(PREFIX)/lib/ocf/resource.d/rabbitmq/rabbitmq-server
+       install -p -D -m 0755 scripts/rabbitmq-server-ha.ocf \
+               $(DEB_DESTDIR)$(PREFIX)/lib/ocf/resource.d/fuel/rabbitmq-server-upstream
+
+       rm $(DEB_DESTDIR)$(RMQ_ERLAPP_DIR)/LICENSE* \
+               $(DEB_DESTDIR)$(RMQ_ERLAPP_DIR)/INSTALL
+
+       rmdir $(DEB_DESTDIR)$(PREFIX)/lib/erlang/lib \
+               $(DEB_DESTDIR)$(PREFIX)/lib/erlang
 override_dh_systemd_enable:
        dh_systemd_enable --no-enable
 
@@ -13,19 +66,3 @@ override_dh_systemd_start:
 
 override_dh_installinit:
        dh_installinit --noscripts
-
-
-DEB_UPSTREAM_VERSION=$(shell dpkg-parsechangelog | sed -rne 's,^Version: ([^+]+)-.*,\1,p')
-DEB_DESTDIR=debian/rabbitmq-server
-RABBIT_LIB=$(DEB_DESTDIR)/usr/lib/rabbitmq/lib/rabbitmq_server-$(DEB_UPSTREAM_VERSION)
-RABBIT_BIN=$(DEB_DESTDIR)/usr/lib/rabbitmq/bin
-DOCDIR=$(DEB_DESTDIR)/usr/share/doc/rabbitmq-server
-
-override_dh_auto_install:
-       dh_auto_install -- TARGET_DIR=$(RABBIT_LIB) SBIN_DIR=$(RABBIT_BIN) \
-               DOC_INSTALL_DIR=$(DOCDIR) MAN_DIR=$(DEB_DESTDIR)/usr/share/man
-       rm -f $(RABBIT_LIB)/LICENSE* $(RABBIT_LIB)/INSTALL*
-
-override_dh_auto_clean:
-       rm -f plugins-src/rabbitmq-server plugins/README
-       dh_auto_clean
index e41153d6127aea21226109334dc2289673df1b2d..b41aff9aedc1003866091af62a3d65c914776e34 100644 (file)
@@ -1,2 +1,4 @@
 version=3
-http://www.rabbitmq.com/releases/rabbitmq-server/v(.*)/rabbitmq-server-(\d.*)\.tar\.gz
+
+http://www.rabbitmq.com/releases/rabbitmq-server/v(.*)/rabbitmq-server-(\d.*)\.tar\.gz \
+       debian uupdate
diff --git a/deps/amqp_client/CODE_OF_CONDUCT.md b/deps/amqp_client/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
similarity index 72%
rename from rabbitmq-server/codegen/CONTRIBUTING.md
rename to deps/amqp_client/CONTRIBUTING.md
index 69a4b4a437fdf25c45c200610d780c7a009146be..45bbcbe62e74c1a8682d2097db8eec955d177b9c 100644 (file)
@@ -20,22 +20,9 @@ If what you are going to work on is a substantial change, please first ask the c
 of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
 
 
-## (Brief) Code of Conduct
+## Code of Conduct
 
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
 
 
 ## Contributor Agreement
diff --git a/deps/amqp_client/Makefile b/deps/amqp_client/Makefile
new file mode 100644 (file)
index 0000000..8e7399f
--- /dev/null
@@ -0,0 +1,164 @@
+PROJECT = amqp_client
+VERSION ?= $(call get_app_version,src/$(PROJECT).app.src)
+ifeq ($(VERSION),)
+VERSION = 0.0.0
+endif
+
+# Release artifacts are put in $(PACKAGES_DIR).
+PACKAGES_DIR ?= $(abspath PACKAGES)
+
+DEPS = rabbit_common
+TEST_DEPS = rabbitmq_ct_helpers rabbit
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-build.mk \
+             rabbit_common/mk/rabbitmq-dist.mk \
+             rabbit_common/mk/rabbitmq-run.mk \
+             rabbit_common/mk/rabbitmq-tools.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
+
+# --------------------------------------------------------------------
+# Distribution.
+# --------------------------------------------------------------------
+
+.PHONY: distribution
+
+distribution: docs source-dist package
+
+docs:: edoc
+edoc: doc/overview.edoc
+
+doc/overview.edoc: src/overview.edoc.in
+       mkdir -p doc
+       sed -e 's:%%VERSION%%:$(VERSION):g' < $< > $@
+
+.PHONY: source-dist clean-source-dist
+
+SOURCE_DIST_BASE ?= $(PROJECT)
+SOURCE_DIST_SUFFIXES ?= tar.xz zip
+SOURCE_DIST ?= $(PACKAGES_DIR)/$(SOURCE_DIST_BASE)-$(VERSION)-src
+
+# The first source distribution file is used by packages: if the archive
+# type changes, you must update all packages' Makefile.
+SOURCE_DIST_FILES = $(addprefix $(SOURCE_DIST).,$(SOURCE_DIST_SUFFIXES))
+
+.PHONY: $(SOURCE_DIST_FILES)
+
+source-dist: $(SOURCE_DIST_FILES)
+       @:
+
+RSYNC ?= rsync
+RSYNC_V_0 =
+RSYNC_V_1 = -v
+RSYNC_V_2 = -v
+RSYNC_V = $(RSYNC_V_$(V))
+RSYNC_FLAGS += -a $(RSYNC_V)           \
+              --exclude '.sw?' --exclude '.*.sw?'      \
+              --exclude '*.beam'                       \
+              --exclude '*.pyc'                        \
+              --exclude '.git*'                        \
+              --exclude '.hg*'                         \
+              --exclude '.travis.yml'                  \
+              --exclude '$(notdir $(ERLANG_MK_TMP))'   \
+              --exclude 'ebin'                         \
+              --exclude 'erl_crash.dump'               \
+              --exclude 'deps/'                        \
+              --exclude '$(notdir $(DEPS_DIR))/'       \
+              --exclude 'doc/'                         \
+              --exclude 'plugins/'                     \
+              --exclude '$(notdir $(DIST_DIR))/'       \
+              --exclude '/$(notdir $(PACKAGES_DIR))/'  \
+              --delete                                 \
+              --delete-excluded
+
+TAR ?= tar
+TAR_V_0 =
+TAR_V_1 = -v
+TAR_V_2 = -v
+TAR_V = $(TAR_V_$(V))
+
+GZIP ?= gzip
+BZIP2 ?= bzip2
+XZ ?= xz
+
+ZIP ?= zip
+ZIP_V_0 = -q
+ZIP_V_1 =
+ZIP_V_2 =
+ZIP_V = $(ZIP_V_$(V))
+
+.PHONY: $(SOURCE_DIST)
+
+$(SOURCE_DIST): $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+       $(verbose) mkdir -p $(dir $@)
+       $(gen_verbose) $(RSYNC) $(RSYNC_FLAGS) ./ $@/
+       $(verbose) sed -E -i.bak \
+               -e 's/[{]vsn[[:blank:]]*,[^}]+}/{vsn, "$(VERSION)"}/' \
+               $@/src/$(PROJECT).app.src && \
+               rm $@/src/$(PROJECT).app.src.bak
+       $(verbose) for dep in $$(cat $(ERLANG_MK_RECURSIVE_DEPS_LIST) | grep -v '/$(PROJECT)$$' | LC_COLLATE=C sort); do \
+               $(RSYNC) $(RSYNC_FLAGS) \
+                $$dep \
+                $@/deps; \
+               if test -f $@/deps/$$(basename $$dep)/erlang.mk && \
+                  test "$$(wc -l $@/deps/$$(basename $$dep)/erlang.mk | awk '{print $$1;}')" = "1" && \
+                  grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" $@/deps/$$(basename $$dep)/erlang.mk; then \
+                       echo "include ../../erlang.mk" > $@/deps/$$(basename $$dep)/erlang.mk; \
+               fi; \
+               sed -E -i.bak "s|^[[:blank:]]*include[[:blank:]]+\.\./.*erlang.mk$$|include ../../erlang.mk|" \
+                $@/deps/$$(basename $$dep)/Makefile && \
+               rm $@/deps/$$(basename $$dep)/Makefile.bak; \
+       done
+       $(verbose) for file in $$(find $@ -name '*.app.src'); do \
+               sed -E -i.bak -e 's/[{]vsn[[:blank:]]*,[[:blank:]]*""[[:blank:]]*}/{vsn, "$(VERSION)"}/' $$file; \
+               rm $$file.bak; \
+       done
+       $(verbose) echo "$(PROJECT) $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)" > $@/git-revisions.txt
+       $(verbose) for dep in $$(cat $(ERLANG_MK_RECURSIVE_DEPS_LIST)); do \
+               (cd $$dep; echo "$$(basename "$$dep") $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)") >> $@/git-revisions.txt; \
+       done
+       $(verbose) rm $@/README.in
+       $(verbose) cp README.in $@/README
+       $(verbose) cat "$(BUILD_DOC)" >> $@/README
+
+# TODO: Fix file timestamps to have reproducible source archives.
+# $(verbose) find $@ -not -name 'git-revisions.txt' -print0 | xargs -0 touch -r $@/git-revisions.txt
+
+$(SOURCE_DIST).tar.gz: $(SOURCE_DIST)
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \
+               $(GZIP) --best > $@
+
+$(SOURCE_DIST).tar.bz2: $(SOURCE_DIST)
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \
+               $(BZIP2) > $@
+
+$(SOURCE_DIST).tar.xz: $(SOURCE_DIST)
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \
+               $(XZ) > $@
+
+$(SOURCE_DIST).zip: $(SOURCE_DIST)
+       $(verbose) rm -f $@
+       $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \
+               find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \
+               xargs -0 $(ZIP) $(ZIP_V) $@
+
+clean:: clean-source-dist
+
+clean-source-dist:
+       $(gen_verbose) rm -rf -- $(SOURCE_DIST_BASE)-*
+
+package: dist
+       cp $(DIST_DIR)/*.ez $(PACKAGES_DIR)
diff --git a/deps/amqp_client/ci/test.sh b/deps/amqp_client/ci/test.sh
new file mode 100755 (executable)
index 0000000..a88f91c
--- /dev/null
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+set -ex
+
+SCRIPT=$0
+SCRIPT_DIR=$(cd $(dirname "$SCRIPT") && pwd)
+SRC_DIR=$(cd "$SCRIPT_DIR/.." && pwd)
+DEPS_DIR=$(cd "$SRC_DIR/.." && pwd)
+
+case $(uname -s) in
+FreeBSD) MAKE=gmake ;;
+*)       MAKE=make ;;
+esac
+
+(
+  cd "$SRC_DIR"
+  $MAKE dep_ranch="cp /ranch" DEPS_DIR="$DEPS_DIR" tests
+)
diff --git a/deps/amqp_client/ci/test.yml b/deps/amqp_client/ci/test.yml
new file mode 100644 (file)
index 0000000..1449b6a
--- /dev/null
@@ -0,0 +1,12 @@
+---
+platform: linux
+inputs:
+- name: amqp_client
+- name: rabbit_common
+- name: rabbit
+image_resource:
+  type: docker-image
+  source:
+    repository: pivotalrabbitmq/ci
+run:
+  path: amqp_client/ci/test.sh
diff --git a/deps/amqp_client/erlang.mk b/deps/amqp_client/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simply writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards and for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Erlang snippet: patch the dependency's compiled ebin/$1.app so that its
+# 'modules' entry lists every .erl actually present under src/ (rebar
+# projects often ship a stale or empty modules list).
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# Erlang snippet: evaluate a dynamic src/$1.app.src.script and write the
+# resulting term back as a plain src/$1.app.src so the rest of the build
+# can treat it as a static file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# Erlang snippet: normalise the dependency's .app.src — create it from
+# ebin/$1.app when missing, empty the modules list (dep_autopatch_app
+# refills it after compilation), stringify a '{vsn, git}' marker, and
+# ensure a 'registered' entry exists.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch method 'git': clone without checkout (-n), then check out the
+# pinned commit/branch/tag recorded in the dep_* variable.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Fetch method 'git-submodule': the dependency is tracked as a git
+# submodule of this repository; initialise and update it in place.
+# Consistency fix: use $(1) like every other dep_fetch_* macro instead of
+# bare $1 (identical semantics in GNU Make for single-character variables).
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$(1);
+endef
+
+# Fetch method 'hg': clone without working copy update (-U), then update
+# to the pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Fetch method 'svn': plain checkout of the configured repository URL.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Fetch method 'cp': recursively copy a local directory into DEPS_DIR.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Erlang snippet: download package $(1) version $(2) from the hex.pm S3
+# mirror and unpack the inner contents.tar.gz into the dependency dir.
+# NOTE(review): the tarball checksum is not verified here, and httpc is
+# called without explicit ssl_options — TLS peer verification depends on
+# the OTP defaults of the build host; confirm if this matters for you.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# The second word of dep_$(1) is the version string passed as $(2) to the
+# Erlang download snippet above.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback fetch method: report an unknown/invalid dependency and abort
+# the recipe with a distinctive exit code.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Old format: "dep_foo = <repo> [<ref>]" with no method word — always git,
+# defaulting to the master branch when no ref is given.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch method name for dependency $(1): the first word of
+# dep_$(1) when it names a known dep_fetch_* macro; 'legacy' for the old
+# two-word format (only while building as a dependency, IS_DEP set); the
+# package-index method for known packages; 'fail' otherwise.
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# Per-dependency rule template: fetch $(1) into DEPS_DIR, run
+# autoreconf/configure when autotools files are present, then — unless
+# the dep is listed in NO_AUTOPATCH — either apply the RabbitMQ-specific
+# patching (amqp_client/rabbit with the matching *_PATCH variable set)
+# or the generic dep_autopatch. Aborts early if an application of the
+# same name already exists under APPS_DIR.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate the fetch/patch rule above for every build-time and runtime dep.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse into each application directory for clean/distclean, only from
+# the top-level invocation (IS_APP=1 guards against infinite recursion).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+# distclean removes the whole fetched dependency tree.
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include plugin file $(1) from DEPS_DIR once dependency $(2) is fetched.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# "dep/path/file.mk" includes that file from the named dep; a bare "dep"
+# includes the dependency's plugins.mk.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+# DTL_FULL_PATH: when non-empty, template module names keep the
+# (flattened, '/'→'_') path under DTL_PATH instead of just the basename.
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet: compile each .dtl file passed in $(1) into a *$(DTL_SUFFIX)
+# module in ebin/ via erlydtl.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Only templates newer than the .app target ($?) are recompiled.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate .erl/.hrl from the given .proto files, compile the generated
+# sources into ebin/, then drop the intermediate .erl files.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Erlang snippet: run protobuffs source generation for each .proto in $(1).
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; a project Makefile may override ERLC_OPTS.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# <tag>_verbose_0 prints a short label at V=0; <tag>_verbose_2 traces the
+# recipe with `set -x` at V=2; V=1 leaves recipes echoed normally.
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# If ebin/test exists (appears to be left by a previous test build —
+# confirm against the test plugin), rebuild from clean so stale test
+# artifacts do not leak into the application build.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# Template for the generated ebin/$(PROJECT).app. $(1) is the dependency
+# id (only emitted when building as a dependency, IS_DEP set); $(2) is
+# the modules list. Without src/$(PROJECT_MOD).erl there is no callback
+# module, so 'mod' and registered processes are omitted.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# Compile ASN.1 specs, then move the generated sources and headers where
+# the rest of the build expects them (src/ and include/).
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile MIBs to priv/mibs/, then generate .hrl headers from the .bin.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang snippet: write $(1), a makefile fragment recording inter-module
+# dependencies. It parses each source file's attributes (behaviour,
+# parse_transform in compile options, include/include_lib, import) and
+# emits per-file prerequisites plus a COMPILE_FIRST ordering derived
+# from a topological sort of the module dependency graph.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Regenerate the .d dependency file via the makedep.erl escript, unless
+# NO_MAKEDEP is set and the file already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+	$(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+		touch -c $(PROJECT).d; \
+	fi
+	@touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+# Pull in the generated inter-module dependency rules, if present.
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+	$(verbose) mkdir -p ebin/
+
+# Compile Erlang sources into ebin/. -Werror is stripped when building as a
+# dependency (IS_DEP) so a dep's warnings cannot fail the parent build.
+define compile_erl
+	$(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+		-pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build the .app file: compile only the changed sources ($?), then either
+# generate the .app from the app_file template (no .app.src present) or
+# patch the {modules, []} and {id, "git"} entries of src/*.app.src.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+	$(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+	$(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+	$(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+	$(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+		> ebin/$(PROJECT).app
+else
+	$(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+		echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+		exit 1; \
+	fi
+	$(appsrc_verbose) cat src/$(PROJECT).app.src \
+		| sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+		| sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+		> ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove every artifact produced by the compile rules above.
+clean-app:
+	$(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+		$(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+		$(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+# Generate a fetch rule for each documentation-only dependency, then build
+# them all. With SKIP_DEPS set, doc-deps becomes a no-op.
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+# Generate a fetch rule for each release-only dependency, then build them
+# all. With SKIP_DEPS set, rel-deps becomes a no-op.
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+# Compile the test suites in place (beams land in TEST_DIR itself).
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+	$(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+		$(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases: no src/ at all; src/ present but ebin/ was last built without
+# TEST_ERLC_OPTS (full clean rebuild, then marked via the ebin/test stamp);
+# ebin/test already present (incremental rebuild only).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+	$(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Normalize ", " separators so $(foreach) splits options correctly.
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Drop -Werror; strip the leading '+' from erlc-style options for rebar.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+	$(if $(findstring +,$1),\
+		$(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Emit a rebar-compatible config: hex deps keep their version string, all
+# other fetch methods are rendered as {git, Repo, Commit} entries.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+	$(if $(filter hex,$(call dep_fetch,$d)),\
+		{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+		{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Exported through the environment so the shell can write it verbatim.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+	$(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the user guide (PDF + chunked HTML) only if its source exists.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+	a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+	a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build and install man pages only if manual sources exist.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+	for f in doc/src/manual/*.asciidoc ; do \
+		a2x -v -f manpage $$f ; \
+	done
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p doc/man$$s/ ; \
+		mv doc/src/manual/*.$$s doc/man$$s/ ; \
+		gzip doc/man$$s/*.$$s ; \
+	done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+		install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+	done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+	$(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# Append the bootstrap/template usage summary to the shared help output.
+help::
+	$(verbose) printf "%s\n" "" \
+		"Bootstrap targets:" \
+		"  bootstrap          Generate a skeleton of an OTP application" \
+		"  bootstrap-lib      Generate a skeleton of an OTP library" \
+		"  bootstrap-rel      Generate the files needed to build a release" \
+		"  new-app in=NAME    Create a new local OTP application NAME" \
+		"  new-lib in=NAME    Create a new local OTP library NAME" \
+		"  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+		"  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+		"  list-templates     List available templates"
+
+# Bootstrap templates.
+# $p (project name) and $n (module name) are set via $(eval) by the
+# bootstrap/new targets before a template is rendered.
+
+# Legacy .app.src for an OTP application (rendered only with LEGACY set).
+define bs_appsrc
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]},
+	{mod, {$p_app, []}},
+	{env, []}
+]}.
+endef
+
+# Legacy .app.src for a library application (no start module).
+define bs_appsrc_lib
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile for an application living under APPS_DIR of an umbrella project.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# Application behaviour module that starts the top-level supervisor.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+	$p_sup:start_link().
+
+stop(_State) ->
+	ok.
+endef
+
+# Default relx release configuration.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+# Rendered with 'make new t=TPL n=NAME'; $n is the target module name.
+
+# Minimal one_for_one supervisor with no children.
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+	supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+	Procs = [],
+	{ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# Skeleton gen_server with empty state and pass-through callbacks.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+	{ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+	{reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+	{noreply, State}.
+
+handle_info(_Info, State) ->
+	{noreply, State}.
+
+terminate(_Reason, _State) ->
+	ok.
+
+code_change(_OldVsn, State, _Extra) ->
+	{ok, State}.
+endef
+
+# Bare module with an empty export list.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy HTTP handler replying 200 to every request.
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+	{ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+	{ok, Req2} = cowboy_req:reply(200, Req),
+	{ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+# Skeleton gen_fsm with a single state_name state.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+	{ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+	{next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+	{next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+	{reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+	{reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+	{next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+	ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+	{ok, StateName, StateData}.
+endef
+
+# Cowboy loop handler with a 5s timeout and hibernation.
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+	{loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+	{loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+# Cowboy REST handler serving a static HTML response.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+	{upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+	{[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+	{<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy websocket handler echoing text and binary frames.
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+	{upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+	Req2 = cowboy_req:compact(Req),
+	{ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+	{reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+	{reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+	{ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+	{ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+# Ranch protocol skeleton: accept the connection then loop forever.
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+	socket :: inet:socket(),
+	transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+	Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+	{ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+	ok = ranch:accept_ack(Ref),
+	loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+	loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Write template $(1) to file $(2). The nested substs escape single quotes
+# and '%' for printf, map tabs to $(WS), and newlines to literal \n.
+define render_template
+	$(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace emitted for each template tab: SP spaces when SP is
+# set, otherwise a tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Generate a full OTP application skeleton in the current directory.
+bootstrap:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(eval n := $(PROJECT)_sup)
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+	$(call render_template,bs_app,src/$(PROJECT)_app.erl)
+	$(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Same as bootstrap, but for a library: no app module or supervisor.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config plus the rel/ sys.config and vm.args stubs.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+	$(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+	$(error Error: rel/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_relx_config,relx.config)
+	$(verbose) mkdir rel/
+	$(call render_template,bs_sys_config,rel/sys.config)
+	$(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application skeleton under APPS_DIR (umbrella layout).
+new-app:
+ifndef in
+	$(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(eval n := $(in)_sup)
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+	$(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+	$(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library skeleton under APPS_DIR.
+new-lib:
+ifndef in
+	$(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Render template t= as module n=, optionally recursing into app in=.
+new:
+ifeq ($(wildcard src/)$(in),)
+	$(error Error: src/ directory does not exist)
+endif
+ifndef t
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+	$(error Unknown template)
+endif
+ifndef n
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+	$(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# List every tpl_* template currently defined.
+list-templates:
+	$(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+# "shared" builds a NIF/driver library; anything else builds an executable.
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+	CC = /mingw64/bin/gcc
+	export CC
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+	LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+	CC ?= gcc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+	CFLAGS += -fPIC
+	CXXFLAGS += -fPIC
+endif
+
+# ERTS/erl_interface paths come from the generated env.mk included below.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# If c_src/ ships its own Makefile, delegate to it; otherwise compile all
+# C/C++ sources found under c_src/ using the pattern rules below.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+	$(MAKE) -C $(C_SRC_DIR)
+
+clean::
+	$(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+	$(verbose) mkdir -p priv/
+	$(link_verbose) $(CC) $(OBJECTS) \
+		$(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+		-o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+	$(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+	$(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# Generate env.mk by querying the local Erlang installation for the ERTS
+# and erl_interface paths, then include it so the CFLAGS above resolve.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+	$(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+		io_lib:format( \
+			\"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+			\"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+			\"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+			[code:root_dir(), erlang:system_info(version), \
+			code:lib_dir(erl_interface, include), \
+			code:lib_dir(erl_interface, lib)])), \
+		halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+	$(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# C skeleton for a NIF: load/upgrade/unload track the load count, and
+# hello/1 echoes an atom argument.
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+	/* Initialize private data. */
+	*priv_data = NULL;
+
+	loads++;
+
+	return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+	/* Convert the private data to the new version. */
+	*priv_data = *old_priv_data;
+
+	loads++;
+
+	return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+	if (loads == 1) {
+		/* Destroy the private data. */
+	}
+
+	loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+	if (enif_is_atom(env, argv[0])) {
+		return enif_make_tuple2(env,
+			enif_make_atom(env, "hello"),
+			argv[0]);
+	}
+
+	return enif_make_tuple2(env,
+		enif_make_atom(env, "error"),
+		enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+	{"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+# Erlang side of the NIF: loads the shared object from priv/ on module load.
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+	PrivDir = case code:priv_dir(?MODULE) of
+		{error, _} ->
+			AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+			filename:join(AppPath, "priv");
+		Path ->
+			Path
+	end,
+	erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+	erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Render both halves of a NIF skeleton, optionally inside app in=.
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+	$(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+	$(error Error: src/$n.erl already exists)
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+	$(verbose) mkdir -p $(C_SRC_DIR) src/
+	$(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+	$(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# NOTE(review): ci-prepare is not listed in .PHONY — confirm intentional.
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+# With CI_OTP empty, ci is a no-op; otherwise generate one ci-<otp> target
+# per requested Erlang version, each built/installed via kerl on demand.
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# Run the test suite with the given OTP install first in PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+	$(ci_verbose) \
+		PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+		CI_OTP_RELEASE="$(1)" \
+		CT_OPTS="-label $(1)" \
+		$(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# Build and install an OTP version from git if not already installed.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+	$(KERL) build git $(OTP_GIT) $(1) $(1)
+	$(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+	$(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+	$(verbose) chmod +x $(KERL)
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Continuous Integration targets:" \
+		"  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+		"" \
+		"The CI_OTP variable must be defined with the Erlang versions" \
+		"that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+	$(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+# Derive suite names from test/*_SUITE.erl unless CT_SUITES is preset.
+ifneq ($(wildcard $(TEST_DIR)),)
+	CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+	CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Common_test targets:" \
+		"  ct          Run all the common_test suites for this project" \
+		"" \
+		"All your common_test suites have their associated targets." \
+		"A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+	-no_auto_compile \
+	-noinput \
+	-pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+	-dir $(TEST_DIR) \
+	-logdir $(CURDIR)/logs
+
+# Run all suites; recurse into umbrella apps unless IS_APP marks this as
+# already being an app-level invocation.
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+	$(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=group or t=group:case narrows a ct-SUITE run to a group and/or case.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+# One ct-<suite> target per suite.
+define ct_suite_target
+ct-$(1): test-build
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+	$(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Dialyzer targets:" \
+		"  plt         Build a PLT file for this project" \
+		"  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Escript that keeps only the -D/-I/-pa options from the plain arguments,
+# so ERLC_OPTS can be forwarded to dialyzer on its command line.
+define filter_opts.erl
+	Opts = init:get_plain_arguments(),
+	{Filtered, _} = lists:foldl(fun
+		(O,                         {Os, true}) -> {[O|Os], false};
+		(O = "-D",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-I",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-pa",                 {Os, _})    -> {[O|Os], true};
+		(_,                         Acc)        -> Acc
+	end, {[], false}, Opts),
+	io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+	halt().
+endef
+
+$(DIALYZER_PLT): deps app
+	$(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+	$(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# The recipe below the endif attaches to whichever dialyze rule was parsed
+# last: plain dialyze when the PLT exists, or dialyze -> PLT otherwise.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+	$(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client.hrl
rename to deps/amqp_client/include/amqp_client.hrl
index bc74dd7de317e0c261d0539fb59d9d2aa8a58409..58ccc4985d562a127d90a17740572f96c2e3cc19 100644 (file)
@@ -29,7 +29,7 @@
                               port               = undefined,
                               channel_max        = 0,
                               frame_max          = 0,
-                              heartbeat          = 0,
+                              heartbeat          = 10,
                               connection_timeout = infinity,
                               ssl_options        = none,
                               auth_mechanisms    =
diff --git a/deps/amqp_client/include/amqp_gen_consumer_spec.hrl b/deps/amqp_client/include/amqp_gen_consumer_spec.hrl
new file mode 100644 (file)
index 0000000..c462a9c
--- /dev/null
@@ -0,0 +1,41 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%% License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2011-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-include("amqp_client.hrl").
+
+-type state() :: any().
+-type consume() :: #'basic.consume'{}.
+-type consume_ok() :: #'basic.consume_ok'{}.
+-type cancel() :: #'basic.cancel'{}.
+-type cancel_ok() :: #'basic.cancel_ok'{}.
+-type deliver() :: #'basic.deliver'{}.
+-type from() :: any().
+-type reason() :: any().
+-type ok_error() :: {ok, state()} | {error, reason(), state()}.
+
+-spec init([any()]) -> {ok, state()}.
+-spec handle_consume(consume(), pid(), state()) -> ok_error().
+-spec handle_consume_ok(consume_ok(), consume(), state()) ->
+                                  ok_error().
+-spec handle_cancel(cancel(), state()) -> ok_error().
+-spec handle_server_cancel(cancel(), state()) -> ok_error().
+-spec handle_cancel_ok(cancel_ok(), cancel(), state()) -> ok_error().
+-spec handle_deliver(deliver(), #amqp_msg{}, state()) -> ok_error().
+-spec handle_info(any(), state()) -> ok_error().
+-spec handle_call(any(), from(), state()) ->
+                           {reply, any(), state()} | {noreply, state()} |
+                            {error, reason(), state()}.
+-spec terminate(any(), state()) -> state().
diff --git a/deps/amqp_client/rabbitmq-components.mk b/deps/amqp_client/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround is:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#     target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_auth_mechanisms.erl
rename to deps/amqp_client/src/amqp_auth_mechanisms.erl
index 9192cad6f816ec00d21d8ee14d9e46ae80373333..5517811157feadea14f4c41fcd06863a4a477794 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel.erl
rename to deps/amqp_client/src/amqp_channel.erl
index 11217954b61fd52d5eb72a88be9371d2dc7b84d0..fd1e631cafe2268d0e4cd596c0dee5905cbf90fd 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @type close_reason(Type) = {shutdown, amqp_reason(Type)}.
@@ -560,7 +560,9 @@ handle_method_to_server(Method, AmqpMsg, From, Sender, Flow,
           check_block(Method, AmqpMsg, State)} of
         {ok, _, ok} ->
             State1 = case {Method, State#state.next_pub_seqno} of
-                         {#'confirm.select'{}, _} ->
+                         {#'confirm.select'{}, 0} ->
+                             %% The confirm seqno is set to 1 on the
+                             %% first confirm.select only.
                              State#state{next_pub_seqno = 1};
                          {#'basic.publish'{}, 0} ->
                              State;
similarity index 91%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup.erl
rename to deps/amqp_client/src/amqp_channel_sup.erl
index 8fc4d8fb7db4cff321a2de24c53ab2aa9230dbb8..547b9fda21629ed5061eb9049e707a3ad1a621be 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
@@ -37,7 +37,7 @@ start_link(Type, Connection, ConnName, InfraArgs, ChNumber,
                     Sup, {channel,
                           {amqp_channel, start_link,
                            [Type, Connection, ChNumber, ConsumerPid, Identity]},
-                          intrinsic, ?MAX_WAIT, worker, [amqp_channel]}),
+                          intrinsic, ?WORKER_WAIT, worker, [amqp_channel]}),
     Writer = start_writer(Sup, Type, InfraArgs, ConnName, ChNumber, ChPid),
     amqp_channel:set_writer(ChPid, Writer),
     {ok, AState} = init_command_assembler(Type),
@@ -60,7 +60,7 @@ start_writer(Sup, network, [Sock, FrameMax], ConnName, ChNumber, ChPid) ->
                      {writer, {rabbit_writer, start_link,
                                [Sock, ChNumber, FrameMax, ?PROTOCOL, ChPid,
                                 {ConnName, ChNumber}]},
-                      intrinsic, ?MAX_WAIT, worker, [rabbit_writer]}),
+                      transient, ?WORKER_WAIT, worker, [rabbit_writer]}),
     Writer.
 
 init_command_assembler(direct)  -> {ok, none};
@@ -74,4 +74,4 @@ init([{ConsumerModule, ConsumerArgs}, Identity]) ->
     {ok, {{one_for_all, 0, 1},
           [{gen_consumer, {amqp_gen_consumer, start_link,
                            [ConsumerModule, ConsumerArgs, Identity]},
-           intrinsic, ?MAX_WAIT, worker, [amqp_gen_consumer]}]}}.
+           intrinsic, ?WORKER_WAIT, worker, [amqp_gen_consumer]}]}}.
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup_sup.erl
rename to deps/amqp_client/src/amqp_channel_sup_sup.erl
index 6d461e1f743122b35123bb0b145b08762e96963b..a206c9f4b7525eb18c5a45d71c233d3d095c9b41 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channels_manager.erl
rename to deps/amqp_client/src/amqp_channels_manager.erl
index b260cd265cbd3c102cae3753f308b3d89e74b362..ceb223a64af926baa01b17e20b7f5da1b09ef690 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
similarity index 72%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/ebin/amqp_client.app.in
rename to deps/amqp_client/src/amqp_client.app.src
index c9c96add7789acf2ac4e81290ead1cedd7c35362..959e58c2ac1f8c4fcffa2d7bbd13da4ffbd36c61 100644 (file)
@@ -1,9 +1,9 @@
 {application, amqp_client,
  [{description, "RabbitMQ AMQP Client"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, [amqp_sup]},
   {env, [{prefer_ipv6, false},
          {ssl_options, []}]},
   {mod, {amqp_client, []}},
-  {applications, [kernel, stdlib, xmerl]}]}.
+  {applications, [kernel, stdlib, xmerl, rabbit_common]}]}.
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_client.erl
rename to deps/amqp_client/src/amqp_client.erl
index 83905d09a82650c5f88e6ca46cbdd37f0597547c..0e0d1418eb47d034f08c34c1a70fe31fa3a8d1d6 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
@@ -27,6 +27,7 @@
 %%---------------------------------------------------------------------------
 
 start() ->
+    application:start(rabbit_common),
     application:start(amqp_client).
 
 %%---------------------------------------------------------------------------
similarity index 85%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection.erl
rename to deps/amqp_client/src/amqp_connection.erl
index 371b22567adbac9be282845838cf5069941d2929..0dc0b7076c883af97a883daef60a9069c56d9129 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @type close_reason(Type) = {shutdown, amqp_reason(Type)}.
 -include("amqp_client_internal.hrl").
 
 -export([open_channel/1, open_channel/2, open_channel/3, register_blocked_handler/2]).
--export([start/1, close/1, close/2, close/3]).
+-export([start/1, start/2, close/1, close/2, close/3, close/4]).
 -export([error_atom/1]).
 -export([info/2, info_keys/1, info_keys/0]).
+-export([connection_name/1]).
 -export([socket_adapter_info/2]).
 
 -define(DEFAULT_CONSUMER, {amqp_selective_consumer, []}).
 %% where
 %%      Params = amqp_params_network() | amqp_params_direct()
 %%      Connection = pid()
+%% @doc same as {@link amqp_connection:start/2. start(Params, undefined)}
+start(AmqpParams) ->
+    start(AmqpParams, undefined).
+
+%% @spec (Params, ConnectionName) -> {ok, Connection} | {error, Error}
+%% where
+%%      Params = amqp_params_network() | amqp_params_direct()
+%%      ConnectionName = undefined | binary()
+%%      Connection = pid()
 %% @doc Starts a connection to an AMQP server. Use network params to
 %% connect to a remote AMQP server or direct params for a direct
 %% connection to a RabbitMQ server, assuming that the server is
 %% running in the same process space.  If the port is set to 'undefined',
 %% the default ports will be selected depending on whether this is a
 %% normal or an SSL connection.
-start(AmqpParams) ->
+%% If ConnectionName is a binary, it will be added to client_properties
+%% as the user-specified connection name.
+start(AmqpParams, ConnName) when ConnName == undefined; is_binary(ConnName) ->
     ensure_started(),
     AmqpParams1 =
         case AmqpParams of
@@ -158,9 +170,24 @@ start(AmqpParams) ->
             _ ->
                 AmqpParams
         end,
-    {ok, _Sup, Connection} = amqp_sup:start_connection_sup(AmqpParams1),
+    AmqpParams2 = set_connection_name(ConnName, AmqpParams1),
+    {ok, _Sup, Connection} = amqp_sup:start_connection_sup(AmqpParams2),
     amqp_gen_connection:connect(Connection).
 
+set_connection_name(undefined, Params) -> Params;
+set_connection_name(ConnName, 
+                    #amqp_params_network{client_properties = Props} = Params) ->
+    Params#amqp_params_network{
+        client_properties = [
+            {<<"connection_name">>, longstr, ConnName} | Props
+        ]};
+set_connection_name(ConnName, 
+                    #amqp_params_direct{client_properties = Props} = Params) ->
+    Params#amqp_params_direct{
+        client_properties = [
+            {<<"connection_name">>, longstr, ConnName} | Props
+        ]}.
+
 %% Usually the amqp_client application will already be running. We
 %% check whether that is the case by invoking an undocumented function
 %% which does not require a synchronous call to the application
@@ -168,16 +195,16 @@ start(AmqpParams) ->
 %% application controller is in the process of shutting down the very
 %% application which is making this call.
 ensure_started() ->
-    [ensure_started(App) || App <- [xmerl, amqp_client]].
+    [ensure_started(App) || App <- [xmerl, rabbit_common, amqp_client]].
 
 ensure_started(App) ->
-    case application_controller:get_master(App) of
-        undefined -> case application:start(App) of
-                         ok                              -> ok;
-                         {error, {already_started, App}} -> ok;
-                         {error, _} = E                  -> throw(E)
-                     end;
-        _         -> ok
+    case is_pid(application_controller:get_master(App)) andalso amqp_sup:is_ready() of
+        true  -> ok;
+        false -> case application:start(App) of
+                     ok                              -> ok;
+                     {error, {already_started, App}} -> ok;
+                     {error, _} = E                  -> throw(E)
+                 end
     end.
 
 %%---------------------------------------------------------------------------
@@ -342,3 +369,18 @@ info_keys() ->
 %% based on the socket for the protocol given.
 socket_adapter_info(Sock, Protocol) ->
     amqp_direct_connection:socket_adapter_info(Sock, Protocol).
+
+%% @spec (ConnectionPid) -> ConnectionName
+%% where
+%%      ConnectionPid = pid()
+%%      ConnectionName = binary()
+%% @doc Returns user specified connection name from client properties
+connection_name(ConnectionPid) ->
+    ClientProperties = case info(ConnectionPid, [amqp_params]) of
+        [{_, #amqp_params_network{client_properties = Props}}] -> Props;
+        [{_, #amqp_params_direct{client_properties = Props}}] -> Props
+    end,
+    case lists:keyfind(<<"connection_name">>, 1, ClientProperties) of
+        {<<"connection_name">>, _, ConnName} -> ConnName;
+        false                                -> undefined
+    end.
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_sup.erl
rename to deps/amqp_client/src/amqp_connection_sup.erl
index 7bc8a2d225e445abc8a52b521e848d8b0aa8ca7e..be9da63b6b87346233aec21b8482cd46fbc1e44a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
similarity index 89%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_type_sup.erl
rename to deps/amqp_client/src/amqp_connection_type_sup.erl
index 58023757c51709e8abfe3c62c088a3113d402a6a..636e81a4814bf3231131323acb15b268c98e02fc 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
@@ -48,7 +48,7 @@ start_channels_manager(Sup, Conn, ConnName, Type) ->
                 Sup,
                 {channels_manager, {amqp_channels_manager, start_link,
                                     [Conn, ConnName, ChSupSup]},
-                 transient, ?MAX_WAIT, worker, [amqp_channels_manager]}).
+                 transient, ?WORKER_WAIT, worker, [amqp_channels_manager]}).
 
 start_infrastructure_fun(Sup, Conn, network) ->
     fun (Sock, ConnName) ->
@@ -60,13 +60,13 @@ start_infrastructure_fun(Sup, Conn, network) ->
                   {writer,
                    {rabbit_writer, start_link,
                     [Sock, 0, ?FRAME_MIN_SIZE, ?PROTOCOL, Conn, ConnName]},
-                   transient, ?MAX_WAIT, worker, [rabbit_writer]}),
+                   transient, ?WORKER_WAIT, worker, [rabbit_writer]}),
             {ok, _Reader} =
                 supervisor2:start_child(
                   Sup,
                   {main_reader, {amqp_main_reader, start_link,
                                  [Sock, Conn, ChMgr, AState, ConnName]},
-                   transient, ?MAX_WAIT, worker, [amqp_main_reader]}),
+                   transient, ?WORKER_WAIT, worker, [amqp_main_reader]}),
             {ok, ChMgr, Writer}
     end;
 start_infrastructure_fun(Sup, Conn, direct) ->
@@ -76,7 +76,7 @@ start_infrastructure_fun(Sup, Conn, direct) ->
                 supervisor2:start_child(
                   Sup,
                   {collector, {rabbit_queue_collector, start_link, [ConnName]},
-                   transient, ?MAX_WAIT, worker, [rabbit_queue_collector]}),
+                   transient, ?WORKER_WAIT, worker, [rabbit_queue_collector]}),
             {ok, ChMgr, Collector}
     end.
 
similarity index 93%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_connection.erl
rename to deps/amqp_client/src/amqp_direct_connection.erl
index 5cd7df73872ea71c71e80ce7310eb12860b221c9..15491b82786ce436690ff55fb8edc3740add6449 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
@@ -108,6 +108,7 @@ i(port,         #state{adapter_info = I}) -> I#amqp_adapter_info.port;
 i(peer_host,    #state{adapter_info = I}) -> I#amqp_adapter_info.peer_host;
 i(peer_port,    #state{adapter_info = I}) -> I#amqp_adapter_info.peer_port;
 i(name,         #state{adapter_info = I}) -> I#amqp_adapter_info.name;
+i(internal_user, #state{user = U}) -> U;
 
 i(Item, _State) -> throw({bad_argument, Item}).
 
@@ -130,7 +131,8 @@ connect(Params = #amqp_params_direct{username     = Username,
                          vhost        = VHost,
                          params       = Params,
                          adapter_info = ensure_adapter_info(Info),
-                         connected_at = rabbit_misc:now_to_ms(os:timestamp())},
+                         connected_at =
+                           time_compat:os_system_time(milli_seconds)},
     case rpc:call(Node, rabbit_direct, connect,
                   [{Username, Password}, VHost, ?PROTOCOL, self(),
                    connection_info(State1)]) of
@@ -192,9 +194,14 @@ maybe_ssl_info(Sock) ->
 ssl_info(Sock) ->
     {Protocol, KeyExchange, Cipher, Hash} =
         case rabbit_net:ssl_info(Sock) of
-            {ok, {P, {K, C, H}}}    -> {P, K, C, H};
-            {ok, {P, {K, C, H, _}}} -> {P, K, C, H};
-            _                       -> {unknown, unknown, unknown, unknown}
+            {ok, Infos} ->
+                {_, P} = lists:keyfind(protocol, 1, Infos),
+                case lists:keyfind(cipher_suite, 1, Infos) of
+                    {_,{K, C, H}}    -> {P, K, C, H};
+                    {_,{K, C, H, _}} -> {P, K, C, H}
+                end;
+            _           ->
+                {unknown, unknown, unknown, unknown}
         end,
     [{ssl_protocol,     Protocol},
      {ssl_key_exchange, KeyExchange},
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_consumer.erl
rename to deps/amqp_client/src/amqp_direct_consumer.erl
index 34b14239f1297b3b95ea6e70e03e0572138943e7..793bfa3b866c1c2caac473511af17babd0753c8b 100644 (file)
@@ -93,6 +93,9 @@ handle_deliver(M, A, DeliveryCtx, C) ->
 
 
 %% @private
+handle_info({'DOWN', _MRef, process, C, normal}, C) ->
+    %% The channel was closed.
+    {ok, C};
 handle_info({'DOWN', _MRef, process, C, Info}, C) ->
     {error, {consumer_died, Info}, C};
 handle_info({'DOWN', MRef, process, Pid, Info}, C) ->
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_connection.erl
rename to deps/amqp_client/src/amqp_gen_connection.erl
index 55618ac8be6f254b3d5ee1b9800aa75cfb5f77a4..2829248f0fd530a6304d9a0f19a79ffecb024ff0 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_main_reader.erl
rename to deps/amqp_client/src/amqp_main_reader.erl
index b8e4ff962be0928af1900078f938d9061343baeb..33a69471c71eed2dfa0d1aaf2f0ea09a4560213a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_network_connection.erl
rename to deps/amqp_client/src/amqp_network_connection.erl
index 5edb44b4587303e7ccd2d38c0ba91b91a76e3e24..c08e9dbe4f1dd1d56c5be5a4bf515cee1cafcfc0 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
@@ -149,11 +149,10 @@ do_connect({Addr, Family},
                           orddict:merge(fun (_, _A, B) -> B end,
                                         orddict:from_list(GlobalSslOpts),
                                         orddict:from_list(SslOpts0)))),
-            case ssl:connect(Sock, SslOpts) of
+            case ssl:connect(Sock, SslOpts, Timeout) of
                 {ok, SslSock} ->
-                    RabbitSslSock = #ssl_socket{ssl = SslSock, tcp = Sock},
                     try_handshake(AmqpParams, SIF,
-                                  State#state{sock = RabbitSslSock});
+                                  State#state{sock = SslSock});
                 {error, _} = E ->
                     E
             end;
@@ -304,7 +303,7 @@ client_properties(UserProperties) ->
                {<<"version">>,   longstr, list_to_binary(Vsn)},
                {<<"platform">>,  longstr, <<"Erlang">>},
                {<<"copyright">>, longstr,
-                <<"Copyright (c) 2007-2015 Pivotal Software, Inc.">>},
+                <<"Copyright (c) 2007-2016 Pivotal Software, Inc.">>},
                {<<"information">>, longstr,
                 <<"Licensed under the MPL.  "
                   "See http://www.rabbitmq.com/">>},
@@ -323,6 +322,8 @@ handshake_recv(Expecting) ->
                     Method;
                 {'connection.tune', 'connection.close'} ->
                     Method;
+                {'connection.open_ok', 'connection.close'} ->
+                    exit(get_reason(Method));
                 {'connection.open_ok', _} ->
                     {closing,
                      #amqp_error{name        = command_invalid,
@@ -369,3 +370,6 @@ obtain() ->
         false -> ok;
         _     -> file_handle_cache:obtain()
     end.
+
+get_reason(#'connection.close'{reply_code = ErrCode}) ->
+    ?PROTOCOL:amqp_exception(ErrCode).
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_client.erl
rename to deps/amqp_client/src/amqp_rpc_client.erl
index c5bed0d37d629910a1b2d05bb484d9b931a1d732..6fadba8a5147d9f63940ac36aa96a26dd153146a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @doc This module allows the simple execution of an asynchronous RPC over
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_server.erl
rename to deps/amqp_client/src/amqp_rpc_server.erl
index 14525362d7252994b982854a7e47efdbc5104886..25be89e058b6f2a007ae91aeed220f4b9e683696 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @doc This is a utility module that is used to expose an arbitrary function
similarity index 88%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_sup.erl
rename to deps/amqp_client/src/amqp_sup.erl
index 9c928d55641259978a210d7655db14d9f0340841..1a02981bfb450162b979648e750cac8e5b23c8b0 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% @private
@@ -21,7 +21,7 @@
 
 -behaviour(supervisor2).
 
--export([start_link/0, start_connection_sup/1]).
+-export([start_link/0, is_ready/0, start_connection_sup/1]).
 -export([init/1]).
 
 %%---------------------------------------------------------------------------
@@ -31,6 +31,9 @@
 start_link() ->
     supervisor2:start_link({local, amqp_sup}, ?MODULE, []).
 
+is_ready() ->
+    whereis(amqp_sup) =/= undefined.
+
 start_connection_sup(AmqpParams) ->
     supervisor2:start_child(amqp_sup, [AmqpParams]).
 
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_uri.erl
rename to deps/amqp_client/src/amqp_uri.erl
index 04446b02265ed1c5fd3886cb64bd0a3677c0d67b..95447aba53af270d5ad089f7f301d1e7fb974b45 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(amqp_uri).
@@ -60,8 +60,8 @@ remove_credentials(URI) ->
 %% The extra parameters that may be specified are channel_max,
 %% frame_max, heartbeat and auth_mechanism (the latter can appear more
 %% than once).  The extra parameters that may be specified for an SSL
-%% connection are cacertfile, certfile, keyfile, verify, and
-%% fail_if_no_peer_cert.
+%% connection are cacertfile, certfile, keyfile, verify,
+%% fail_if_no_peer_cert, password, and depth.
 parse(Uri) -> parse(Uri, <<"/">>).
 
 parse(Uri, DefaultVHost) ->
@@ -89,6 +89,8 @@ parse1(_, _DefaultVHost) ->
 
 unescape_string(Atom) when is_atom(Atom) ->
     Atom;
+unescape_string(Integer) when is_integer(Integer) ->
+    Integer;
 unescape_string([]) ->
     [];
 unescape_string([$%, N1, N2 | Rest]) ->
@@ -170,7 +172,9 @@ build_ssl_broker(ParsedUri, DefaultVHost) ->
                        {fun find_path_parameter/1,    certfile},
                        {fun find_path_parameter/1,    keyfile},
                        {fun find_atom_parameter/1,    verify},
-                       {fun find_boolean_parameter/1, fail_if_no_peer_cert}]],
+                       {fun find_boolean_parameter/1, fail_if_no_peer_cert},
+                       {fun find_identity_parameter/1, password},
+                       {fun find_integer_parameter/1,  depth}]],
           []),
     Params#amqp_params_network{ssl_options = SSLOptions}.
 
@@ -207,14 +211,23 @@ broker_add_query(Params, ParsedUri, Fields) ->
 parse_amqp_param(Field, String) when Field =:= channel_max        orelse
                                      Field =:= frame_max          orelse
                                      Field =:= heartbeat          orelse
-                                     Field =:= connection_timeout ->
-    try return(list_to_integer(String))
-    catch error:badarg -> fail({not_an_integer, String})
-    end;
+                                     Field =:= connection_timeout orelse
+                                     Field =:= depth ->
+    find_integer_parameter(String);
+parse_amqp_param(Field, String) when Field =:= password ->
+    find_identity_parameter(String);
 parse_amqp_param(Field, String) ->
     fail({parameter_unconfigurable_in_query, Field, String}).
 
-find_path_parameter(Value) -> return(Value).
+find_path_parameter(Value) ->
+    find_identity_parameter(Value).
+
+find_identity_parameter(Value) -> return(Value).
+
+find_integer_parameter(Value) ->
+    try return(list_to_integer(Value))
+    catch error:badarg -> fail({not_an_integer, Value})
+    end.
 
 find_boolean_parameter(Value) ->
     Bool = list_to_atom(Value),
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/rabbit_routing_util.erl
rename to deps/amqp_client/src/rabbit_routing_util.erl
index 7daa7287bfaa44a03c7bb197e094332eb20d713b..537dfc63a44e5033e54a7eff59ef6942216ae69f 100644 (file)
@@ -98,7 +98,7 @@ ensure_endpoint(_Dir, _Channel, {queue, undefined}, _Params, State) ->
     {ok, undefined, State};
 
 ensure_endpoint(_, Channel, {queue, Name}, Params, State) ->
-    Params1 = rabbit_misc:pset(durable, true, Params),
+    Params1 = rabbit_misc:pmerge(durable, true, Params),
     Queue = list_to_binary(Name),
     State1 = case sets:is_element(Queue, State) of
                  true -> State;
diff --git a/deps/cowboy/AUTHORS b/deps/cowboy/AUTHORS
new file mode 100644 (file)
index 0000000..1cc04b4
--- /dev/null
@@ -0,0 +1,91 @@
+Cowboy is available thanks to the work of:
+
+Loïc Hoguin
+Magnus Klaar
+Ali Sabil
+Anthony Ramine
+Adam Cammack
+Tom Burdick
+James Fish
+Paul Oliver
+Slava Yurin
+Vladimir Dronnikov
+YAMAMOTO Takashi
+Yurii Rashkovskii
+Andrew Majorov
+Eduardo Gurgel
+Egobrain
+Josh Toft
+Steven Gravell
+Alex Kovalev
+Andrew Thompson
+Hunter Morris
+Ivan Lisenkov
+Martin Rehfeld
+Matthias Endler
+Seletskiy Stanislav
+Sina Samavati
+Tristan Sloughter
+0x00F6
+0xAX
+Adam Cammack
+Adrian Roe
+Alexander Færøy
+Alexei Sholik
+Alexey Lebedeff
+Andre Graf
+Andrzej Sliwa
+Blake Gentry
+Bob Ippolito
+Boris Faure
+Cameron Bytheway
+Cristian Hancila
+Daniel White
+Danielle Sucher
+Dave Peticolas
+David Kelly
+David N. Welton
+DeadZen
+Dmitry Groshev
+Drew
+Drew Varner
+Eiichi Tsukata
+Fred Hebert
+Hans Ulrich Niedermann
+Ivan Blinkov
+Jeremy Ong
+Jesper Louis Andersen
+Josh Allmann
+Josh Marchán
+José Valim
+Julian Squires
+Klaus Trainer
+Kuk-Hyun Lee
+Mathieu Lecarme
+Max Lapshin
+Michael Truog
+Michiel Hakvoort
+Nakai Ryosuke
+Ori Bar
+Pablo Vieytes
+Paulo Oliveira
+Peter Ericson
+RJ
+Radosław Szymczyszyn
+Richard Ramsden
+Roberto Ostinelli
+Sergey Prokhorov
+Sergey Rublev
+Sergey Urbanovich
+Seven Du
+Thomas Nordström
+Tim Dysinger
+Tomas Morstein
+Unix1
+alisdair sullivan
+dbmercer
+derdesign
+mocchira
+pmyarchon
+rambocoder
+serge
diff --git a/deps/cowboy/CHANGELOG.md b/deps/cowboy/CHANGELOG.md
new file mode 100644 (file)
index 0000000..edeb748
--- /dev/null
@@ -0,0 +1,910 @@
+CHANGELOG
+=========
+
+1.0.3
+-----
+
+ *  Add CircleCI for automated testing across all supported Erlang versions
+
+ *  Only send the Connection header when necessary
+
+ *  Send Connection: keep-alive when HTTP/1.0 connections are kept open
+
+ *  Fix a typo in a return value of cowboy_http:asctime_date
+
+ *  Fix Dialyzer warnings in cowboy_clock
+
+1.0.2
+-----
+
+ *  Fix cowboy_clock inbox overflow when system clock changes (but see 1.0.3)
+
+ *  Fix stacktrace shown on REST handler crashes
+
+ *  Reply with 400 on if-match and if-not-match parsing crash
+
+ *  Improve static_world example by adding index.html file
+
+1.0.1
+-----
+
+ *  Add missing cowboy_req:part/2 export
+
+ *  Fix an issue building dependencies on FreeBSD
+
+ *  Improve support for requests with an absolute URI
+
+ *  Small fixes to the documentation
+
+1.0.0
+-----
+
+ *  Drop R15 support
+
+ *  Update erlang.mk, Ranch and Cowlib to 1.0.0
+
+ *  Complete the user guide and simplify the Getting started chapter
+
+ *  Document the HTTP status codes Cowboy may send in the manual
+
+ *  Provide installable man pages (see README)
+
+ *  Support ad-hoc keep-alive for HTTP/1.0 clients
+
+ *  Fix SPDY parsing error when many frames were received in one packet
+
+ *  Reply with 400 instead of 422 in cowboy_rest
+
+ *  Reply with 400 instead of 500 on header parsing crash
+
+ *  Remove deprecated body reading interface (see 0.10.0 notes)
+
+0.10.0
+------
+
+ *  Update Ranch to 0.10 and Cowlib to 0.6.2
+
+ *  Update the body reading API to allow controlling rate of transfer
+
+    The lack of this feature was causing various timeout issues
+    in some environments.
+
+    The body/2 function now takes a Req and a list of options. The older
+    interface can still be used until Cowboy 1.0.
+
+    The body_qs/2, part/2 and part_body/2 also accept this list of
+    options, and pass it down to the body/2 call. The default options
+    vary between the different functions.
+
+    The return value of the function has changed. Older code should work
+    without modification but you should definitely still test it.
+
+    All functions appropriately decode transfer and content encoding.
+    There is no need to have a special case for that anymore.
+
+    The body/1,2 function supports streaming, with the same interface
+    as the part_body/1,2 function.
+
+ *  Deprecate the cowboy_req:init_stream, stream_body and skip_body functions
+
+    They will be removed in Cowboy 1.0.
+
+ *  Add support for multipart
+
+    The cowboy_req:part/1,2 and cowboy_req:part_body/1,2 can be
+    used for reading a multipart body.
+
+    Documentation has been added.
+
+    The old undocumented multipart functions were removed.
+
+ *  Allow the onresponse hook to override only status and headers
+
+    Previously it wasn't possible to override them without also
+    overriding the body. The new return value is currently marked
+    as experimental.
+
+ *  Make loop handlers work with SPDY
+
+ *  Fix a race condition with loop handlers and keepalive requests
+
+ *  Fix parsing of accept-language header
+
+ *  Fix parsing of authorization header with empty passwords
+
+ *  Fix multiline headers handling
+
+ *  Various optimizations
+
+    All code that is moved to cowlib is optimized at the same time
+    and benchmarks get added for all functions.
+
+    The parsing of connection, content-length and transfer-encoding
+    has been optimized.
+
+    Chunked transfer decoding has been optimized.
+
+ *  Enable +warn_missing_spec by default and fix specs
+
+ *  Remove the non-documented cowboy_client module; use gun instead
+
+ *  Numerous documentation updates and tweaks
+
+    The guide now has a REST principles chapter and revised
+    and completed Websocket chapters, alongside a new multipart chapter.
+
+ *  Add a multipart file upload example
+
+ *  Test suites are being reworked and greatly improved
+   *  Test Cowboy across all releases from R15B01 to 17.0, plus maint and master
+   *  The Autobahn test suite is now always run (as long as it's installed)
+   *  Expected error reports are now silenced (but still logged)
+   *  Applications are now only started once from a ct hook
+
+0.9.0
+-----
+
+ *  Update Ranch to 0.9.0
+
+ *  SPDY is no longer experimental and is documented
+
+    The SPDY development has been sponsored by the LeoFS project.
+
+ *  Review, improve and document cowboy_static
+   *  Much simplified configuration
+   *  Etag generation is now enabled by default
+   *  Web mimetypes are now detected by default
+   *  Optionally a huge list of mimetypes can also be used
+   *  It does not try to magically find the priv directory anymore; use ERL_LIBS
+
+ *  Remove the pretty printing of errors
+
+    Cowboy will no longer print errors, it will instead let the process
+    crash properly, so that links can work with Cowboy. Ranch will catch
+    errors and print a one-liner containing all the error information
+    instead.
+
+ *  Trigger a human readable error when routes lack the starting slash
+
+ *  Add websocket_compress metadata
+
+ *  Fix parsing of hosts given as IPv6 addresses
+
+ *  Fix the decoding of chunked bodies
+
+ *  Fix handling of close, ping and pong Websocket replies
+
+ *  Fix the x-webkit-deflate-frame Websocket extension
+
+ *  Fix PUT behavior in cowboy_rest when creating a resource at the request URL
+
+ *  Fix warnings with the reltool GUI
+
+ *  Start moving code in a new dependency, cowlib
+
+    The code moved there is mostly parsing code and utility functions.
+    Most of it was in the undocumented cowboy_http module before.
+
+ *  Optimize cookie date building and query string building/parsing
+
+ *  Great number of improvements and additions to the user guide
+
+ *  Convert all examples to releases using the erlang.mk+relx combo
+
+    Some examples have also been fixed or slightly improved.
+    The elixir example is now on a separate repository maintained
+    independently. More examples in this language exist in that
+    other repository.
+
+0.8.6
+-----
+
+ *  Make sure Cowboy compiles on R16B01
+
+ *  Update Ranch to 0.8.4
+
+ *  Add experimental support for the x-webkit-deflate-frame Websocket extension
+
+    This allows Cowboy to handle compressed Websocket frames,
+    lowering the amount of data that needs to be sent over the
+    socket.
+
+    The extension will only be used if compression was enabled
+    using the `compress` protocol option.
+
+ *  Add experimental SPDY support
+
+    SPDY is a new protocol implemented by most browsers. It is
+    the basis for what will become HTTP/2.0.
+
+    To use SPDY, you need to call `start_spdy` where you would
+    have used `start_https` before.
+
+    This protocol is still incomplete. It cannot accept request
+    bodies yet, making most methods other than GET and HEAD
+    not too useful at this point.
+
+ *  Allow an empty method list in allowed_methods
+
+ *  The charset parameter of content-types is now always lowercase
+
+ *  Don't overwrite the stacktrace when a REST handler crashes
+
+ *  Don't crash when the Cookie header is empty
+
+ *  Don't crash on invalid Accept-Encoding header when replying
+
+0.8.5
+-----
+
+ *  Add the Cowboy Function Reference
+
+    Everything documented in the function reference is the API
+    that will make it to Cowboy 1.0.
+
+ *  Use erlang.mk
+
+    The project is of course still compatible with rebar
+    and can be used as a dependency just fine.
+
+ *  Update Ranch to 0.8.3
+
+ *  Remove cowboy_req:fragment/1
+
+    No well-written client is sending the fragment with the URL.
+
+ *  Add cowboy_req:set_resp_body_fun(chunked, Fun, Req)
+
+ *  Improve various typespecs
+
+ *  Change the return value of cowboy_req:version/1
+
+    We now have 'HTTP/1.1' instead of {1, 1} and 'HTTP/1.0'
+    instead of {1, 0}.
+
+ *  Change the return value of REST accept callbacks
+
+    The Path return value becomes {true, Path}.
+
+ *  Change the return value of REST charsets_provided/2
+
+    It was incorrectly expecting a list of tuples instead of
+    a list of charsets.
+
+ *  Move various types to the cowboy module
+   *  cowboy_http:version() to cowboy:http_version()
+   *  cowboy_http:headers() to cowboy:http_headers()
+   *  cowboy_http:status() to cowboy:http_status()
+   *  cowboy_protocol:onrequest_fun() to cowboy:onrequest_fun()
+   *  cowboy_protocol:onresponse_fun() to cowboy:onresponse_fun()
+
+ *  Add type cowboy_protocol:opts()
+
+ *  Fix a REST bug with the OPTIONS method
+
+ *  Fix a REST bug where iso-8859-1 would be unconditionally selected
+
+0.8.4
+-----
+
+ *  Cookie names are now back to being case sensitive
+
+    This should be more in line with what browsers do and what
+    users would expect.
+
+ *  REST is no longer experimental and is documented
+
+ *  REST behavior fixed when used with the POST method
+
+    Removes process_post, post_is_create, create_path, created_path
+    callbacks. It is up to the resource accept callback to decide
+    what to do when the POST method is used. Depending on the return
+    value Cowboy will determine if the resource was created or not.
+
+ *  Removes the put_path meta value in REST
+
+ *  Fix an issue in REST with the PATCH method
+
+    Content-types were not normalized as expected, preventing the
+    use of the binary form for content-types.
+
+ *  Add default operations for the OPTIONS method in REST
+
+    The default will be to set the Allow header in the response
+    based on the return value from allowed_methods.
+
+ *  Add default content_types_provided "text/html" maps to to_html
+
+    This allows non-HEAD/GET methods to work without defining
+    the callback explicitly.
+
+ *  Reject invalid content-types explicitly in REST
+
+ *  Don't accept TRACE or CONNECT methods by default in REST
+
+ *  Remove cowboy_req:peer_addr/1
+
+    Because each server's proxy situation differs, it is better
+    that this function is implemented by the application directly.
+
+    The X-Forwarded-For header can now be parsed using
+    cowboy_req:parse_header/2.
+
+ *  Switch the arguments to cowboy_req:stream_body/2
+
+    They were in the wrong order compared to the rest of the module.
+
+ *  Add parser for the Range header
+
+ *  Do not crash if connection times out while sending a file using sendfile
+
+ *  Ensure we can fetch the body in the info/3 function of loop handlers
+
+ *  Update Ranch to 0.8.1
+
+ *  Reorganize and optimize the test suites
+
+0.8.3
+-----
+
+ *  Remove init_stream/5, add stream_body/2
+
+    It's better to allow configuring the streamed chunk size on
+    a per chunk basis. Also easier to use.
+
+ *  Update Ranch to 0.8.0
+
+    Much faster. Also improved stability.
+
+0.8.2
+-----
+
+ *  Add error_hook and ssl_hello_world example
+
+ *  Greatly improve the performance of body reading operations
+
+    The streamed chunk size is now configurable through the new
+    function cowboy_req:init_stream/5.
+
+ *  Add cowboy_req:body/2 and cowboy_req:body_qs/2
+
+    These functions take an additional argument indicating the
+    maximum size of the body. They will return {error, badlength}
+    if the size is too large, or {error, chunked} if the body
+    was sent using the chunked Transfer-Encoding and its size
+    cannot be determined.
+
+    The function body/1 is now an alias to body/2 with a maximum
+    body size of 8MB. Likewise, the function body_qs/1 is an alias
+    of body_qs/2 with a maximum body size of 16KB.
+
+ *  Properly handle explicit identity Transfer-Encoding in body_length/1
+
+ *  Small but noticeable performance improvement in the critical path
+
+    We stopped using binary:match/2 in favor of custom functions.
+    This makes Cowboy 0.5ms faster per request.
+
+ *  Prevent loop handlers from awakening after sending a response
+
+ *  Optimize cowboy_static initialization code
+
+ *  Make path checks in cowboy_static cross-platform
+
+ *  Allow '*' for REST content types parameters in content_types_provided
+
+ *  Fix cowboy_router types
+
+ *  Update Ranch to 0.6.2; adds support for two new SSL options
+
+ *  Improve documentation
+
+0.8.1
+-----
+
+ *  Add eventsource, web_server examples; improve rest_pastebin example
+
+ *  Add cowboy:set_env/3 to more conveniently update the dispatch list
+
+ *  Add cowboy_sub_protocol behaviour
+
+ *  Fix cowboy_req:has_body/1 when Content-Length == 0
+
+ *  Fix passing of state to websocket_terminate/3 on server close
+
+ *  Fix compilation with +native
+
+ *  Compile with more warnings enabled by default; fix warnings
+
+ *  Set the socket in passive mode after the loop handler terminates
+
+ *  Improve typespecs
+
+0.8.0
+-----
+
+ *  This release drops R14 compatibility
+
+    Behaviours now use the -callback attribute which is supported only
+    since R15B.
+
+ *  Add a user guide
+
+ *  Add or update many examples
+
+    Add basic_auth, compress_response, cookie, elixir_hello_world,
+    markdown_middleware, rest_pastebin, rest_stream_response
+    and websocket examples.
+
+    Rename the static example to static_world for clarity.
+
+ *  Add CONTRIBUTING.md file
+
+ *  Use Ranch 0.6.1 for connection handling
+
+    To start listeners you can now use cowboy:start_http/4 for HTTP,
+    and cowboy:start_https/4 for HTTPS. The proper transport and
+    protocol modules will be used.
+
+ *  Add protection against slowloris vulnerability
+
+    This protection is always enabled and has no impact on the performance
+    of the system.
+
+ *  Add a better routing syntax
+
+ *  If a binding is used twice in routing, values must now be identical
+
+ *  Add support for a configurable chain of middlewares
+
+    Routing and handling are now two separate middlewares that can be
+    replaced as needed.
+
+ *  Fix application dependencies
+
+    The crypto application must be started before Cowboy.
+
+    The inets application is no longer needed. A few functions from
+    that application were used by mistake in the REST code.
+
+ *  Shorten the name of many modules
+   *  cowboy_http_protocol becomes cowboy_protocol
+   *  cowboy_http_req becomes cowboy_req
+   *  cowboy_http_rest becomes cowboy_rest
+   *  cowboy_http_static becomes cowboy_static
+   *  cowboy_http_websocket becomes cowboy_websocket
+
+ *  Introduce the cowboy_req:req() opaque type
+
+    The include/http.hrl file was removed. Users are expected to use
+    the cowboy_req API to access or modify the Req object.
+
+    This required a lot of changes so cleanup and optimizations were
+    performed where possible.
+
+ *  Add many cowboy_req functions
+   *  cowboy_req:delete_resp_header/2 deletes a previously set resp header
+   *  cowboy_req:set_meta/3 sets metadata in the Req object
+   *  cowboy_req:to_list/1 converts the Req object to a list of key/values
+   *  cowboy_req:fragment/1 returns the request URL fragment
+   *  cowboy_req:host_url/1 returns the request URL without the path or qs
+   *  cowboy_req:url/1 returns the full request URL
+   *  cowboy_req:set_resp_body_fun/2 for body streaming with no known length
+
+ *  Improve the body streaming interface in cowboy_req
+
+    The function now receives the Transport and Socket directly as arguments.
+
+ *  Rename or drop many cowboy_req functions
+   *  cowboy_req:raw_host/1 becomes cowboy_req:host/1, old function dropped
+   *  cowboy_req:raw_path/1 becomes cowboy_req:path/1, old function dropped
+   *  cowboy_req:raw_qs/1 becomes cowboy_req:qs/1
+   *  Remove cowboy_req:body/2
+   *  Remove cowboy_req:transport/1
+
+ *  Change the signature of many cowboy_req functions
+   *  parse_header now returns {ok, any(), Req} instead of {any(), Req}
+   *  body_qs now returns {ok, QsVals, Req} instead of {QsVals, Req}
+   *  multipart_data now returns {headers, Headers, Req} instead of
+      {{headers, Headers}, Req} and {body, Body, Req} instead of
+      {{body, Body}, Req}
+   *  set_resp_* functions now return Req instead of {ok, Req}
+   *  has_body now returns boolean()
+
+ *  Rewrote cookie code
+
+    In short we now do the same thing as PHP when setting cookies. This
+    allows us to be fairly confident that our code will work on the vast
+    majority of browsers.
+
+ *  Fix consistency issues caused by erlang:decode_packet/3
+   *  The method is now always a case sensitive binary string
+   *  Note that standard method names are uppercase (e.g. <<"GET">>)
+   *  Header names are now always lowercase binary string
+
+ *  The max_line_length cowboy_protocol option was replaced by 3 new options:
+   *  max_request_line_length, defaults to 4096 bytes
+   *  max_header_name_length, defaults to 64 bytes
+   *  max_header_value_length, defaults to 4096 bytes
+
+ *  Add max_headers option, limiting the number of headers; defaults to 100
+
+ *  The max_keepalive option now defaults to 100 instead of infinity
+
+ *  Change terminate/2 to terminate/3 in the HTTP handler interface
+
+ *  Enhance the loop handler API
+   *  Connection close is now better detected
+   *  Fix an internal message leak
+
+ *  Enhance the Websocket API
+   *  Change a websocket error from {error, protocol} to {error, badframe}
+   *  Allow websocket handlers to reply with more than one frame
+   *  Check for errors when calling Transport:send/2 to avoid crashes
+   *  Add close, {close, Payload}, {close, StatusCode, Payload},
+      ping, pong frame types for replies
+   *  Ensure websocket_terminate is always called
+   *  Improve timeout handling
+   *  Remove support for the old hixie76 protocol
+   *  Add parsing support for Sec-Websocket-Protocol
+   *  Check for UTF-8 correctness of text frames
+   *  Perform unmasking and UTF-8 validation on the fly
+   *  Reject clients that send unmasked frames
+   *  Add cowboy_websocket:close_code/0 type
+
+ *  Enhance the REST API
+   *  Fix charset handling
+   *  Add PATCH support
+   *  Add created_path callback, used if create_path was not defined
+   *  Make sure rest_terminate is always called
+
+ *  Improved HTTP standard compatibility
+   *  Revised status code used in responses
+   *  Implement authorization header parsing
+   *  Add opt-in automatic response body compression
+
+ *  Improve lager compatibility
+
+    We format errors in a special way so that lager can recognize Cowboy
+    errors and put them on a single line.
+
+ *  Remove the urldecode cowboy_protocol option
+
+ *  Add cowboy_protocol:onrequest_fun/0 and :onresponse_fun/0 types
+
+ *  Add the body data to onresponse_fun/0 callback
+
+ *  Avoid a duplicate HTTP reply in cowboy_websocket:upgrade_error/1
+
+ *  Fix use of the Vary header, was named Variances in the previous code
+
+ *  Improve returned status code for HTTP and REST
+
+ *  Fix charsets_provided return value
+
+ *  Allow passing {M, F} for the mimetype function to cowboy_static
+
+ *  Can now upgrade protocols with {upgrade, protocol, P, Req, Opts}
+
+ *  Cowboy now only expects universal time, never local time
+
+ *  Do not try skipping the body if the connection is to be closed
+
+ *  Add cowboy_bstr:to_upper/1, cowboy_bstr:capitalize_token/1
+
+ *  Many, many optimizations for the most critical code path
+
+0.6.1
+-----
+
+ *  Add hello_world, rest_hello_world, chunked_hello_world,
+    echo_get, echo_post and static examples.
+
+ *  Add support for the "Expect: 100-continue" header.
+
+ *  Keep the original 'Host' header value instead of modifying it.
+
+ *  Fix use of parsed headers cache.
+
+ *  REST: fix the matching of charsets.
+
+ *  REST: allow <<"type/subtype">> format for content_types_accepted.
+
+ *  Improve typespecs.
+
+0.6.0
+-----
+
+ *  Add multipart support
+
+ *  Add chunked transfer decoding support
+
+    Done by reworking the body reading API. Now all the body
+    reading goes through the cowboy_http_req:stream_body/1
+    function. This function takes care of handling both the
+    Transfer-Encoding and the Content-Encoding, returning
+    properly decoded data ready for consumption.
+
+ *  Add fragmented websocket messages support
+
+    Properly tested by the addition of the Autobahn websocket
+    test suite to our toolbox. All tests pass except a few
+    related to UTF-8 handling, as Cowboy does no checks on that
+    end at this point.
+
+ *  Add 'onrequest' and 'onresponse' hooks
+
+    The first can be used for all the special cases you may have
+    that can't be dealt with otherwise. It's also pretty good for
+    writing access logs or rewriting URLs.
+
+    The second can be used for logging errors or replacing error
+    pages, amongst others.
+
+ *  Add cowboy:get_protocol_options/1 and cowboy:set_protocol_options/2
+
+    These functions allow for retrieving a listener's protocol options,
+    and for modifying them while the listener is running. This is
+    most useful to upgrade the dispatch list. The upgrade applies
+    to all the future connections.
+
+ *  Add the sockname/1 function to TCP and SSL transports
+
+ *  Improve SSL transport support
+
+    Add support for specifying the ciphers. Add CA support. Make
+    specifying the password optional.
+
+ *  Add new HTTP status codes from RFC 6585
+
+ *  Add a 'file' option to cowboy_http_static
+
+    This allows for mapping /folder/ paths to a /folder/index.html file.
+
+ *  Add the '*' catch all Content-Type for REST
+
+ *  Add {halt, Req, State} as a possible return value for REST
+
+ *  Add absolute URI support for requests
+
+ *  Add cowboy_http:x_www_form_urlencoded/2
+
+ *  Various REST bug fixes
+
+ *  Do not send chunked replies for HTTP/1.0 connections
+
+ *  Fix a DST bug in the cookies code
+
+ *  Fix a bug with setting cookie values containing slashes
+
+ *  Fix a small timer leak when using loop/websocket timeouts
+
+ *  Make charset and media type parsing more relaxed
+
+    This is to accommodate some widely used broken clients.
+
+ *  Make error messages more readable
+
+ *  Fix and improve type specifications
+
+ *  Fix a bug preventing documentation from being generated
+
+ *  Small improvements to the documentation
+
+ *  Rework the HTTP test suite
+
+    The suite now uses an integrated Cowboy HTTP client. The client
+    is currently experimental and shouldn't be used.
+
+ *  Add many many tests.
+
+0.4.0
+-----
+
+ *  Set the cowboy_listener process priority to high
+
+    As it is the central process used by all incoming requests
+    we need to set its priority to high to avoid timeouts that
+    would happen otherwise when reaching a huge number of
+    concurrent requests.
+
+ *  Add cowboy:child_spec/6 for embedding in other applications
+
+ *  Add cowboy_http_rest, an experimental REST protocol support
+
+    Based on the Webmachine diagram and documentation. It is a
+    new implementation, not a port, therefore a few changes have
+    been made. However all the callback names are the same and
+    should behave similarly to Webmachine.
+
+    There is currently no documentation other than the Webmachine
+    resource documentation and the comments found in cowboy_http_rest,
+    which itself should be fairly easy to read and understand.
+
+ *  Add cowboy_http_static, an experimental static file handler
+
+    Makes use of the aforementioned REST protocol support to
+    deliver files with proper content type and cache headers.
+
+    Note that this uses the new file:sendfile support when
+    appropriate, which currently requires the VM to be started
+    with the +A option defined, else errors may randomly appear.
+
+ *  Add cowboy_bstr module for binary strings related functions
+
+ *  Add cowboy_http module for HTTP parsing functions
+
+    This module so far contains various functions for HTTP header
+    parsing along with URL encoding and decoding.
+
+ *  Remove quoted from the default dependencies
+
+    This should make Cowboy much easier to compile and use by default.
+    It is of course still possible to use quoted as your URL decoding
+    library in Cowboy thanks to the newly added urldecode option.
+
+ *  Fix supervisor spec for non dynamic modules to allow upgrades to complete
+
+ *  Add cowboy:accept_ack/1 for a cleaner handling of the shoot message
+
+    Before, when the listener accepted a connection, the newly created
+    process was waiting for a message containing the atom 'shoot' before
+    proceeding. This has been replaced by the cowboy:accept_ack/1 function.
+
+    This function should be used where 'shoot' was received because the
+    contents of the message have changed (and could change again in the
+    distant future).
+
+ *  Update binary parsing expressions to avoid HiPE crashes
+
+    More specifically, /bits was replaced by /binary.
+
+ *  Rename the type cowboy_dispatcher:path_tokens/0 to tokens/0
+
+ *  Remove the cowboy_clock:date/0, time/0 and datetime/0 types
+
+    The calendar module exports those same types properly since R14B04.
+
+ *  Add cacertfile configuration option to cowboy_ssl_transport
+
+ *  Add cowboy_protocol behaviour
+
+ *  Remove -Wbehaviours dialyzer option unavailable in R15B
+
+ *  Many tests and specs improvements
+
+### cowboy_http_req
+
+ *  Fix a crash when reading the request body
+
+ *  Add parse_header/2 and parse_header/3
+
+    The following headers can now be semantically parsed: Connection, Accept,
+    Accept-Charset, Accept-Encoding, Accept-Language, Content-Length,
+    Content-Type, If-Match, If-None-Match, If-Modified-Since,
+    If-Unmodified-Since, Upgrade
+
+ *  Add set_resp_header/3, set_resp_cookie/4 and set_resp_body/2
+
+    These functions allow handlers to set response headers and body
+    without having to reply directly.
+
+ *  Add set_resp_body_fun/3
+
+    This function allows handlers to stream the body of the response
+    using the given fun. The size of the response must be known beforehand.
+
+ *  Add transport/1 to obtain the transport and socket for the request
+
+    This allows handlers to have low-level socket access in those cases
+    where they do need it, like when streaming a response body with
+    set_resp_body_fun/3.
+
+ *  Add peer_addr/1
+
+    This function tries to guess the real peer IP based on the HTTP
+    headers received.
+
+ *  Add meta/2 and meta/3 to save useful protocol information
+
+    Currently used to save the Websocket protocol version currently used,
+    and to save request information in the REST protocol handler.
+
+ *  Add reply/2 and reply/3 aliases to reply/4
+
+ *  Add upgrade_reply/3 for protocol upgrades
+
+### cowboy_http_protocol
+
+ *  Add the {urldecode, fun urldecode/2} option
+
+    Added when quoted was removed from the default build. Can be used to
+    tell Cowboy to use quoted or any other URL decoding routine.
+
+ *  Add the max_keepalive option
+
+ *  Add the max_line_length option
+
+ *  Allow HTTP handlers to stop during init/3
+
+    To do so they can return {shutdown, Req, State}.
+
+ *  Add loops support in HTTP handlers for proper long-polling support
+
+    A loop can be entered by returning either of {loop, Req, State},
+    {loop, Req, State, hibernate}, {loop, Req, State, Timeout} or
+    {loop, Req, State, Timeout, hibernate} from init/3.
+
+    Loops are useful when we cannot reply immediately and instead
+    are waiting for an Erlang message to be able to complete the request,
+    as would typically be done for long-polling.
+
+    Loop support in the protocol means that timeouts and hibernating
+    are well tested and handled so you can use those options without
+    worrying. It is recommended to set the timeout option.
+
+    When a loop is started, handle/2 will never be called so it does
+    not need to be defined. When the request process receives an Erlang
+    message, it will call the info/3 function with the message as the
+    first argument.
+
+    Like in OTP, you do need to set timeout and hibernate again when
+    returning from info/3 to enable them until the next call.
+
+ *  Fix the sending of 500 errors when handlers crash
+
+    Now we send an error response when no response has been sent,
+    and do nothing more than close the connection if anything
+    did get sent.
+
+ *  Fix a crash when the server is sent HTTP responses
+
+ *  Fix HTTP timeouts handling when the Request-Line wasn't received
+
+ *  Fix the handling of the max number of empty lines between requests
+
+ *  Fix the handling of HEAD requests
+
+ *  Fix HTTP/1.0 Host header handling
+
+ *  Reply status 400 if we receive an unexpected value or error for headers
+
+ *  Properly close when the application sends "Connection: close" header
+
+ *  Close HTTP connections on all errors
+
+ *  Improve the error message for HTTP handlers
+
+### cowboy_http_websocket
+
+ *  Add websocket support for all versions up to RFC 6455
+
+    Support isn't perfect yet according to the specifications, but
+    is working against all currently known client implementations.
+
+ *  Allow websocket_init/3 to return with the hibernate option set
+
+ *  Add {shutdown, Req} return value to websocket_init/3 to fail an upgrade
+
+ *  Fix websocket timeout handling
+
+ *  Fix error messages: wrong callback name was reported on error
+
+ *  Fix byte-by-byte websocket handling
+
+ *  Fix an issue when using hixie-76 with certain proxies
+
+ *  Fix a crash in the hixie-76 handshake
+
+ *  Fix the handshake when SSL is used on port 443
+
+ *  Fix a crash in the handshake when cowboy_http_req:compact/1 is used
+
+ *  Fix handshake when a query string is present
+
+ *  Fix a crash when the Upgrade header contains more than one token
+
+0.2.0
+-----
+
+ *  Initial release.
diff --git a/deps/cowboy/CONTRIBUTING.md b/deps/cowboy/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..7bc005b
--- /dev/null
@@ -0,0 +1,173 @@
+Contributing
+============
+
+Introduction
+------------
+
+This document describes the usages and rules to follow when contributing
+to this project.
+
+It uses the uppercase keywords SHOULD for optional but highly recommended
+conditions and MUST for required conditions.
+
+`git` is a distributed source code versioning system. This document refers
+to three different repositories hosting the source code of the project.
+`Your local copy` refers to the copy of the repository that you have on
+your computer. The remote repository `origin` refers to your fork of the
+project's repository that you can find in your GitHub account. The remote
+repository `upstream` refers to the official repository for this project.
+
+Following this document will ensure prompt merging of your work in the
+`master` branch of the project.
+
+Reporting bugs
+--------------
+
+Upon identifying a bug or a DoS vulnerability, you SHOULD submit a ticket,
+regardless of your plans for fixing it. If you plan to fix the bug, you
+SHOULD discuss your plans to avoid having your work rejected.
+
+Upon identifying a security vulnerability in Erlang/OTP that leaves Cowboy
+vulnerable to attack, you SHOULD consult privately with the Erlang/OTP team
+to get the issue resolved.
+
+Upon identifying a security vulnerability in Cowboy's `cowboy_static` module,
+you SHOULD submit a ticket, regardless of your plans for fixing it. Please
+ensure that all necessary details to reproduce are listed. You then SHOULD
+inform users on the mailing list about the issue, advising that they use
+another means for sending static files until the issue is resolved.
+
+Upon identifying a security vulnerability in any other part of Cowboy, you
+SHOULD contact us directly by email. Please ensure that all necessary details
+to reproduce are listed.
+
+Before implementing a new feature, you SHOULD submit a ticket for discussion
+on your plans. The feature might have been rejected already, or the
+implementation might already be decided.
+
+Cloning
+-------
+
+You MUST fork the project's repository to your GitHub account by clicking
+on the `Fork` button.
+
+Then, from your fork's page, copy the `Git Read-Only` URL to your clipboard.
+You MUST perform the following commands in the folder you choose, replacing
+`$URL` by the URL you just copied, `$UPSTREAM_URL` by the `Git Read-Only`
+project of the official repository, and `$PROJECT` by the name of this project.
+
+``` bash
+$ git clone "$URL"
+$ cd $PROJECT
+$ git remote add upstream $UPSTREAM_URL
+```
+
+Branching
+---------
+
+Before starting working on the code, you MUST update to `upstream`. The
+project is always evolving, and as such you SHOULD always strive to keep
+up to date when submitting patches to make sure they can be merged without
+conflicts.
+
+To update the current branch to `upstream`, you can use the following commands.
+
+``` bash
+$ git fetch upstream
+$ git rebase upstream/master
+```
+
+It may ask you to stash your changes, in which case you stash with:
+
+``` bash
+$ git stash
+```
+
+And put your changes back in with:
+
+``` bash
+$ git stash pop
+```
+
+You SHOULD use these commands both before working on your patch and before
+submitting the pull request. If conflicts arise it is your responsibility
+to deal with them.
+
+You MUST create a new branch for your work. First, ensure you are on `master`.
+You MUST update `master` to `upstream` before doing anything. Then create a
+new branch `$BRANCH` and switch to it.
+
+``` bash
+$ git checkout -b $BRANCH
+```
+
+You MUST use an insightful branch name.
+
+If you later need to switch back to an existing branch `$BRANCH`, you can use:
+
+``` bash
+$ git checkout $BRANCH
+```
+
+Source editing
+--------------
+
+The following rules MUST be followed:
+ *  Indentation uses horizontal tabs (1 tab = 4 columns)
+ *  Do NOT align code; only indentation is allowed
+ *  Lines MUST NOT span more than 80 columns
+
+The following rules SHOULD be followed:
+ *  Write small functions whenever possible
+ *  Avoid having too many clauses containing clauses containing clauses
+
+Committing
+----------
+
+You MUST ensure that all commits pass all tests and do not have extra
+Dialyzer warnings.
+
+Running tests is fairly straightforward. Note that you need at least
+Erlang/OTP R16B01 for the SSL tests to run.
+
+``` bash
+make tests
+```
+
+Running Dialyzer requires some initial setup. You need to build the PLT
+file that Dialyzer will use for its analysis. This is a one-time operation.
+Dialyzer will take care of updating that file when needed.
+
+``` bash
+make build-plt
+```
+
+Once that is done, you can run Dialyzer.
+
+``` bash
+make dialyze
+```
+
+You MUST put all the related work in a single commit. Fixing a bug is one
+commit, adding a feature is one commit, adding two features is two commits.
+
+You MUST write a proper commit title and message. The commit title MUST be
+at most 72 characters; it is the first line of the commit text. The second
+line of the commit text MUST be left blank. The third line and beyond is the
+commit message. You SHOULD write a commit message. If you do, you MUST make
+all lines smaller than 80 characters. You SHOULD explain what the commit
+does, what references you used and any other information that helps
+understanding your work.
+
+Submitting the pull request
+---------------------------
+
+You MUST push your branch `$BRANCH` to GitHub, using the following command:
+
+``` bash
+$ git push origin $BRANCH
+```
+
+You MUST then submit the pull request by using the GitHub interface.
+You SHOULD provide an explanatory message and refer to any previous ticket
+related to this patch.
similarity index 92%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/LICENSE
rename to deps/cowboy/LICENSE
index 7de99bbf105f8a6ef16ff13860e63e70ebcf7866..e7435f8dd856ea36e41bd7b964350deb118cccbb 100644 (file)
@@ -1,4 +1,4 @@
-Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/deps/cowboy/Makefile b/deps/cowboy/Makefile
new file mode 100644 (file)
index 0000000..a186286
--- /dev/null
@@ -0,0 +1,75 @@
+# See LICENSE for licensing information.
+
+PROJECT = cowboy
+
+# Options.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_all +warn_export_vars \
+       +warn_shadow_vars +warn_obsolete_guard +warn_missing_spec
+COMPILE_FIRST = cowboy_middleware cowboy_sub_protocol
+CT_OPTS += -pa test -ct_hooks cowboy_ct_hook []
+PLT_APPS = crypto public_key ssl
+CI_OTP = OTP_R16B01 OTP_R16B02 OTP_R16B03-1 OTP-17.0.2 OTP-17.1.2 OTP-17.2.2 OTP-17.3.4 OTP-17.4.1 OTP-17.5.6.2 OTP-18.0.2
+
+# Dependencies.
+
+DEPS = cowlib ranch
+TEST_DEPS = ct_helper gun
+dep_ct_helper = git https://github.com/extend/ct_helper.git master
+dep_gun = git https://github.com/ninenines/gun b85c1f726ca49ac0e3abdcf717317cb95b06207d
+
+# Standard targets.
+
+include erlang.mk
+
+# Documentation.
+
+dep_ezdoc = git https://github.com/ninenines/ezdoc master
+$(eval $(call dep_target,ezdoc))
+
+build-doc-deps: $(DEPS_DIR)/ezdoc
+       $(MAKE) -C $(DEPS_DIR)/ezdoc
+
+define ezdoc_script
+io:format("Building manual~n"),
+[begin
+       AST = ezdoc:parse_file(F),
+       BF = filename:rootname(filename:basename(F)),
+       io:format("  ~s~n", [BF]),
+       file:write_file("doc/markdown/manual/" ++ BF ++ ".md", ezdoc_markdown:export(AST)),
+       case BF of
+               "cowboy" ++ _ when BF =/= "cowboy_app" ->
+                       file:write_file("doc/man3/" ++ BF ++ ".3", ezdoc_man:export(3, AST));
+               _ when BF =/= "index" ->
+                       file:write_file("doc/man7/" ++ BF ++ ".7", ezdoc_man:export(7, AST));
+               _ ->
+                       ok
+       end
+end || F <- filelib:wildcard("doc/src/manual/*.ezdoc")],
+io:format("Building guide~n"),
+[begin
+       AST = ezdoc:parse_file(F),
+       BF = filename:rootname(filename:basename(F)),
+       io:format("  ~s~n", [BF]),
+       file:write_file("doc/markdown/guide/" ++ BF ++ ".md", ezdoc_markdown:export(AST))
+end || F <- filelib:wildcard("doc/src/guide/*.ezdoc")],
+io:format("Done.~n"),
+init:stop().
+endef
+export ezdoc_script
+
+docs:: clean-docs build-doc-deps
+       @mkdir -p doc/man3 doc/man7 doc/markdown/guide doc/markdown/manual
+       $(gen_verbose) erl -noinput -pa ebin deps/ezdoc/ebin -eval "$$ezdoc_script"
+       @gzip doc/man3/*.3 doc/man7/*.7
+       @cp doc/src/guide/*.png doc/markdown/guide
+
+clean-docs:
+       $(gen_verbose) rm -rf doc/man3 doc/man7 doc/markdown
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+
+install-docs:
+       mkdir -p $(MAN_INSTALL_PATH)/man3/ $(MAN_INSTALL_PATH)/man7/
+       install -g 0 -o 0 -m 0644 doc/man3/*.gz $(MAN_INSTALL_PATH)/man3/
+       install -g 0 -o 0 -m 0644 doc/man7/*.gz $(MAN_INSTALL_PATH)/man7/
diff --git a/deps/cowboy/README.md b/deps/cowboy/README.md
new file mode 100644 (file)
index 0000000..2919e63
--- /dev/null
@@ -0,0 +1,50 @@
+Cowboy
+======
+
+Cowboy is a small, fast and modular HTTP server written in Erlang.
+
+Goals
+-----
+
+Cowboy aims to provide a **complete** HTTP stack in a **small** code base.
+It is optimized for **low latency** and **low memory usage**, in part
+because it uses **binary strings**.
+
+Cowboy provides **routing** capabilities, selectively dispatching requests
+to handlers written in Erlang.
+
+Because it uses Ranch for managing connections, Cowboy can easily be
+**embedded** in any other application.
+
+No parameterized module. No process dictionary. **Clean** Erlang code.
+
+Sponsors
+--------
+
+The SPDY implementation was sponsored by
+[LeoFS Cloud Storage](http://www.leofs.org).
+
+The project is currently sponsored by
+[Kato.im](https://kato.im).
+
+Online documentation
+--------------------
+
+ *  [User guide](http://ninenines.eu/docs/en/cowboy/HEAD/guide)
+ *  [Function reference](http://ninenines.eu/docs/en/cowboy/HEAD/manual)
+
+Offline documentation
+---------------------
+
+ *  While still online, run `make docs`
+ *  Function reference man pages available in `doc/man3/` and `doc/man7/`
+ *  Run `make install-docs` to install man pages on your system
+ *  Full documentation in Markdown available in `doc/markdown/`
+ *  Examples available in `examples/`
+
+Getting help
+------------
+
+ *  Official IRC Channel: #ninenines on irc.freenode.net
+ *  [Mailing Lists](http://lists.ninenines.eu)
+ *  [Commercial Support](http://ninenines.eu/support)
diff --git a/deps/cowboy/ROADMAP.md b/deps/cowboy/ROADMAP.md
new file mode 100644 (file)
index 0000000..db94461
--- /dev/null
@@ -0,0 +1,127 @@
+ROADMAP
+=======
+
+This document explains in as much detail as possible the
+list of planned changes and work to be done on the Cowboy
+server. It is intended to be exhaustive but some elements
+might still be missing.
+
+2.0 (R17 and R18)
+-----------------
+
+The main features that will be added to Cowboy 2.0 are support
+for HTTP/2.0 and Websocket permessage deflate compression.
+
+A complete analysis of the httpbis set of specifications
+will be performed and extensive tests will be written to
+ensure maximum compatibility.
+
+A number of backward incompatible changes are planned. These
+changes are individually small, but together should result
+in a large improvement in usability.
+
+### cowboy_req
+
+The interface of `cowboy_req` will be largely changed. The
+number one complaint about Cowboy today is that you have
+to keep track of the Req whenever you do anything. The new
+interface will minimize that.
+
+All functions will return a single term, excluding the body
+reading functions `body/{1,2}`, `body_qs/{1,2}`, `part/{1,2}`,
+`part_body/{1,2}`.
+
+Of the functions returning a single term, some of them will
+return a Req object. This includes the functions that already
+return Req: `compact/1`, `delete_resp_header/2`, `set_meta/3`,
+`set_resp_body/2`, `set_resp_body_fun/{2,3}`, `set_resp_cookie/4`,
+`set_resp_header/3`, and adds the `chunked_reply/{2,3}` and
+`reply/{2,3,4}` functions to the list.
+
+Of note is that this will allow chaining all the response
+functions if that's what you fancy.
+
+The `parse_header/{2,3}` function will now only return the
+parsed header value, and crash on error. It will also not
+cache the parsed value anymore, except for headers that Cowboy
+requires, like the connection header.
+
+It is unsure what will become of the `qs_val`, `qs_vals`,
+`cookie` and `cookies` functions. The main idea at this point
+is to replace them with a `parse_qs/2` and `parse_cookies/2`
+that would return the parsed list, and let the user decide
+how to access it.
+
+### init/terminate unification
+
+The first argument of the `init/3` function is too rarely used.
+It will be removed.
+
+The return value of the `init/2` function will become
+`{http, Req, State} | {loop, Req, State} | {Module, Req, State}`
+with `Module` being `cowboy_rest`, `cowboy_websocket` or a
+user provided module.
+
+The `rest_init` and `websocket_init` callbacks will be removed
+as they become unnecessary with the new `init/2` interface.
+
+Similarly, the `rest_terminate` and `websocket_terminate`
+callbacks will be removed in favor of a unified `terminate/3`.
+
+The `terminate/3` callback will become optional.
+
+### Middlewares
+
+The error tuple return value brings little value compared to
+the halt tuple. The error tuple will therefore be removed.
+
+### Hooks
+
+The `onrequest` hook will be removed. It can easily be replaced
+by a middleware.
+
+The interface of the `onresponse` hook will change. There has
+been a number of issues and added complexity with the current
+interface that warrant fixing. The main problem is that the
+hook may be used to change the reply, by calling the reply
+function again, forcing us to be careful not to reprocess
+everything again.
+
+To fix that, we will cut the reply mechanism in two steps,
+one that is basically some preprocessing of the response
+header to follow the protocol requirements, and then the
+actual response. The `onresponse` hook will fit in the
+middle, being called from the first step and calling the
+second step itself.
+
+If a body streaming function is provided, the hook will
+also receive it (unlike today). It will not be able to
+inspect its contents however.
+
+This should greatly simplify the code and allow users to
+do any operation they wish.
+
+### Low-level interface documented
+
+A special chapter of the manual will document a low-level
+interface that may be used in middlewares or hooks (but
+nowhere else). This includes the Req access and update
+functions and the new response function described above.
+
+### REST
+
+The `known_content_type` callback has no purpose, so it
+is going to be removed.
+
+The documentation for all REST callbacks will be updated
+to describe whether they can have side effects. This will
+allow us to build introspection tools on top of a working
+REST API.
+
+Range support will be added.
+
+Under consideration
+-------------------
+
+ *  Convenience API for extracting query string and body
+    information, similar to PHP's $_GET, $_POST and $_FILES
diff --git a/deps/cowboy/all.sh b/deps/cowboy/all.sh
new file mode 100755 (executable)
index 0000000..32b8302
--- /dev/null
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+KERL_INSTALL_PATH=~/erlang
+KERL_RELEASES="r16b01 r16b02 r16b03-1 17.0 17.1.2"
+
+make build-ct-suites
+
+for rel in $KERL_RELEASES
+do
+       echo
+       echo "    TESTING $rel"
+       echo
+       . $KERL_INSTALL_PATH/$rel/activate
+       cp ~/.kerl/builds/$rel/otp_src_*/lib/ssl/test/erl_make_certs.erl \
+               deps/ct_helper/src/
+       CT_OPTS="-label $rel" make tests
+done
+
+xdg-open logs/all_runs.html
diff --git a/deps/cowboy/circle.yml b/deps/cowboy/circle.yml
new file mode 100644 (file)
index 0000000..fa31fe2
--- /dev/null
@@ -0,0 +1,22 @@
+general:
+  artifacts:
+    - "logs"
+
+dependencies:
+  cache_directories:
+    - "~/.kerl"
+    - "~/erlang"
+
+  pre:
+    - sudo pip install autobahntestsuite
+    - sudo apt-get install autoconf2.59
+    - cd $HOME/bin && ln -s /usr/bin/autoconf2.59 autoconf
+    - cd $HOME/bin && ln -s /usr/bin/autoheader2.59 autoheader
+    - make ci-prepare:
+        timeout: 7200
+
+test:
+  override:
+    - source $HOME/erlang/OTP-18.0.2/activate && make dialyze
+    - make -k ci:
+        timeout: 3600
diff --git a/deps/cowboy/erlang.mk b/deps/cowboy/erlang.mk
new file mode 100644 (file)
index 0000000..8930dfc
--- /dev/null
@@ -0,0 +1 @@
+include ../../erlang.mk
diff --git a/deps/cowboy/rebar.config b/deps/cowboy/rebar.config
new file mode 100644 (file)
index 0000000..b9b2ba0
--- /dev/null
@@ -0,0 +1,4 @@
+{deps, [
+       {cowlib, ".*", {git, "https://github.com/ninenines/cowlib.git", "1.0.0"}},
+       {ranch, ".*", {git, "https://github.com/ninenines/ranch.git", "1.0.0"}}
+]}.
diff --git a/deps/cowboy/src/cowboy.app.src b/deps/cowboy/src/cowboy.app.src
new file mode 100644 (file)
index 0000000..b3f3c56
--- /dev/null
@@ -0,0 +1,9 @@
+{application,cowboy,
+             [{description,"Small, fast, modular HTTP server."},
+              {vsn,"1.0.3"},
+              {id,"git"},
+              {modules,[]},
+              {registered,[cowboy_clock,cowboy_sup]},
+              {applications,[kernel,stdlib,ranch,cowlib,crypto]},
+              {mod,{cowboy_app,[]}},
+              {env,[]}]}.
diff --git a/deps/cowboy/src/cowboy.erl b/deps/cowboy/src/cowboy.erl
new file mode 100644 (file)
index 0000000..2b50dfb
--- /dev/null
@@ -0,0 +1,75 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy).
+
+-export([start_http/4]).
+-export([start_https/4]).
+-export([start_spdy/4]).
+-export([stop_listener/1]).
+-export([set_env/3]).
+
+-type http_headers() :: [{binary(), iodata()}].
+-export_type([http_headers/0]).
+
+-type http_status() :: non_neg_integer() | binary().
+-export_type([http_status/0]).
+
+-type http_version() :: 'HTTP/1.1' | 'HTTP/1.0'.
+-export_type([http_version/0]).
+
+-type onrequest_fun() :: fun((Req) -> Req).
+-export_type([onrequest_fun/0]).
+
+-type onresponse_fun() ::
+       fun((http_status(), http_headers(), iodata(), Req) -> Req).
+-export_type([onresponse_fun/0]).
+
+-spec start_http(ranch:ref(), non_neg_integer(), ranch_tcp:opts(),
+       cowboy_protocol:opts()) -> {ok, pid()} | {error, any()}.
+start_http(Ref, NbAcceptors, TransOpts, ProtoOpts)
+               when is_integer(NbAcceptors), NbAcceptors > 0 ->
+       ranch:start_listener(Ref, NbAcceptors,
+               ranch_tcp, TransOpts, cowboy_protocol, ProtoOpts).
+
+-spec start_https(ranch:ref(), non_neg_integer(), ranch_ssl:opts(),
+       cowboy_protocol:opts()) -> {ok, pid()} | {error, any()}.
+start_https(Ref, NbAcceptors, TransOpts, ProtoOpts)
+               when is_integer(NbAcceptors), NbAcceptors > 0 ->
+       ranch:start_listener(Ref, NbAcceptors,
+               ranch_ssl, TransOpts, cowboy_protocol, ProtoOpts).
+
+-spec start_spdy(ranch:ref(), non_neg_integer(), ranch_ssl:opts(),
+       cowboy_spdy:opts()) -> {ok, pid()} | {error, any()}.
+start_spdy(Ref, NbAcceptors, TransOpts, ProtoOpts)
+               when is_integer(NbAcceptors), NbAcceptors > 0 ->
+       TransOpts2 = [
+               {connection_type, supervisor},
+               {next_protocols_advertised,
+                       [<<"spdy/3">>, <<"http/1.1">>, <<"http/1.0">>]}
+       |TransOpts],
+       ranch:start_listener(Ref, NbAcceptors,
+               ranch_ssl, TransOpts2, cowboy_spdy, ProtoOpts).
+
+-spec stop_listener(ranch:ref()) -> ok | {error, not_found}.
+stop_listener(Ref) ->
+       ranch:stop_listener(Ref).
+
+-spec set_env(ranch:ref(), atom(), any()) -> ok.
+set_env(Ref, Name, Value) ->
+       Opts = ranch:get_protocol_options(Ref),
+       {_, Env} = lists:keyfind(env, 1, Opts),
+       Opts2 = lists:keyreplace(env, 1, Opts,
+               {env, lists:keystore(Name, 1, Env, {Name, Value})}),
+       ok = ranch:set_protocol_options(Ref, Opts2).
similarity index 71%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.app.src
rename to deps/cowboy/src/cowboy_app.erl
index 9b3ee50db260cf0a0da5fc7963f6586a4bd2b87e..1161d91bb72d1f4c0acc547475b7e7c73ff21ca9 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
-{application, cowboy, [
-       {description, "Small, fast, modular HTTP server."},
-       {vsn, "0.5.0"},
-       {modules, []},
-       {registered, [cowboy_clock, cowboy_sup]},
-       {applications, [
-               kernel,
-               stdlib
-       ]},
-       {mod, {cowboy_app, []}},
-       {env, []}
-]}.
+-module(cowboy_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+-spec start(_, _) -> {ok, pid()}.
+start(_, _) ->
+       cowboy_sup:start_link().
+
+-spec stop(_) -> ok.
+stop(_) ->
+       ok.
similarity index 54%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_bstr.erl
rename to deps/cowboy/src/cowboy_bstr.erl
index 1c702efb4143908bc13ac015f9460c61cba67deb..98d2cf7bcacced42e6e7cda09f4b983fa7f16841 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
-%% @doc Binary string manipulation.
 -module(cowboy_bstr).
 
--export([to_lower/1]). %% Binary strings.
--export([char_to_lower/1, char_to_upper/1]). %% Characters.
+%% Binary strings.
+-export([capitalize_token/1]).
+-export([to_lower/1]).
+-export([to_upper/1]).
 
-%% @doc Convert a binary string to lowercase.
--spec to_lower(binary()) -> binary().
-to_lower(L) ->
-       << << (char_to_lower(C)) >> || << C >> <= L >>.
+%% Characters.
+-export([char_to_lower/1]).
+-export([char_to_upper/1]).
+
+%% The first letter and all letters after a dash are capitalized.
+%% This is the form seen for header names in the HTTP/1.1 RFC and
+%% others. Note that using this form isn't required, as header names
+%% are case insensitive, and it is only provided for use with eventual
+%% badly implemented clients.
+-spec capitalize_token(B) -> B when B::binary().
+capitalize_token(B) ->
+       capitalize_token(B, true, <<>>).
+capitalize_token(<<>>, _, Acc) ->
+       Acc;
+capitalize_token(<< $-, Rest/bits >>, _, Acc) ->
+       capitalize_token(Rest, true, << Acc/binary, $- >>);
+capitalize_token(<< C, Rest/bits >>, true, Acc) ->
+       capitalize_token(Rest, false, << Acc/binary, (char_to_upper(C)) >>);
+capitalize_token(<< C, Rest/bits >>, false, Acc) ->
+       capitalize_token(Rest, false, << Acc/binary, (char_to_lower(C)) >>).
+
+-spec to_lower(B) -> B when B::binary().
+to_lower(B) ->
+       << << (char_to_lower(C)) >> || << C >> <= B >>.
+
+-spec to_upper(B) -> B when B::binary().
+to_upper(B) ->
+       << << (char_to_upper(C)) >> || << C >> <= B >>.
 
-%% @doc Convert [A-Z] characters to lowercase.
-%% @end
-%% We gain noticeable speed by matching each value directly.
 -spec char_to_lower(char()) -> char().
 char_to_lower($A) -> $a;
 char_to_lower($B) -> $b;
@@ -55,7 +77,6 @@ char_to_lower($Y) -> $y;
 char_to_lower($Z) -> $z;
 char_to_lower(Ch) -> Ch.
 
-%% @doc Convert [a-z] characters to uppercase.
 -spec char_to_upper(char()) -> char().
 char_to_upper($a) -> $A;
 char_to_upper($b) -> $B;
@@ -84,3 +105,19 @@ char_to_upper($x) -> $X;
 char_to_upper($y) -> $Y;
 char_to_upper($z) -> $Z;
 char_to_upper(Ch) -> Ch.
+
+%% Tests.
+
+-ifdef(TEST).
+capitalize_token_test_() ->
+       Tests = [
+               {<<"heLLo-woRld">>, <<"Hello-World">>},
+               {<<"Sec-Websocket-Version">>, <<"Sec-Websocket-Version">>},
+               {<<"Sec-WebSocket-Version">>, <<"Sec-Websocket-Version">>},
+               {<<"sec-websocket-version">>, <<"Sec-Websocket-Version">>},
+               {<<"SEC-WEBSOCKET-VERSION">>, <<"Sec-Websocket-Version">>},
+               {<<"Sec-WebSocket--Version">>, <<"Sec-Websocket--Version">>},
+               {<<"Sec-WebSocket---Version">>, <<"Sec-Websocket---Version">>}
+       ],
+       [{H, fun() -> R = capitalize_token(H) end} || {H, R} <- Tests].
+-endif.
similarity index 74%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_clock.erl
rename to deps/cowboy/src/cowboy_clock.erl
index c699f4f4c400592d1741c618114fad90747c2482..5d9a41a4ca3ea86a4ec8682c6bedbc07c5ccae76 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
@@ -12,8 +12,6 @@
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
-%% @doc Date and time related functions.
-%%
 %% While a gen_server process runs in the background to update
 %% the cache of formatted dates every second, all API calls are
 %% local and directly read from the ETS cache table, providing
 -module(cowboy_clock).
 -behaviour(gen_server).
 
--export([start_link/0, stop/0, rfc1123/0, rfc2109/1]). %% API.
--export([init/1, handle_call/3, handle_cast/2,
-       handle_info/2, terminate/2, code_change/3]). %% gen_server.
+%% API.
+-export([start_link/0]).
+-export([stop/0]).
+-export([rfc1123/0]).
+-export([rfc1123/1]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
 
 -record(state, {
        universaltime = undefined :: undefined | calendar:datetime(),
        rfc1123 = <<>> :: binary(),
-       tref = undefined :: undefined | timer:tref()
+       tref = undefined :: undefined | reference()
 }).
 
--define(SERVER, ?MODULE).
--define(TABLE, ?MODULE).
-
--include_lib("eunit/include/eunit.hrl").
-
 %% API.
 
-%% @private
 -spec start_link() -> {ok, pid()}.
 start_link() ->
-       gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
+       gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
 
-%% @private
 -spec stop() -> stopped.
 stop() ->
-       gen_server:call(?SERVER, stop).
+       gen_server:call(?MODULE, stop).
 
-%% @doc Return the current date and time formatted according to RFC-1123.
-%%
-%% This format is used in the <em>'Date'</em> header sent with HTTP responses.
 -spec rfc1123() -> binary().
 rfc1123() ->
-       ets:lookup_element(?TABLE, rfc1123, 2).
+       ets:lookup_element(?MODULE, rfc1123, 2).
 
-%% @doc Return the current date and time formatted according to RFC-2109.
-%%
-%% This format is used in the <em>'Set-Cookie'</em> header sent with
-%% HTTP responses.
--spec rfc2109(calendar:datetime()) -> binary().
-rfc2109(LocalTime) ->
-       {{YYYY,MM,DD},{Hour,Min,Sec}} =
-       case calendar:local_time_to_universal_time_dst(LocalTime) of
-           [Gmt]   -> Gmt;
-           [_,Gmt] -> Gmt
-       end,
-       Wday = calendar:day_of_the_week({YYYY,MM,DD}),
-       DayBin = pad_int(DD),
-       YearBin = list_to_binary(integer_to_list(YYYY)),
-       HourBin = pad_int(Hour),
-       MinBin = pad_int(Min),
-       SecBin = pad_int(Sec),
-       WeekDay = weekday(Wday),
-       Month = month(MM),
-       <<WeekDay/binary, ", ",
-       DayBin/binary, " ", Month/binary, " ",
-       YearBin/binary, " ",
-       HourBin/binary, ":",
-       MinBin/binary, ":",
-       SecBin/binary, " GMT">>.
+-spec rfc1123(calendar:datetime()) -> binary().
+rfc1123(DateTime) ->
+       update_rfc1123(<<>>, undefined, DateTime).
 
 %% gen_server.
 
-%% @private
 -spec init([]) -> {ok, #state{}}.
 init([]) ->
-       ?TABLE = ets:new(?TABLE, [set, protected,
+       ?MODULE = ets:new(?MODULE, [set, protected,
                named_table, {read_concurrency, true}]),
        T = erlang:universaltime(),
        B = update_rfc1123(<<>>, undefined, T),
-       {ok, TRef} = timer:send_interval(1000, update),
-       ets:insert(?TABLE, {rfc1123, B}),
+       TRef = erlang:send_after(1000, self(), update),
+       ets:insert(?MODULE, {rfc1123, B}),
        {ok, #state{universaltime=T, rfc1123=B, tref=TRef}}.
 
-%% @private
--spec handle_call(_, _, State)
-       -> {reply, ignored, State} | {stop, normal, stopped, State}.
-handle_call(stop, _From, State=#state{tref=TRef}) ->
-       {ok, cancel} = timer:cancel(TRef),
+-type from() :: {pid(), term()}.
+-spec handle_call
+       (stop, from(), State) -> {stop, normal, stopped, State}
+       when State::#state{}.
+handle_call(stop, _From, State) ->
        {stop, normal, stopped, State};
 handle_call(_Request, _From, State) ->
        {reply, ignored, State}.
 
-%% @private
--spec handle_cast(_, State) -> {noreply, State}.
+-spec handle_cast(_, State) -> {noreply, State} when State::#state{}.
 handle_cast(_Msg, State) ->
        {noreply, State}.
 
-%% @private
--spec handle_info(_, State) -> {noreply, State}.
-handle_info(update, #state{universaltime=Prev, rfc1123=B1, tref=TRef}) ->
+-spec handle_info(any(), State) -> {noreply, State} when State::#state{}.
+handle_info(update, #state{universaltime=Prev, rfc1123=B1, tref=TRef0}) ->
+       %% Cancel the timer in case an external process sent an update message.
+       _ = erlang:cancel_timer(TRef0),
        T = erlang:universaltime(),
        B2 = update_rfc1123(B1, Prev, T),
-       ets:insert(?TABLE, {rfc1123, B2}),
+       ets:insert(?MODULE, {rfc1123, B2}),
+       TRef = erlang:send_after(1000, self(), update),
        {noreply, #state{universaltime=T, rfc1123=B2, tref=TRef}};
 handle_info(_Info, State) ->
        {noreply, State}.
 
-%% @private
 -spec terminate(_, _) -> ok.
 terminate(_Reason, _State) ->
        ok.
 
-%% @private
--spec code_change(_, State, _) -> {ok, State}.
+-spec code_change(_, State, _) -> {ok, State} when State::#state{}.
 code_change(_OldVsn, State, _Extra) ->
        {ok, State}.
 
@@ -159,7 +133,7 @@ update_rfc1123(<< _:11/binary, Keep:6/binary, _/bits >>,
 update_rfc1123(_, _, {Date = {Y, Mo, D}, {H, M, S}}) ->
        Wday = calendar:day_of_the_week(Date),
        << (weekday(Wday))/binary, ", ", (pad_int(D))/binary, " ",
-               (month(Mo))/binary, " ", (list_to_binary(integer_to_list(Y)))/binary,
+               (month(Mo))/binary, " ", (integer_to_binary(Y))/binary,
                " ", (pad_int(H))/binary, $:, (pad_int(M))/binary,
                $:, (pad_int(S))/binary, " GMT" >>.
 
@@ -168,7 +142,7 @@ update_rfc1123(_, _, {Date = {Y, Mo, D}, {H, M, S}}) ->
 pad_int(X) when X < 10 ->
        << $0, ($0 + X) >>;
 pad_int(X) ->
-       list_to_binary(integer_to_list(X)).
+       integer_to_binary(X).
 
 -spec weekday(1..7) -> <<_:24>>.
 weekday(1) -> <<"Mon">>;
@@ -196,7 +170,6 @@ month(12) -> <<"Dec">>.
 %% Tests.
 
 -ifdef(TEST).
-
 update_rfc1123_test_() ->
        Tests = [
                {<<"Sat, 14 May 2011 14:25:33 GMT">>, undefined,
@@ -237,5 +210,4 @@ pad_int_test_() ->
                {56, <<"56">>}, {57, <<"57">>}, {58, <<"58">>}, {59, <<"59">>}
        ],
        [{I, fun() -> O = pad_int(I) end} || {I, O} <- Tests].
-
 -endif.
diff --git a/deps/cowboy/src/cowboy_handler.erl b/deps/cowboy/src/cowboy_handler.erl
new file mode 100644 (file)
index 0000000..5eb16b4
--- /dev/null
@@ -0,0 +1,304 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% Handler middleware.
+%%
+%% Execute the handler given by the <em>handler</em> and <em>handler_opts</em>
+%% environment values. The result of this execution is added to the
+%% environment under the <em>result</em> value.
+%%
+%% When using loop handlers, we are receiving data from the socket because we
+%% want to know when the socket gets closed. This is generally not an issue
+%% because these kinds of requests are generally not pipelined, and don't have
+%% a body. If they do have a body, this body is often read in the
+%% <em>init/3</em> callback and this is no problem. Otherwise, this data
+%% accumulates in a buffer until we reach a certain threshold of 5000 bytes
+%% by default. This can be configured through the <em>loop_max_buffer</em>
+%% environment value. The request will be terminated with an
+%% <em>{error, overflow}</em> reason if this threshold is reached.
+-module(cowboy_handler).
+-behaviour(cowboy_middleware).
+
+-export([execute/2]).
+-export([handler_loop/4]).
+
+-record(state, {
+       env :: cowboy_middleware:env(),
+       hibernate = false :: boolean(),
+       loop_buffer_size = 0 :: non_neg_integer(),
+       loop_max_buffer = 5000 :: non_neg_integer() | infinity,
+       loop_timeout = infinity :: timeout(),
+       loop_timeout_ref = undefined :: undefined | reference(),
+       resp_sent = false :: boolean()
+}).
+
+-spec execute(Req, Env)
+       -> {ok, Req, Env} | {suspend, ?MODULE, handler_loop, [any()]}
+       when Req::cowboy_req:req(), Env::cowboy_middleware:env().
+execute(Req, Env) ->
+       {_, Handler} = lists:keyfind(handler, 1, Env),
+       {_, HandlerOpts} = lists:keyfind(handler_opts, 1, Env),
+       MaxBuffer = case lists:keyfind(loop_max_buffer, 1, Env) of
+               false -> 5000;
+               {_, MaxBuffer0} -> MaxBuffer0
+       end,
+       handler_init(Req, #state{env=Env, loop_max_buffer=MaxBuffer},
+               Handler, HandlerOpts).
+
+-spec handler_init(Req, #state{}, module(), any())
+       -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_init(Req, State, Handler, HandlerOpts) ->
+       Transport = cowboy_req:get(transport, Req),
+       try Handler:init({Transport:name(), http}, Req, HandlerOpts) of
+               {ok, Req2, HandlerState} ->
+                       handler_handle(Req2, State, Handler, HandlerState);
+               {loop, Req2, HandlerState} ->
+                       handler_after_callback(Req2, State, Handler, HandlerState);
+               {loop, Req2, HandlerState, hibernate} ->
+                       handler_after_callback(Req2, State#state{hibernate=true},
+                               Handler, HandlerState);
+               {loop, Req2, HandlerState, Timeout} ->
+                       State2 = handler_loop_timeout(State#state{loop_timeout=Timeout}),
+                       handler_after_callback(Req2, State2, Handler, HandlerState);
+               {loop, Req2, HandlerState, Timeout, hibernate} ->
+                       State2 = handler_loop_timeout(State#state{
+                               hibernate=true, loop_timeout=Timeout}),
+                       handler_after_callback(Req2, State2, Handler, HandlerState);
+               {shutdown, Req2, HandlerState} ->
+                       terminate_request(Req2, State, Handler, HandlerState,
+                               {normal, shutdown});
+               {upgrade, protocol, Module} ->
+                       upgrade_protocol(Req, State, Handler, HandlerOpts, Module);
+               {upgrade, protocol, Module, Req2, HandlerOpts2} ->
+                       upgrade_protocol(Req2, State, Handler, HandlerOpts2, Module)
+       catch Class:Reason ->
+               Stacktrace = erlang:get_stacktrace(),
+               cowboy_req:maybe_reply(Stacktrace, Req),
+               erlang:Class([
+                       {reason, Reason},
+                       {mfa, {Handler, init, 3}},
+                       {stacktrace, Stacktrace},
+                       {req, cowboy_req:to_list(Req)},
+                       {opts, HandlerOpts}
+               ])
+       end.
+
+-spec upgrade_protocol(Req, #state{}, module(), any(), module())
+       -> {ok, Req, Env}
+       | {suspend, module(), atom(), any()}
+       | {halt, Req}
+       | {error, cowboy:http_status(), Req}
+       when Req::cowboy_req:req(), Env::cowboy_middleware:env().
+upgrade_protocol(Req, #state{env=Env},
+               Handler, HandlerOpts, Module) ->
+       Module:upgrade(Req, Env, Handler, HandlerOpts).
+
+-spec handler_handle(Req, #state{}, module(), any())
+       -> {ok, Req, cowboy_middleware:env()} when Req::cowboy_req:req().
+handler_handle(Req, State, Handler, HandlerState) ->
+       try Handler:handle(Req, HandlerState) of
+               {ok, Req2, HandlerState2} ->
+                       terminate_request(Req2, State, Handler, HandlerState2,
+                               {normal, shutdown})
+       catch Class:Reason ->
+               Stacktrace = erlang:get_stacktrace(),
+               cowboy_req:maybe_reply(Stacktrace, Req),
+               handler_terminate(Req, Handler, HandlerState, Reason),
+               erlang:Class([
+                       {reason, Reason},
+                       {mfa, {Handler, handle, 2}},
+                       {stacktrace, Stacktrace},
+                       {req, cowboy_req:to_list(Req)},
+                       {state, HandlerState}
+               ])
+       end.
+
+%% Update the state if the response was sent in the callback.
+-spec handler_after_callback(Req, #state{}, module(), any())
+       -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_after_callback(Req, State=#state{resp_sent=false}, Handler,
+               HandlerState) ->
+       receive
+               {cowboy_req, resp_sent} ->
+                       handler_before_loop(Req, State#state{resp_sent=true}, Handler,
+                               HandlerState)
+       after 0 ->
+               handler_before_loop(Req, State, Handler, HandlerState)
+       end;
+handler_after_callback(Req, State, Handler, HandlerState) ->
+       handler_before_loop(Req, State, Handler, HandlerState).
+
+-spec handler_before_loop(Req, #state{}, module(), any())
+       -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_before_loop(Req, State=#state{hibernate=true}, Handler, HandlerState) ->
+       [Socket, Transport] = cowboy_req:get([socket, transport], Req),
+       Transport:setopts(Socket, [{active, once}]),
+       {suspend, ?MODULE, handler_loop,
+               [Req, State#state{hibernate=false}, Handler, HandlerState]};
+handler_before_loop(Req, State, Handler, HandlerState) ->
+       [Socket, Transport] = cowboy_req:get([socket, transport], Req),
+       Transport:setopts(Socket, [{active, once}]),
+       handler_loop(Req, State, Handler, HandlerState).
+
+%% Almost the same code can be found in cowboy_websocket.
+-spec handler_loop_timeout(#state{}) -> #state{}.
+handler_loop_timeout(State=#state{loop_timeout=infinity}) ->
+       State#state{loop_timeout_ref=undefined};
+handler_loop_timeout(State=#state{loop_timeout=Timeout,
+               loop_timeout_ref=PrevRef}) ->
+       _ = case PrevRef of
+               undefined -> ignore;
+               PrevRef -> erlang:cancel_timer(PrevRef)
+       end,
+       TRef = erlang:start_timer(Timeout, self(), ?MODULE),
+       State#state{loop_timeout_ref=TRef}.
+
+-spec handler_loop(Req, #state{}, module(), any())
+       -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_loop(Req, State=#state{loop_buffer_size=NbBytes,
+               loop_max_buffer=Threshold, loop_timeout_ref=TRef,
+               resp_sent=RespSent}, Handler, HandlerState) ->
+       [Socket, Transport] = cowboy_req:get([socket, transport], Req),
+       {OK, Closed, Error} = Transport:messages(),
+       receive
+               {OK, Socket, Data} ->
+                       NbBytes2 = NbBytes + byte_size(Data),
+                       if      NbBytes2 > Threshold ->
+                                       _ = handler_terminate(Req, Handler, HandlerState,
+                                               {error, overflow}),
+                                       _ = if RespSent -> ok; true ->
+                                               cowboy_req:reply(500, Req)
+                                       end,
+                                       exit(normal);
+                               true ->
+                                       Req2 = cowboy_req:append_buffer(Data, Req),
+                                       State2 = handler_loop_timeout(State#state{
+                                               loop_buffer_size=NbBytes2}),
+                                       handler_before_loop(Req2, State2, Handler, HandlerState)
+                       end;
+               {Closed, Socket} ->
+                       terminate_request(Req, State, Handler, HandlerState,
+                               {error, closed});
+               {Error, Socket, Reason} ->
+                       terminate_request(Req, State, Handler, HandlerState,
+                               {error, Reason});
+               {timeout, TRef, ?MODULE} ->
+                       handler_after_loop(Req, State, Handler, HandlerState,
+                               {normal, timeout});
+               {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) ->
+                       handler_loop(Req, State, Handler, HandlerState);
+               Message ->
+                       %% We set the socket back to {active, false} mode in case
+                       %% the handler is going to call recv. We also flush any
+                       %% data received after that and put it into the buffer.
+                       %% We do not check the size here, if data keeps coming
+                       %% we'll error out on the next packet received.
+                       Transport:setopts(Socket, [{active, false}]),
+                       Req2 = receive {OK, Socket, Data} ->
+                               cowboy_req:append_buffer(Data, Req)
+                       after 0 ->
+                               Req
+                       end,
+                       handler_call(Req2, State, Handler, HandlerState, Message)
+       end.
+
+-spec handler_call(Req, #state{}, module(), any(), any())
+       -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_call(Req, State=#state{resp_sent=RespSent},
+               Handler, HandlerState, Message) ->
+       try Handler:info(Message, Req, HandlerState) of
+               {ok, Req2, HandlerState2} ->
+                       handler_after_loop(Req2, State, Handler, HandlerState2,
+                               {normal, shutdown});
+               {loop, Req2, HandlerState2} ->
+                       handler_after_callback(Req2, State, Handler, HandlerState2);
+               {loop, Req2, HandlerState2, hibernate} ->
+                       handler_after_callback(Req2, State#state{hibernate=true},
+                               Handler, HandlerState2)
+       catch Class:Reason ->
+               Stacktrace = erlang:get_stacktrace(),
+               if RespSent -> ok; true ->
+                       cowboy_req:maybe_reply(Stacktrace, Req)
+               end,
+               handler_terminate(Req, Handler, HandlerState, Reason),
+               erlang:Class([
+                       {reason, Reason},
+                       {mfa, {Handler, info, 3}},
+                       {stacktrace, Stacktrace},
+                       {req, cowboy_req:to_list(Req)},
+                       {state, HandlerState}
+               ])
+       end.
+
+%% It is sometimes important to make a socket passive as it was initially
+%% and as it is expected to be by cowboy_protocol, right after we're done
+%% with loop handling. The browser may freely pipeline a bunch of requests
+%% if previous one was, say, a JSONP long-polling request.
+-spec handler_after_loop(Req, #state{}, module(), any(),
+       {normal, timeout | shutdown} | {error, atom()}) ->
+       {ok, Req, cowboy_middleware:env()} when Req::cowboy_req:req().
+handler_after_loop(Req, State, Handler, HandlerState, Reason) ->
+       [Socket, Transport] = cowboy_req:get([socket, transport], Req),
+       Transport:setopts(Socket, [{active, false}]),
+       {OK, _Closed, _Error} = Transport:messages(),
+       Req2 = receive
+               {OK, Socket, Data} ->
+                       cowboy_req:append_buffer(Data, Req)
+       after 0 ->
+               Req
+       end,
+       terminate_request(Req2, State, Handler, HandlerState, Reason).
+
+-spec terminate_request(Req, #state{}, module(), any(),
+       {normal, timeout | shutdown} | {error, atom()}) ->
+       {ok, Req, cowboy_middleware:env()} when Req::cowboy_req:req().
+terminate_request(Req, #state{env=Env, loop_timeout_ref=TRef},
+               Handler, HandlerState, Reason) ->
+       HandlerRes = handler_terminate(Req, Handler, HandlerState, Reason),
+       _ = case TRef of
+               undefined -> ignore;
+               TRef -> erlang:cancel_timer(TRef)
+       end,
+       flush_timeouts(),
+       {ok, Req, [{result, HandlerRes}|Env]}.
+
+-spec handler_terminate(cowboy_req:req(), module(), any(),
+       {normal, timeout | shutdown} | {error, atom()}) -> ok.
+handler_terminate(Req, Handler, HandlerState, Reason) ->
+       try
+               Handler:terminate(Reason, cowboy_req:lock(Req), HandlerState)
+       catch Class:Reason2 ->
+               erlang:Class([
+                       {reason, Reason2},
+                       {mfa, {Handler, terminate, 3}},
+                       {stacktrace, erlang:get_stacktrace()},
+                       {req, cowboy_req:to_list(Req)},
+                       {state, HandlerState},
+                       {terminate_reason, Reason}
+               ])
+       end.
+
+-spec flush_timeouts() -> ok.
+flush_timeouts() ->
+       receive
+               {timeout, TRef, ?MODULE} when is_reference(TRef) ->
+                       flush_timeouts()
+       after 0 ->
+               ok
+       end.
similarity index 69%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http.erl
rename to deps/cowboy/src/cowboy_http.erl
index d7261c88a4c87a69177060a99693f1c92578cccb..e2760e9e59502959422648714f13a58dec934299 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
 %% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
-%% @doc Core HTTP parsing API.
+%% Deprecated HTTP parsing API.
 -module(cowboy_http).
 
 %% Parsing.
--export([list/2, nonempty_list/2, content_type/1, content_type_params/3,
-       media_range/2, conneg/2, language_range/2, entity_tag_match/1,
-       http_date/1, rfc1123_date/1, rfc850_date/1, asctime_date/1,
-       whitespace/2, digits/1, token/2, token_ci/2, quoted_string/2]).
-
-%% Interpretation.
--export([connection_to_atom/1, urldecode/1, urldecode/2, urlencode/1,
-       urlencode/2]).
-
--type method() :: 'OPTIONS' | 'GET' | 'HEAD'
-       | 'POST' | 'PUT' | 'DELETE' | 'TRACE' | binary().
--type uri() :: '*' | {absoluteURI, http | https, Host::binary(),
-       Port::integer() | undefined, Path::binary()}
-       | {scheme, Scheme::binary(), binary()}
-       | {abs_path, binary()} | binary().
--type version() :: {Major::non_neg_integer(), Minor::non_neg_integer()}.
--type header() :: 'Cache-Control' | 'Connection' | 'Date' | 'Pragma'
-       | 'Transfer-Encoding' | 'Upgrade' | 'Via' | 'Accept' | 'Accept-Charset'
-       | 'Accept-Encoding' | 'Accept-Language' | 'Authorization' | 'From' | 'Host'
-       | 'If-Modified-Since' | 'If-Match' | 'If-None-Match' | 'If-Range'
-       | 'If-Unmodified-Since' | 'Max-Forwards' | 'Proxy-Authorization' | 'Range'
-       | 'Referer' | 'User-Agent' | 'Age' | 'Location' | 'Proxy-Authenticate'
-       | 'Public' | 'Retry-After' | 'Server' | 'Vary' | 'Warning'
-       | 'Www-Authenticate' | 'Allow' | 'Content-Base' | 'Content-Encoding'
-       | 'Content-Language' | 'Content-Length' | 'Content-Location'
-       | 'Content-Md5' | 'Content-Range' | 'Content-Type' | 'Etag'
-       | 'Expires' | 'Last-Modified' | 'Accept-Ranges' | 'Set-Cookie'
-       | 'Set-Cookie2' | 'X-Forwarded-For' | 'Cookie' | 'Keep-Alive'
-       | 'Proxy-Connection' | binary().
--type fake_iodata() :: iolist() | binary().
--type headers() :: [{header(), fake_iodata()}].
--type status() :: non_neg_integer() | binary().
-
--export_type([method/0, uri/0, version/0, header/0, headers/0, status/0]).
-
--include("include/http.hrl").
--include_lib("eunit/include/eunit.hrl").
+-export([list/2]).
+-export([nonempty_list/2]).
+-export([content_type/1]).
+-export([media_range/2]).
+-export([conneg/2]).
+-export([language_range/2]).
+-export([entity_tag_match/1]).
+-export([expectation/2]).
+-export([params/2]).
+-export([http_date/1]).
+-export([rfc1123_date/1]).
+-export([rfc850_date/1]).
+-export([asctime_date/1]).
+-export([whitespace/2]).
+-export([digits/1]).
+-export([token/2]).
+-export([token_ci/2]).
+-export([quoted_string/2]).
+-export([authorization/2]).
+-export([range/1]).
+-export([parameterized_tokens/1]).
+
+%% Decoding.
+-export([ce_identity/1]).
 
 %% Parsing.
 
-%% @doc Parse a non-empty list of the given type.
 -spec nonempty_list(binary(), fun()) -> [any(), ...] | {error, badarg}.
 nonempty_list(Data, Fun) ->
        case list(Data, Fun, []) of
@@ -66,7 +52,6 @@ nonempty_list(Data, Fun) ->
                L -> lists:reverse(L)
        end.
 
-%% @doc Parse a list of the given type.
 -spec list(binary(), fun()) -> list() | {error, badarg}.
 list(Data, Fun) ->
        case list(Data, Fun, []) of
@@ -94,42 +79,27 @@ list(Data, Fun, Acc) ->
                                end)
                end).
 
-%% @doc Parse a content type.
+%% We lowercase the charset header as we know it's case insensitive.
 -spec content_type(binary()) -> any().
 content_type(Data) ->
        media_type(Data,
                fun (Rest, Type, SubType) ->
-                               content_type_params(Rest,
-                                       fun (Params) -> {Type, SubType, Params} end, [])
-               end).
-
--spec content_type_params(binary(), fun(), list({binary(), binary()}))
-       -> any().
-content_type_params(Data, Fun, Acc) ->
-       whitespace(Data,
-               fun (<< $;, Rest/binary >>) -> content_type_param(Rest, Fun, Acc);
-                       (<<>>) -> Fun(lists:reverse(Acc));
-                       (_Rest) -> {error, badarg}
-               end).
-
--spec content_type_param(binary(), fun(), list({binary(), binary()}))
-       -> any().
-content_type_param(Data, Fun, Acc) ->
-       whitespace(Data,
-               fun (Rest) ->
-                               token_ci(Rest,
-                                       fun (_Rest2, <<>>) -> {error, badarg};
-                                               (<< $=, Rest2/binary >>, Attr) ->
-                                                       word(Rest2,
-                                                               fun (Rest3, Value) ->
-                                                                               content_type_params(Rest3, Fun,
-                                                                                       [{Attr, Value}|Acc])
-                                                               end);
-                                               (_Rest2, _Attr) -> {error, badarg}
-                                       end)
+                       params(Rest,
+                               fun (<<>>, Params) ->
+                                               case lists:keyfind(<<"charset">>, 1, Params) of
+                                                       false ->
+                                                               {Type, SubType, Params};
+                                                       {_, Charset} ->
+                                                               Charset2 = cowboy_bstr:to_lower(Charset),
+                                                               Params2 = lists:keyreplace(<<"charset">>,
+                                                                       1, Params, {<<"charset">>, Charset2}),
+                                                               {Type, SubType, Params2}
+                                               end;
+                                       (_Rest2, _) ->
+                                               {error, badarg}
+                               end)
                end).
 
-%% @doc Parse a media range.
 -spec media_range(binary(), fun()) -> any().
 media_range(Data, Fun) ->
        media_type(Data,
@@ -172,7 +142,6 @@ media_range_param_value(Data, Fun, Type, SubType, Acc, Attr) ->
                                Type, SubType, [{Attr, Value}|Acc])
                end).
 
-%% @doc Parse a media type.
 -spec media_type(binary(), fun()) -> any().
 media_type(Data, Fun) ->
        token_ci(Data,
@@ -182,6 +151,13 @@ media_type(Data, Fun) ->
                                        fun (_Rest2, <<>>) -> {error, badarg};
                                                (Rest2, SubType) -> Fun(Rest2, Type, SubType)
                                        end);
+                       %% This is a non-strict parsing clause required by some user agents
+                       %% that use * instead of */* in the list of media types.
+                       (Rest, <<"*">> = Type) ->
+                               token_ci(<<"*", Rest/binary>>,
+                                       fun (_Rest2, <<>>) -> {error, badarg};
+                                               (Rest2, SubType) -> Fun(Rest2, Type, SubType)
+                                       end);
                        (_Rest, _Type) -> {error, badarg}
                end).
 
@@ -225,8 +201,6 @@ accept_ext_value(Data, Fun, Type, SubType, Params, Quality, Acc, Attr) ->
                                        Type, SubType, Params, Quality, [{Attr, Value}|Acc])
                end).
 
-%% @doc Parse a conneg header (Accept-Charset, Accept-Encoding),
-%% followed by an optional quality value.
 -spec conneg(binary(), fun()) -> any().
 conneg(Data, Fun) ->
        token_ci(Data,
@@ -238,7 +212,6 @@ conneg(Data, Fun) ->
                                        end)
                end).
 
-%% @doc Parse a language range, followed by an optional quality value.
 -spec language_range(binary(), fun()) -> any().
 language_range(<< $*, Rest/binary >>, Fun) ->
        language_range_ret(Rest, Fun, '*');
@@ -268,7 +241,7 @@ language_tag(Data, Fun) ->
 
 -spec language_subtag(binary(), fun(), binary(), [binary()]) -> any().
 language_subtag(Data, Fun, Tag, Acc) ->
-       alpha(Data,
+       alphanumeric(Data,
                fun (_Rest, SubTag) when byte_size(SubTag) =:= 0;
                                byte_size(SubTag) > 8 -> {error, badarg};
                        (<< $-, Rest/binary >>, SubTag) ->
@@ -285,18 +258,24 @@ maybe_qparam(Data, Fun) ->
                fun (<< $;, Rest/binary >>) ->
                        whitespace(Rest,
                                fun (Rest2) ->
-                                       qparam(Rest2, Fun)
+                                       %% This is a non-strict parsing clause required by some user agents
+                                       %% that use the wrong delimiter putting a charset where a qparam is
+                                       %% expected.
+                                       try qparam(Rest2, Fun) of
+                                               Result -> Result
+                                       catch
+                                               error:function_clause ->
+                                                       Fun(<<",", Rest2/binary>>, 1000)
+                                       end
                                end);
                        (Rest) ->
                                Fun(Rest, 1000)
                end).
 
-%% @doc Parse a quality parameter string (for example q=0.500).
 -spec qparam(binary(), fun()) -> any().
 qparam(<< Q, $=, Data/binary >>, Fun) when Q =:= $q; Q =:= $Q ->
        qvalue(Data, Fun).
 
-%% @doc Parse either a list of entity tags or a "*".
 -spec entity_tag_match(binary()) -> any().
 entity_tag_match(<< $*, Rest/binary >>) ->
        whitespace(Rest,
@@ -306,7 +285,6 @@ entity_tag_match(<< $*, Rest/binary >>) ->
 entity_tag_match(Data) ->
        nonempty_list(Data, fun entity_tag/2).
 
-%% @doc Parse an entity-tag.
 -spec entity_tag(binary(), fun()) -> any().
 entity_tag(<< "W/", Rest/binary >>, Fun) ->
        opaque_tag(Rest, Fun, weak);
@@ -320,9 +298,52 @@ opaque_tag(Data, Fun, Strength) ->
                        (Rest, OpaqueTag) -> Fun(Rest, {Strength, OpaqueTag})
                end).
 
-%% @doc Parse an HTTP date (RFC1123, RFC850 or asctime date).
-%% @end
-%%
+-spec expectation(binary(), fun()) -> any().
+expectation(Data, Fun) ->
+       token_ci(Data,
+               fun (_Rest, <<>>) -> {error, badarg};
+                       (<< $=, Rest/binary >>, Expectation) ->
+                               word(Rest,
+                                       fun (Rest2, ExtValue) ->
+                                               params(Rest2, fun (Rest3, ExtParams) ->
+                                                       Fun(Rest3, {Expectation, ExtValue, ExtParams})
+                                               end)
+                                       end);
+                       (Rest, Expectation) ->
+                               Fun(Rest, Expectation)
+               end).
+
+-spec params(binary(), fun()) -> any().
+params(Data, Fun) ->
+       params(Data, Fun, []).
+
+-spec params(binary(), fun(), [{binary(), binary()}]) -> any().
+params(Data, Fun, Acc) ->
+       whitespace(Data,
+               fun (<< $;, Rest/binary >>) ->
+                               param(Rest,
+                                       fun (Rest2, Attr, Value) ->
+                                                       params(Rest2, Fun, [{Attr, Value}|Acc])
+                                       end);
+                       (Rest) ->
+                               Fun(Rest, lists:reverse(Acc))
+               end).
+
+-spec param(binary(), fun()) -> any().
+param(Data, Fun) ->
+       whitespace(Data,
+               fun (Rest) ->
+                               token_ci(Rest,
+                                       fun (_Rest2, <<>>) -> {error, badarg};
+                                               (<< $=, Rest2/binary >>, Attr) ->
+                                                       word(Rest2,
+                                                               fun (Rest3, Value) ->
+                                                                               Fun(Rest3, Attr, Value)
+                                                               end);
+                                               (_Rest2, _Attr) -> {error, badarg}
+                                       end)
+               end).
+
 %% While this may not be the most efficient date parsing we can do,
 %% it should work fine for our purposes because all HTTP dates should
 %% be sent as RFC1123 dates in HTTP/1.1.
@@ -345,7 +366,6 @@ http_date(Data) ->
                        HTTPDate
        end.
 
-%% @doc Parse an RFC1123 date.
 -spec rfc1123_date(binary()) -> any().
 rfc1123_date(Data) ->
        wkday(Data,
@@ -365,7 +385,6 @@ rfc1123_date(Data) ->
                                {error, badarg}
                end).
 
-%% @doc Parse an RFC850 date.
 -spec rfc850_date(binary()) -> any().
 %% From the RFC:
 %% HTTP/1.1 clients and caches SHOULD assume that an RFC-850 date
@@ -389,7 +408,6 @@ rfc850_date(Data) ->
                                {error, badarg}
                end).
 
-%% @doc Parse an asctime date.
 -spec asctime_date(binary()) -> any().
 asctime_date(Data) ->
        wkday(Data,
@@ -407,7 +425,7 @@ asctime_date(Data) ->
                                                        {error, badarg}
                                        end);
                        (_Any, _WkDay) ->
-                               {error, badarg1}
+                               {error, badarg}
                end).
 
 -spec asctime_year(binary(), tuple(), tuple()) -> any().
@@ -548,7 +566,6 @@ time(<< H1, H2, ":", M1, M2, ":", S1, S2, Rest/binary >>, Fun)
                        {error, badarg}
        end.
 
-%% @doc Skip whitespace.
 -spec whitespace(binary(), fun()) -> any().
 whitespace(<< C, Rest/binary >>, Fun)
                when C =:= $\s; C =:= $\t ->
@@ -556,7 +573,6 @@ whitespace(<< C, Rest/binary >>, Fun)
 whitespace(Data, Fun) ->
        Fun(Data).
 
-%% @doc Parse a list of digits as a non negative integer.
 -spec digits(binary()) -> non_neg_integer() | {error, badarg}.
 digits(Data) ->
        digits(Data,
@@ -583,8 +599,6 @@ digits(<< C, Rest/binary >>, Fun, Acc)
 digits(Data, Fun, Acc) ->
        Fun(Data, Acc).
 
-%% @doc Parse a list of case-insensitive alpha characters.
-%%
 %% Changes all characters to lowercase.
 -spec alpha(binary(), fun()) -> any().
 alpha(Data, Fun) ->
@@ -601,6 +615,22 @@ alpha(<< C, Rest/binary >>, Fun, Acc)
 alpha(Data, Fun, Acc) ->
        Fun(Data, Acc).
 
+-spec alphanumeric(binary(), fun()) -> any().
+alphanumeric(Data, Fun) ->
+       alphanumeric(Data, Fun, <<>>).
+
+-spec alphanumeric(binary(), fun(), binary()) -> any().
+alphanumeric(<<>>, Fun, Acc) ->
+       Fun(<<>>, Acc);
+alphanumeric(<< C, Rest/binary >>, Fun, Acc)
+               when C >= $a andalso C =< $z;
+                        C >= $A andalso C =< $Z;
+                        C >= $0 andalso C =< $9 ->
+       C2 = cowboy_bstr:char_to_lower(C),
+       alphanumeric(Rest, Fun, << Acc/binary, C2 >>);
+alphanumeric(Data, Fun, Acc) ->
+       Fun(Data, Acc).
+
 %% @doc Parse either a token or a quoted string.
 -spec word(binary(), fun()) -> any().
 word(Data = << $", _/binary >>, Fun) ->
@@ -611,14 +641,11 @@ word(Data, Fun) ->
                        (Rest, Token) -> Fun(Rest, Token)
                end).
 
-%% @doc Parse a case-insensitive token.
-%%
 %% Changes all characters to lowercase.
 -spec token_ci(binary(), fun()) -> any().
 token_ci(Data, Fun) ->
        token(Data, Fun, ci, <<>>).
 
-%% @doc Parse a token.
 -spec token(binary(), fun()) -> any().
 token(Data, Fun) ->
        token(Data, Fun, cs, <<>>).
@@ -639,10 +666,11 @@ token(<< C, Rest/binary >>, Fun, Case = ci, Acc) ->
 token(<< C, Rest/binary >>, Fun, Case, Acc) ->
        token(Rest, Fun, Case, << Acc/binary, C >>).
 
-%% @doc Parse a quoted string.
 -spec quoted_string(binary(), fun()) -> any().
 quoted_string(<< $", Rest/binary >>, Fun) ->
-       quoted_string(Rest, Fun, <<>>).
+       quoted_string(Rest, Fun, <<>>);
+quoted_string(_, _Fun) ->
+    {error, badarg}.
 
 -spec quoted_string(binary(), fun(), binary()) -> any().
 quoted_string(<<>>, _Fun, _Acc) ->
@@ -654,10 +682,12 @@ quoted_string(<< $\\, C, Rest/binary >>, Fun, Acc) ->
 quoted_string(<< C, Rest/binary >>, Fun, Acc) ->
        quoted_string(Rest, Fun, << Acc/binary, C >>).
 
-%% @doc Parse a quality value.
 -spec qvalue(binary(), fun()) -> any().
 qvalue(<< $0, $., Rest/binary >>, Fun) ->
        qvalue(Rest, Fun, 0, 100);
+%% Some user agents use q=.x instead of q=0.x
+qvalue(<< $., Rest/binary >>, Fun) ->
+       qvalue(Rest, Fun, 0, 100);
 qvalue(<< $0, Rest/binary >>, Fun) ->
        Fun(Rest, 0);
 qvalue(<< $1, $., $0, $0, $0, Rest/binary >>, Fun) ->
@@ -680,143 +710,193 @@ qvalue(<< C, Rest/binary >>, Fun, Q, M)
 qvalue(Data, Fun, Q, _M) ->
        Fun(Data, Q).
 
+%% Only RFC2617 Basic authorization is supported so far.
+-spec authorization(binary(), binary()) -> {binary(), any()} | {error, badarg}.
+authorization(UserPass, Type = <<"basic">>) ->
+       whitespace(UserPass,
+               fun(D) ->
+                       authorization_basic_userid(base64:mime_decode(D),
+                               fun(Rest, Userid) ->
+                                       authorization_basic_password(Rest,
+                                               fun(Password) ->
+                                                       {Type, {Userid, Password}}
+                                               end)
+                               end)
+               end);
+authorization(String, Type) ->
+       whitespace(String, fun(Rest) -> {Type, Rest} end).
 
-%% Interpretation.
+-spec authorization_basic_userid(binary(), fun()) -> any().
+authorization_basic_userid(Data, Fun) ->
+       authorization_basic_userid(Data, Fun, <<>>).
 
-%% @doc Walk through a tokens list and return whether
-%% the connection is keepalive or closed.
-%%
-%% The connection token is expected to be lower-case.
--spec connection_to_atom([binary()]) -> keepalive | close.
-connection_to_atom([]) ->
-       keepalive;
-connection_to_atom([<<"keep-alive">>|_Tail]) ->
-       keepalive;
-connection_to_atom([<<"close">>|_Tail]) ->
-       close;
-connection_to_atom([_Any|Tail]) ->
-       connection_to_atom(Tail).
-
-%% @doc Decode a URL encoded binary.
-%% @equiv urldecode(Bin, crash)
--spec urldecode(binary()) -> binary().
-urldecode(Bin) when is_binary(Bin) ->
-       urldecode(Bin, <<>>, crash).
-
-%% @doc Decode a URL encoded binary.
-%% The second argument specifies how to handle percent characters that are not
-%% followed by two valid hex characters. Use `skip' to ignore such errors,
-%% if `crash' is used the function will fail with the reason `badarg'.
--spec urldecode(binary(), crash | skip) -> binary().
-urldecode(Bin, OnError) when is_binary(Bin) ->
-       urldecode(Bin, <<>>, OnError).
-
--spec urldecode(binary(), binary(), crash | skip) -> binary().
-urldecode(<<$%, H, L, Rest/binary>>, Acc, OnError) ->
-       G = unhex(H),
-       M = unhex(L),
-       if      G =:= error; M =:= error ->
-               case OnError of skip -> ok; crash -> erlang:error(badarg) end,
-               urldecode(<<H, L, Rest/binary>>, <<Acc/binary, $%>>, OnError);
-               true ->
-               urldecode(Rest, <<Acc/binary, (G bsl 4 bor M)>>, OnError)
-       end;
-urldecode(<<$%, Rest/binary>>, Acc, OnError) ->
-       case OnError of skip -> ok; crash -> erlang:error(badarg) end,
-       urldecode(Rest, <<Acc/binary, $%>>, OnError);
-urldecode(<<$+, Rest/binary>>, Acc, OnError) ->
-       urldecode(Rest, <<Acc/binary, $ >>, OnError);
-urldecode(<<C, Rest/binary>>, Acc, OnError) ->
-       urldecode(Rest, <<Acc/binary, C>>, OnError);
-urldecode(<<>>, Acc, _OnError) ->
-       Acc.
-
--spec unhex(byte()) -> byte() | error.
-unhex(C) when C >= $0, C =< $9 -> C - $0;
-unhex(C) when C >= $A, C =< $F -> C - $A + 10;
-unhex(C) when C >= $a, C =< $f -> C - $a + 10;
-unhex(_) -> error.
-
-
-%% @doc URL encode a string binary.
-%% @equiv urlencode(Bin, [])
--spec urlencode(binary()) -> binary().
-urlencode(Bin) ->
-       urlencode(Bin, []).
-
-%% @doc URL encode a string binary.
-%% The `noplus' option disables the default behaviour of quoting space
-%% characters, `\s', as `+'. The `upper' option overrides the default behaviour
-%% of writing hex numbers using lowecase letters to using uppercase letters
-%% instead.
--spec urlencode(binary(), [noplus|upper]) -> binary().
-urlencode(Bin, Opts) ->
-       Plus = not proplists:get_value(noplus, Opts, false),
-       Upper = proplists:get_value(upper, Opts, false),
-       urlencode(Bin, <<>>, Plus, Upper).
-
-urlencode(<<C, Rest/binary>>, Acc, P=Plus, U=Upper) ->
-       if      C >= $0, C =< $9 -> urlencode(Rest, <<Acc/binary, C>>, P, U);
-               C >= $A, C =< $Z -> urlencode(Rest, <<Acc/binary, C>>, P, U);
-               C >= $a, C =< $z -> urlencode(Rest, <<Acc/binary, C>>, P, U);
-               C =:= $.; C =:= $-; C =:= $~; C =:= $_ ->
-               urlencode(Rest, <<Acc/binary, C>>, P, U);
-               C =:= $ , Plus ->
-               urlencode(Rest, <<Acc/binary, $+>>, P, U);
-               true ->
-               H = C band 16#F0 bsr 4, L = C band 16#0F,
-               H1 = if Upper -> tohexu(H); true -> tohexl(H) end,
-               L1 = if Upper -> tohexu(L); true -> tohexl(L) end,
-               urlencode(Rest, <<Acc/binary, $%, H1, L1>>, P, U)
-       end;
-urlencode(<<>>, Acc, _Plus, _Upper) ->
-       Acc.
+authorization_basic_userid(<<>>, _Fun, _Acc) ->
+       {error, badarg};
+authorization_basic_userid(<<C, _Rest/binary>>, _Fun, Acc)
+               when C < 32; C =:= 127; (C =:=$: andalso Acc =:= <<>>) ->
+       {error, badarg};
+authorization_basic_userid(<<$:, Rest/binary>>, Fun, Acc) ->
+       Fun(Rest, Acc);
+authorization_basic_userid(<<C, Rest/binary>>, Fun, Acc) ->
+       authorization_basic_userid(Rest, Fun, <<Acc/binary, C>>).
 
--spec tohexu(byte()) -> byte().
-tohexu(C) when C < 10 -> $0 + C;
-tohexu(C) when C < 17 -> $A + C - 10.
+-spec authorization_basic_password(binary(), fun()) -> any().
+authorization_basic_password(Data, Fun) ->
+       authorization_basic_password(Data, Fun, <<>>).
 
--spec tohexl(byte()) -> byte().
-tohexl(C) when C < 10 -> $0 + C;
-tohexl(C) when C < 17 -> $a + C - 10.
+authorization_basic_password(<<C, _Rest/binary>>, _Fun, _Acc)
+               when C < 32; C=:= 127 ->
+       {error, badarg};
+authorization_basic_password(<<>>, Fun, Acc) ->
+       Fun(Acc);
+authorization_basic_password(<<C, Rest/binary>>, Fun, Acc) ->
+       authorization_basic_password(Rest, Fun, <<Acc/binary, C>>).
+
+-spec range(binary()) -> {Unit, [Range]} | {error, badarg} when
+               Unit :: binary(),
+               Range :: {non_neg_integer(), non_neg_integer() | infinity} | neg_integer().
+range(Data) ->
+       token_ci(Data, fun range/2).
+
+range(Data, Token) ->
+       whitespace(Data,
+               fun(<<"=", Rest/binary>>) ->
+                       case list(Rest, fun range_beginning/2) of
+                               {error, badarg} ->
+                                       {error, badarg};
+                               Ranges ->
+                                       {Token, Ranges}
+                       end;
+                  (_) ->
+                       {error, badarg}
+               end).
 
+range_beginning(Data, Fun) ->
+       range_digits(Data, suffix,
+               fun(D, RangeBeginning) ->
+                       range_ending(D, Fun, RangeBeginning)
+               end).
+
+range_ending(Data, Fun, RangeBeginning) ->
+       whitespace(Data,
+               fun(<<"-", R/binary>>) ->
+                       case RangeBeginning of
+                               suffix ->
+                                       range_digits(R, fun(D, RangeEnding) -> Fun(D, -RangeEnding) end);
+                               _ ->
+                                       range_digits(R, infinity,
+                                               fun(D, RangeEnding) ->
+                                                       Fun(D, {RangeBeginning, RangeEnding})
+                                               end)
+                       end;
+                  (_) ->
+                       {error, badarg}
+               end).
+
+-spec range_digits(binary(), fun()) -> any().
+range_digits(Data, Fun) ->
+       whitespace(Data,
+               fun(D) ->
+                       digits(D, Fun)
+               end).
+
+-spec range_digits(binary(), any(), fun()) -> any().
+range_digits(Data, Default, Fun) ->
+       whitespace(Data,
+               fun(<< C, Rest/binary >>) when C >= $0, C =< $9 ->
+                       digits(Rest, Fun, C - $0);
+                  (_) ->
+                       Fun(Data, Default)
+               end).
+
+-spec parameterized_tokens(binary()) -> any().
+parameterized_tokens(Data) ->
+       nonempty_list(Data,
+               fun (D, Fun) ->
+                       token(D,
+                               fun (_Rest, <<>>) -> {error, badarg};
+                                       (Rest, Token) ->
+                                               parameterized_tokens_params(Rest,
+                                                       fun (Rest2, Params) ->
+                                                               Fun(Rest2, {Token, Params})
+                                                       end, [])
+                               end)
+               end).
+
+-spec parameterized_tokens_params(binary(), fun(), [binary() | {binary(), binary()}]) -> any().
+parameterized_tokens_params(Data, Fun, Acc) ->
+       whitespace(Data,
+               fun (<< $;, Rest/binary >>) ->
+                               parameterized_tokens_param(Rest,
+                                       fun (Rest2, Param) ->
+                                                       parameterized_tokens_params(Rest2, Fun, [Param|Acc])
+                                       end);
+                       (Rest) ->
+                               Fun(Rest, lists:reverse(Acc))
+               end).
+
+-spec parameterized_tokens_param(binary(), fun()) -> any().
+parameterized_tokens_param(Data, Fun) ->
+       whitespace(Data,
+               fun (Rest) ->
+                               token(Rest,
+                                       fun (_Rest2, <<>>) -> {error, badarg};
+                                               (<< $=, Rest2/binary >>, Attr) ->
+                                                       word(Rest2,
+                                                               fun (Rest3, Value) ->
+                                                                               Fun(Rest3, {Attr, Value})
+                                                               end);
+                                               (Rest2, Attr) ->
+                                                       Fun(Rest2, Attr)
+                                       end)
+               end).
+
+%% Decoding.
+
+%% @todo Move this to cowlib too I suppose. :-)
+-spec ce_identity(binary()) -> {ok, binary()}.
+ce_identity(Data) ->
+       {ok, Data}.
 
 %% Tests.
 
 -ifdef(TEST).
-
 nonempty_charset_list_test_() ->
-       %% {Value, Result}
        Tests = [
                {<<>>, {error, badarg}},
                {<<"iso-8859-5, unicode-1-1;q=0.8">>, [
                        {<<"iso-8859-5">>, 1000},
                        {<<"unicode-1-1">>, 800}
+               ]},
+               %% Some user agents send this invalid value for the Accept-Charset header
+               {<<"ISO-8859-1;utf-8;q=0.7,*;q=0.7">>, [
+                       {<<"iso-8859-1">>, 1000},
+                       {<<"utf-8">>, 700},
+                       {<<"*">>, 700}
                ]}
        ],
        [{V, fun() -> R = nonempty_list(V, fun conneg/2) end} || {V, R} <- Tests].
 
 nonempty_language_range_list_test_() ->
-       %% {Value, Result}
        Tests = [
                {<<"da, en-gb;q=0.8, en;q=0.7">>, [
                        {<<"da">>, 1000},
                        {<<"en-gb">>, 800},
                        {<<"en">>, 700}
                ]},
-               {<<"en, en-US, en-cockney, i-cherokee, x-pig-latin">>, [
+               {<<"en, en-US, en-cockney, i-cherokee, x-pig-latin, es-419">>, [
                        {<<"en">>, 1000},
                        {<<"en-us">>, 1000},
                        {<<"en-cockney">>, 1000},
                        {<<"i-cherokee">>, 1000},
-                       {<<"x-pig-latin">>, 1000}
+                       {<<"x-pig-latin">>, 1000},
+                       {<<"es-419">>, 1000}
                ]}
        ],
        [{V, fun() -> R = nonempty_list(V, fun language_range/2) end}
                || {V, R} <- Tests].
 
 nonempty_token_list_test_() ->
-       %% {Value, Result}
        Tests = [
                {<<>>, {error, badarg}},
                {<<" ">>, {error, badarg}},
@@ -832,7 +912,6 @@ nonempty_token_list_test_() ->
        [{V, fun() -> R = nonempty_list(V, fun token/2) end} || {V, R} <- Tests].
 
 media_range_list_test_() ->
-       %% {Tokens, Result}
        Tests = [
                {<<"audio/*; q=0.2, audio/basic">>, [
                        {{<<"audio">>, <<"*">>, []}, 200, []},
@@ -865,12 +944,18 @@ media_range_list_test_() ->
                                [{<<"level">>, <<"1">>}, {<<"quoted">>, <<"hi hi hi">>}]}, 123,
                                [<<"standalone">>, {<<"complex">>, <<"gits">>}]},
                        {{<<"text">>, <<"plain">>, []}, 1000, []}
+               ]},
+               {<<"text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2">>, [
+                       {{<<"text">>, <<"html">>, []}, 1000, []},
+                       {{<<"image">>, <<"gif">>, []}, 1000, []},
+                       {{<<"image">>, <<"jpeg">>, []}, 1000, []},
+                       {{<<"*">>, <<"*">>, []}, 200, []},
+                       {{<<"*">>, <<"*">>, []}, 200, []}
                ]}
        ],
        [{V, fun() -> R = list(V, fun media_range/2) end} || {V, R} <- Tests].
 
 entity_tag_match_test_() ->
-       %% {Tokens, Result}
        Tests = [
                {<<"\"xyzzy\"">>, [{strong, <<"xyzzy">>}]},
                {<<"\"xyzzy\", W/\"r2d2xxxx\", \"c3piozzzz\"">>,
@@ -882,7 +967,6 @@ entity_tag_match_test_() ->
        [{V, fun() -> R = entity_tag_match(V) end} || {V, R} <- Tests].
 
 http_date_test_() ->
-       %% {Tokens, Result}
        Tests = [
                {<<"Sun, 06 Nov 1994 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}},
                {<<"Sunday, 06-Nov-94 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}},
@@ -891,38 +975,24 @@ http_date_test_() ->
        [{V, fun() -> R = http_date(V) end} || {V, R} <- Tests].
 
 rfc1123_date_test_() ->
-       %% {Tokens, Result}
        Tests = [
                {<<"Sun, 06 Nov 1994 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}}
        ],
        [{V, fun() -> R = rfc1123_date(V) end} || {V, R} <- Tests].
 
 rfc850_date_test_() ->
-       %% {Tokens, Result}
        Tests = [
                {<<"Sunday, 06-Nov-94 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}}
        ],
        [{V, fun() -> R = rfc850_date(V) end} || {V, R} <- Tests].
 
 asctime_date_test_() ->
-       %% {Tokens, Result}
        Tests = [
                {<<"Sun Nov  6 08:49:37 1994">>, {{1994, 11, 6}, {8, 49, 37}}}
        ],
        [{V, fun() -> R = asctime_date(V) end} || {V, R} <- Tests].
 
-connection_to_atom_test_() ->
-       %% {Tokens, Result}
-       Tests = [
-               {[<<"close">>], close},
-               {[<<"keep-alive">>], keepalive},
-               {[<<"keep-alive">>, <<"upgrade">>], keepalive}
-       ],
-       [{lists:flatten(io_lib:format("~p", [T])),
-               fun() -> R = connection_to_atom(T) end} || {T, R} <- Tests].
-
 content_type_test_() ->
-       %% {ContentType, Result}
        Tests = [
                {<<"text/plain; charset=iso-8859-4">>,
                        {<<"text">>, <<"plain">>, [{<<"charset">>, <<"iso-8859-4">>}]}},
@@ -938,8 +1008,17 @@ content_type_test_() ->
        ],
        [{V, fun () -> R = content_type(V) end} || {V, R} <- Tests].
 
+parameterized_tokens_test_() ->
+       Tests = [
+               {<<"foo">>, [{<<"foo">>, []}]},
+               {<<"bar; baz=2">>, [{<<"bar">>, [{<<"baz">>, <<"2">>}]}]},
+               {<<"bar; baz=2;bat">>, [{<<"bar">>, [{<<"baz">>, <<"2">>}, <<"bat">>]}]},
+               {<<"bar; baz=2;bat=\"z=1,2;3\"">>, [{<<"bar">>, [{<<"baz">>, <<"2">>}, {<<"bat">>, <<"z=1,2;3">>}]}]},
+               {<<"foo, bar; baz=2">>, [{<<"foo">>, []}, {<<"bar">>, [{<<"baz">>, <<"2">>}]}]}
+       ],
+       [{V, fun () -> R = parameterized_tokens(V) end} || {V, R} <- Tests].
+
 digits_test_() ->
-       %% {Digits, Result}
        Tests = [
                {<<"42    ">>, 42},
                {<<"69\t">>, 69},
@@ -947,28 +1026,43 @@ digits_test_() ->
        ],
        [{V, fun() -> R = digits(V) end} || {V, R} <- Tests].
 
-urldecode_test_() ->
-       U = fun urldecode/2,
-       [?_assertEqual(<<" ">>, U(<<"%20">>, crash)),
-        ?_assertEqual(<<" ">>, U(<<"+">>, crash)),
-        ?_assertEqual(<<0>>, U(<<"%00">>, crash)),
-        ?_assertEqual(<<255>>, U(<<"%fF">>, crash)),
-        ?_assertEqual(<<"123">>, U(<<"123">>, crash)),
-        ?_assertEqual(<<"%i5">>, U(<<"%i5">>, skip)),
-        ?_assertEqual(<<"%5">>, U(<<"%5">>, skip)),
-        ?_assertError(badarg, U(<<"%i5">>, crash)),
-        ?_assertError(badarg, U(<<"%5">>, crash))
-       ].
-
-urlencode_test_() ->
-       U = fun urlencode/2,
-       [?_assertEqual(<<"%ff%00">>, U(<<255,0>>, [])),
-        ?_assertEqual(<<"%FF%00">>, U(<<255,0>>, [upper])),
-        ?_assertEqual(<<"+">>, U(<<" ">>, [])),
-        ?_assertEqual(<<"%20">>, U(<<" ">>, [noplus])),
-        ?_assertEqual(<<"aBc">>, U(<<"aBc">>, [])),
-        ?_assertEqual(<<".-~_">>, U(<<".-~_">>, [])),
-        ?_assertEqual(<<"%ff+">>, urlencode(<<255, " ">>))
-       ].
+http_authorization_test_() ->
+       Tests = [
+               {<<"basic">>, <<"QWxsYWRpbjpvcGVuIHNlc2FtZQ==">>,
+                       {<<"basic">>, {<<"Alladin">>, <<"open sesame">>}}},
+               {<<"basic">>, <<"dXNlcm5hbWU6">>,
+                       {<<"basic">>, {<<"username">>, <<>>}}},
+               {<<"basic">>, <<"dXNlcm5hbWUK">>,
+                       {error, badarg}},
+               {<<"basic">>, <<"_[]@#$%^&*()-AA==">>,
+                       {error, badarg}},
+               {<<"basic">>, <<"dXNlcjpwYXNzCA==">>,
+                       {error, badarg}},
+               {<<"bearer">>, <<" some_secret_key">>,
+                       {<<"bearer">>,<<"some_secret_key">>}}
+       ],
+       [{V, fun() -> R = authorization(V,T) end} || {T, V, R} <- Tests].
 
+http_range_test_() ->
+       Tests = [
+               {<<"bytes=1-20">>,
+                       {<<"bytes">>, [{1, 20}]}},
+               {<<"bytes=-100">>,
+                       {<<"bytes">>, [-100]}},
+               {<<"bytes=1-">>,
+                       {<<"bytes">>, [{1, infinity}]}},
+               {<<"bytes=1-20,30-40,50-">>,
+                       {<<"bytes">>, [{1, 20}, {30, 40}, {50, infinity}]}},
+               {<<"bytes = 1 - 20 , 50 - , - 300 ">>,
+                       {<<"bytes">>, [{1, 20}, {50, infinity}, -300]}},
+               {<<"bytes=1-20,-500,30-40">>,
+                       {<<"bytes">>, [{1, 20}, -500, {30, 40}]}},
+               {<<"test=1-20,-500,30-40">>,
+                       {<<"test">>, [{1, 20}, -500, {30, 40}]}},
+               {<<"bytes=-">>,
+                       {error, badarg}},
+               {<<"bytes=-30,-">>,
+                       {error, badarg}}
+       ],
+       [fun() -> R = range(V) end ||{V, R} <- Tests].
 -endif.
diff --git a/deps/cowboy/src/cowboy_http_handler.erl b/deps/cowboy/src/cowboy_http_handler.erl
new file mode 100644 (file)
index 0000000..14c7987
--- /dev/null
@@ -0,0 +1,37 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_http_handler).
+
+-type opts() :: any().
+-type state() :: any().
+-type terminate_reason() :: {normal, shutdown}
+       | {normal, timeout} %% Only occurs in loop handlers.
+       | {error, closed} %% Only occurs in loop handlers.
+       | {error, overflow} %% Only occurs in loop handlers.
+       | {error, atom()}.
+
+-callback init({atom(), http}, Req, opts())
+       -> {ok, Req, state()}
+       | {loop, Req, state()}
+       | {loop, Req, state(), hibernate}
+       | {loop, Req, state(), timeout()}
+       | {loop, Req, state(), timeout(), hibernate}
+       | {shutdown, Req, state()}
+       | {upgrade, protocol, module()}
+       | {upgrade, protocol, module(), Req, opts()}
+       when Req::cowboy_req:req().
+-callback handle(Req, State) -> {ok, Req, State}
+       when Req::cowboy_req:req(), State::state().
+-callback terminate(terminate_reason(), cowboy_req:req(), state()) -> ok.
diff --git a/deps/cowboy/src/cowboy_loop_handler.erl b/deps/cowboy/src/cowboy_loop_handler.erl
new file mode 100644 (file)
index 0000000..edef77f
--- /dev/null
@@ -0,0 +1,40 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_loop_handler).
+
+-type opts() :: any().
+-type state() :: any().
+-type terminate_reason() :: {normal, shutdown}
+       | {normal, timeout}
+       | {error, closed}
+       | {error, overflow}
+       | {error, atom()}.
+
+-callback init({atom(), http}, Req, opts())
+       -> {ok, Req, state()}
+       | {loop, Req, state()}
+       | {loop, Req, state(), hibernate}
+       | {loop, Req, state(), timeout()}
+       | {loop, Req, state(), timeout(), hibernate}
+       | {shutdown, Req, state()}
+       | {upgrade, protocol, module()}
+       | {upgrade, protocol, module(), Req, opts()}
+       when Req::cowboy_req:req().
+-callback info(any(), Req, State)
+       -> {ok, Req, State}
+       | {loop, Req, State}
+       | {loop, Req, State, hibernate}
+       when Req::cowboy_req:req(), State::state().
+-callback terminate(terminate_reason(), cowboy_req:req(), state()) -> ok.
diff --git a/deps/cowboy/src/cowboy_middleware.erl b/deps/cowboy/src/cowboy_middleware.erl
new file mode 100644 (file)
index 0000000..fa0f5bc
--- /dev/null
@@ -0,0 +1,25 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_middleware).
+
+-type env() :: [{atom(), any()}].
+-export_type([env/0]).
+
+-callback execute(Req, Env)
+       -> {ok, Req, Env}
+       | {suspend, module(), atom(), [any()]}
+       | {halt, Req}
+       | {error, cowboy:http_status(), Req}
+       when Req::cowboy_req:req(), Env::env().
diff --git a/deps/cowboy/src/cowboy_protocol.erl b/deps/cowboy/src/cowboy_protocol.erl
new file mode 100644 (file)
index 0000000..1026d28
--- /dev/null
@@ -0,0 +1,510 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_protocol).
+
+%% API.
+-export([start_link/4]).
+
+%% Internal.
+-export([init/4]).
+-export([parse_request/3]).
+-export([resume/6]).
+
+-type opts() :: [{compress, boolean()}
+       | {env, cowboy_middleware:env()}
+       | {max_empty_lines, non_neg_integer()}
+       | {max_header_name_length, non_neg_integer()}
+       | {max_header_value_length, non_neg_integer()}
+       | {max_headers, non_neg_integer()}
+       | {max_keepalive, non_neg_integer()}
+       | {max_request_line_length, non_neg_integer()}
+       | {middlewares, [module()]}
+       | {onrequest, cowboy:onrequest_fun()}
+       | {onresponse, cowboy:onresponse_fun()}
+       | {timeout, timeout()}].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module(),
+       middlewares :: [module()],
+       compress :: boolean(),
+       env :: cowboy_middleware:env(),
+       onrequest :: undefined | cowboy:onrequest_fun(),
+       onresponse = undefined :: undefined | cowboy:onresponse_fun(),
+       max_empty_lines :: non_neg_integer(),
+       req_keepalive = 1 :: non_neg_integer(),
+       max_keepalive :: non_neg_integer(),
+       max_request_line_length :: non_neg_integer(),
+       max_header_name_length :: non_neg_integer(),
+       max_header_value_length :: non_neg_integer(),
+       max_headers :: non_neg_integer(),
+       timeout :: timeout(),
+       until :: non_neg_integer() | infinity
+}).
+
+-include_lib("cowlib/include/cow_inline.hrl").
+
+%% API.
+
+-spec start_link(ranch:ref(), inet:socket(), module(), opts()) -> {ok, pid()}.
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+%% Internal.
+
+%% Faster alternative to proplists:get_value/3.
+get_value(Key, Opts, Default) ->
+       case lists:keyfind(Key, 1, Opts) of
+               {_, Value} -> Value;
+               _ -> Default
+       end.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, Opts) ->
+       Compress = get_value(compress, Opts, false),
+       MaxEmptyLines = get_value(max_empty_lines, Opts, 5),
+       MaxHeaderNameLength = get_value(max_header_name_length, Opts, 64),
+       MaxHeaderValueLength = get_value(max_header_value_length, Opts, 4096),
+       MaxHeaders = get_value(max_headers, Opts, 100),
+       MaxKeepalive = get_value(max_keepalive, Opts, 100),
+       MaxRequestLineLength = get_value(max_request_line_length, Opts, 4096),
+       Middlewares = get_value(middlewares, Opts, [cowboy_router, cowboy_handler]),
+       Env = [{listener, Ref}|get_value(env, Opts, [])],
+       OnRequest = get_value(onrequest, Opts, undefined),
+       OnResponse = get_value(onresponse, Opts, undefined),
+       Timeout = get_value(timeout, Opts, 5000),
+       ok = ranch:accept_ack(Ref),
+       wait_request(<<>>, #state{socket=Socket, transport=Transport,
+               middlewares=Middlewares, compress=Compress, env=Env,
+               max_empty_lines=MaxEmptyLines, max_keepalive=MaxKeepalive,
+               max_request_line_length=MaxRequestLineLength,
+               max_header_name_length=MaxHeaderNameLength,
+               max_header_value_length=MaxHeaderValueLength, max_headers=MaxHeaders,
+               onrequest=OnRequest, onresponse=OnResponse,
+               timeout=Timeout, until=until(Timeout)}, 0).
+
+-spec until(timeout()) -> non_neg_integer() | infinity.
+until(infinity) ->
+       infinity;
+until(Timeout) ->
+       {Me, S, Mi} = os:timestamp(),
+       Me * 1000000000 + S * 1000 + Mi div 1000 + Timeout.
+
+%% Request parsing.
+%%
+%% The next set of functions is the request parsing code. All of it
+%% runs using a single binary match context. This optimization ends
+%% right after the header parsing is finished and the code becomes
+%% more interesting past that point.
+
+-spec recv(inet:socket(), module(), non_neg_integer() | infinity)
+       -> {ok, binary()} | {error, closed | timeout | atom()}.
+recv(Socket, Transport, infinity) ->
+       Transport:recv(Socket, 0, infinity);
+recv(Socket, Transport, Until) ->
+       {Me, S, Mi} = os:timestamp(),
+       Now = Me * 1000000000 + S * 1000 + Mi div 1000,
+       Timeout = Until - Now,
+       if      Timeout < 0 ->
+                       {error, timeout};
+               true ->
+                       Transport:recv(Socket, 0, Timeout)
+       end.
+
+-spec wait_request(binary(), #state{}, non_neg_integer()) -> ok.
+wait_request(Buffer, State=#state{socket=Socket, transport=Transport,
+               until=Until}, ReqEmpty) ->
+       case recv(Socket, Transport, Until) of
+               {ok, Data} ->
+                       parse_request(<< Buffer/binary, Data/binary >>, State, ReqEmpty);
+               {error, _} ->
+                       terminate(State)
+       end.
+
+-spec parse_request(binary(), #state{}, non_neg_integer()) -> ok.
+%% Empty lines must be using \r\n.
+parse_request(<< $\n, _/binary >>, State, _) ->
+       error_terminate(400, State);
+%% We limit the length of the Request-line to MaxLength to avoid endlessly
+%% reading from the socket and eventually crashing.
+parse_request(Buffer, State=#state{max_request_line_length=MaxLength,
+               max_empty_lines=MaxEmpty}, ReqEmpty) ->
+       case match_eol(Buffer, 0) of
+               nomatch when byte_size(Buffer) > MaxLength ->
+                       error_terminate(414, State);
+               nomatch ->
+                       wait_request(Buffer, State, ReqEmpty);
+               1 when ReqEmpty =:= MaxEmpty ->
+                       error_terminate(400, State);
+               1 ->
+                       << _:16, Rest/binary >> = Buffer,
+                       parse_request(Rest, State, ReqEmpty + 1);
+               _ ->
+                       parse_method(Buffer, State, <<>>)
+       end.
+
+match_eol(<< $\n, _/bits >>, N) ->
+       N;
+match_eol(<< _, Rest/bits >>, N) ->
+       match_eol(Rest, N + 1);
+match_eol(_, _) ->
+       nomatch.
+
+parse_method(<< C, Rest/bits >>, State, SoFar) ->
+       case C of
+               $\r -> error_terminate(400, State);
+               $\s -> parse_uri(Rest, State, SoFar);
+               _ -> parse_method(Rest, State, << SoFar/binary, C >>)
+       end.
+
+parse_uri(<< $\r, _/bits >>, State, _) ->
+       error_terminate(400, State);
+parse_uri(<< "* ", Rest/bits >>, State, Method) ->
+       parse_version(Rest, State, Method, <<"*">>, <<>>);
+parse_uri(<< "http://", Rest/bits >>, State, Method) ->
+       parse_uri_skip_host(Rest, State, Method);
+parse_uri(<< "https://", Rest/bits >>, State, Method) ->
+       parse_uri_skip_host(Rest, State, Method);
+parse_uri(<< "HTTP://", Rest/bits >>, State, Method) ->
+       parse_uri_skip_host(Rest, State, Method);
+parse_uri(<< "HTTPS://", Rest/bits >>, State, Method) ->
+       parse_uri_skip_host(Rest, State, Method);
+parse_uri(Buffer, State, Method) ->
+       parse_uri_path(Buffer, State, Method, <<>>).
+
+parse_uri_skip_host(<< C, Rest/bits >>, State, Method) ->
+       case C of
+               $\r -> error_terminate(400, State);
+               $/ -> parse_uri_path(Rest, State, Method, <<"/">>);
+               $\s -> parse_version(Rest, State, Method, <<"/">>, <<>>);
+               $? -> parse_uri_query(Rest, State, Method, <<"/">>, <<>>);
+               $# -> skip_uri_fragment(Rest, State, Method, <<"/">>, <<>>);
+               _ -> parse_uri_skip_host(Rest, State, Method)
+       end.
+
+parse_uri_path(<< C, Rest/bits >>, State, Method, SoFar) ->
+       case C of
+               $\r -> error_terminate(400, State);
+               $\s -> parse_version(Rest, State, Method, SoFar, <<>>);
+               $? -> parse_uri_query(Rest, State, Method, SoFar, <<>>);
+               $# -> skip_uri_fragment(Rest, State, Method, SoFar, <<>>);
+               _ -> parse_uri_path(Rest, State, Method, << SoFar/binary, C >>)
+       end.
+
+parse_uri_query(<< C, Rest/bits >>, S, M, P, SoFar) ->
+       case C of
+               $\r -> error_terminate(400, S);
+               $\s -> parse_version(Rest, S, M, P, SoFar);
+               $# -> skip_uri_fragment(Rest, S, M, P, SoFar);
+               _ -> parse_uri_query(Rest, S, M, P, << SoFar/binary, C >>)
+       end.
+
+skip_uri_fragment(<< C, Rest/bits >>, S, M, P, Q) ->
+       case C of
+               $\r -> error_terminate(400, S);
+               $\s -> parse_version(Rest, S, M, P, Q);
+               _ -> skip_uri_fragment(Rest, S, M, P, Q)
+       end.
+
+parse_version(<< "HTTP/1.1\r\n", Rest/bits >>, S, M, P, Q) ->
+       parse_header(Rest, S, M, P, Q, 'HTTP/1.1', []);
+parse_version(<< "HTTP/1.0\r\n", Rest/bits >>, S, M, P, Q) ->
+       parse_header(Rest, S, M, P, Q, 'HTTP/1.0', []);
+parse_version(_, State, _, _, _) ->
+       error_terminate(505, State).
+
+%% Stop receiving data if we have more than allowed number of headers.
+wait_header(_, State=#state{max_headers=MaxHeaders}, _, _, _, _, Headers)
+               when length(Headers) >= MaxHeaders ->
+       error_terminate(400, State);
+wait_header(Buffer, State=#state{socket=Socket, transport=Transport,
+               until=Until}, M, P, Q, V, H) ->
+       case recv(Socket, Transport, Until) of
+               {ok, Data} ->
+                       parse_header(<< Buffer/binary, Data/binary >>,
+                               State, M, P, Q, V, H);
+               {error, timeout} ->
+                       error_terminate(408, State);
+               {error, _} ->
+                       terminate(State)
+       end.
+
+parse_header(<< $\r, $\n, Rest/bits >>, S, M, P, Q, V, Headers) ->
+       request(Rest, S, M, P, Q, V, lists:reverse(Headers));
+parse_header(Buffer, State=#state{max_header_name_length=MaxLength},
+               M, P, Q, V, H) ->
+       case match_colon(Buffer, 0) of
+               nomatch when byte_size(Buffer) > MaxLength ->
+                       error_terminate(400, State);
+               nomatch ->
+                       wait_header(Buffer, State, M, P, Q, V, H);
+               _ ->
+                       parse_hd_name(Buffer, State, M, P, Q, V, H, <<>>)
+       end.
+
+match_colon(<< $:, _/bits >>, N) ->
+       N;
+match_colon(<< _, Rest/bits >>, N) ->
+       match_colon(Rest, N + 1);
+match_colon(_, _) ->
+       nomatch.
+
+parse_hd_name(<< C, Rest/bits >>, S, M, P, Q, V, H, SoFar) ->
+       case C of
+               $: -> parse_hd_before_value(Rest, S, M, P, Q, V, H, SoFar);
+               $\s -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, SoFar);
+               $\t -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, SoFar);
+               ?INLINE_LOWERCASE(parse_hd_name, Rest, S, M, P, Q, V, H, SoFar)
+       end.
+
+parse_hd_name_ws(<< C, Rest/bits >>, S, M, P, Q, V, H, Name) ->
+       case C of
+               $\s -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, Name);
+               $\t -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, Name);
+               $: -> parse_hd_before_value(Rest, S, M, P, Q, V, H, Name)
+       end.
+
+wait_hd_before_value(Buffer, State=#state{
+               socket=Socket, transport=Transport, until=Until},
+               M, P, Q, V, H, N) ->
+       case recv(Socket, Transport, Until) of
+               {ok, Data} ->
+                       parse_hd_before_value(<< Buffer/binary, Data/binary >>,
+                               State, M, P, Q, V, H, N);
+               {error, timeout} ->
+                       error_terminate(408, State);
+               {error, _} ->
+                       terminate(State)
+       end.
+
+parse_hd_before_value(<< $\s, Rest/bits >>, S, M, P, Q, V, H, N) ->
+       parse_hd_before_value(Rest, S, M, P, Q, V, H, N);
+parse_hd_before_value(<< $\t, Rest/bits >>, S, M, P, Q, V, H, N) ->
+       parse_hd_before_value(Rest, S, M, P, Q, V, H, N);
+parse_hd_before_value(Buffer, State=#state{
+               max_header_value_length=MaxLength}, M, P, Q, V, H, N) ->
+       case match_eol(Buffer, 0) of
+               nomatch when byte_size(Buffer) > MaxLength ->
+                       error_terminate(400, State);
+               nomatch ->
+                       wait_hd_before_value(Buffer, State, M, P, Q, V, H, N);
+               _ ->
+                       parse_hd_value(Buffer, State, M, P, Q, V, H, N, <<>>)
+       end.
+
+%% We completely ignore the first argument which is always
+%% the empty binary. We keep it there because we don't want
+%% to change the other arguments' position and trigger costy
+%% operations for no reasons.
+wait_hd_value(_, State=#state{
+               socket=Socket, transport=Transport, until=Until},
+               M, P, Q, V, H, N, SoFar) ->
+       case recv(Socket, Transport, Until) of
+               {ok, Data} ->
+                       parse_hd_value(Data, State, M, P, Q, V, H, N, SoFar);
+               {error, timeout} ->
+                       error_terminate(408, State);
+               {error, _} ->
+                       terminate(State)
+       end.
+
+%% Pushing back as much as we could the retrieval of new data
+%% to check for multilines allows us to avoid a few tests in
+%% the critical path, but forces us to have a special function.
+wait_hd_value_nl(_, State=#state{
+               socket=Socket, transport=Transport, until=Until},
+               M, P, Q, V, Headers, Name, SoFar) ->
+       case recv(Socket, Transport, Until) of
+               {ok, << C, Data/bits >>} when C =:= $\s; C =:= $\t  ->
+                       parse_hd_value(Data, State, M, P, Q, V, Headers, Name, SoFar);
+               {ok, Data} ->
+                       parse_header(Data, State, M, P, Q, V, [{Name, SoFar}|Headers]);
+               {error, timeout} ->
+                       error_terminate(408, State);
+               {error, _} ->
+                       terminate(State)
+       end.
+
+parse_hd_value(<< $\r, Rest/bits >>, S, M, P, Q, V, Headers, Name, SoFar) ->
+       case Rest of
+               << $\n >> ->
+                       wait_hd_value_nl(<<>>, S, M, P, Q, V, Headers, Name, SoFar);
+               << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
+                       parse_hd_value(Rest2, S, M, P, Q, V, Headers, Name,
+                               << SoFar/binary, C >>);
+               << $\n, Rest2/bits >> ->
+                       parse_header(Rest2, S, M, P, Q, V, [{Name, SoFar}|Headers])
+       end;
+parse_hd_value(<< C, Rest/bits >>, S, M, P, Q, V, H, N, SoFar) ->
+       parse_hd_value(Rest, S, M, P, Q, V, H, N, << SoFar/binary, C >>);
+parse_hd_value(<<>>, State=#state{max_header_value_length=MaxLength},
+               _, _, _, _, _, _, SoFar) when byte_size(SoFar) > MaxLength ->
+       error_terminate(400, State);
+parse_hd_value(<<>>, S, M, P, Q, V, H, N, SoFar) ->
+       wait_hd_value(<<>>, S, M, P, Q, V, H, N, SoFar).
+
+request(B, State=#state{transport=Transport}, M, P, Q, Version, Headers) ->
+       case lists:keyfind(<<"host">>, 1, Headers) of
+               false when Version =:= 'HTTP/1.1' ->
+                       error_terminate(400, State);
+               false ->
+                       request(B, State, M, P, Q, Version, Headers,
+                               <<>>, default_port(Transport:name()));
+               {_, RawHost} ->
+                       try parse_host(RawHost, false, <<>>) of
+                               {Host, undefined} ->
+                                       request(B, State, M, P, Q, Version, Headers,
+                                               Host, default_port(Transport:name()));
+                               {Host, Port} ->
+                                       request(B, State, M, P, Q, Version, Headers,
+                                               Host, Port)
+                       catch _:_ ->
+                               error_terminate(400, State)
+                       end
+       end.
+
+-spec default_port(atom()) -> 80 | 443.
+default_port(ssl) -> 443;
+default_port(_) -> 80.
+
+%% Same code as cow_http:parse_fullhost/1, but inline because we
+%% really want this to go fast.
+parse_host(<< $[, Rest/bits >>, false, <<>>) ->
+       parse_host(Rest, true, << $[ >>);
+parse_host(<<>>, false, Acc) ->
+       {Acc, undefined};
+parse_host(<< $:, Rest/bits >>, false, Acc) ->
+       {Acc, list_to_integer(binary_to_list(Rest))};
+parse_host(<< $], Rest/bits >>, true, Acc) ->
+       parse_host(Rest, false, << Acc/binary, $] >>);
+parse_host(<< C, Rest/bits >>, E, Acc) ->
+       case C of
+               ?INLINE_LOWERCASE(parse_host, Rest, E, Acc)
+       end.
+
+%% End of request parsing.
+%%
+%% We create the Req object and start handling the request.
+
+request(Buffer, State=#state{socket=Socket, transport=Transport,
+               req_keepalive=ReqKeepalive, max_keepalive=MaxKeepalive,
+               compress=Compress, onresponse=OnResponse},
+               Method, Path, Query, Version, Headers, Host, Port) ->
+       case Transport:peername(Socket) of
+               {ok, Peer} ->
+                       Req = cowboy_req:new(Socket, Transport, Peer, Method, Path,
+                               Query, Version, Headers, Host, Port, Buffer,
+                               ReqKeepalive < MaxKeepalive, Compress, OnResponse),
+                       onrequest(Req, State);
+               {error, _} ->
+                       %% Couldn't read the peer address; connection is gone.
+                       terminate(State)
+       end.
+
+%% Call the global onrequest callback. The callback can send a reply,
+%% in which case we consider the request handled and move on to the next
+%% one. Note that since we haven't dispatched yet, we don't know the
+%% handler, host_info, path_info or bindings yet.
+-spec onrequest(cowboy_req:req(), #state{}) -> ok.
+onrequest(Req, State=#state{onrequest=undefined}) ->
+       execute(Req, State);
+onrequest(Req, State=#state{onrequest=OnRequest}) ->
+       Req2 = OnRequest(Req),
+       case cowboy_req:get(resp_state, Req2) of
+               waiting -> execute(Req2, State);
+               _ -> next_request(Req2, State, ok)
+       end.
+
+-spec execute(cowboy_req:req(), #state{}) -> ok.
+execute(Req, State=#state{middlewares=Middlewares, env=Env}) ->
+       execute(Req, State, Env, Middlewares).
+
+-spec execute(cowboy_req:req(), #state{}, cowboy_middleware:env(), [module()])
+       -> ok.
+execute(Req, State, Env, []) ->
+       next_request(Req, State, get_value(result, Env, ok));
+execute(Req, State, Env, [Middleware|Tail]) ->
+       case Middleware:execute(Req, Env) of
+               {ok, Req2, Env2} ->
+                       execute(Req2, State, Env2, Tail);
+               {suspend, Module, Function, Args} ->
+                       erlang:hibernate(?MODULE, resume,
+                               [State, Env, Tail, Module, Function, Args]);
+               {halt, Req2} ->
+                       next_request(Req2, State, ok);
+               {error, Code, Req2} ->
+                       error_terminate(Code, Req2, State)
+       end.
+
+-spec resume(#state{}, cowboy_middleware:env(), [module()],
+       module(), module(), [any()]) -> ok.
+resume(State, Env, Tail, Module, Function, Args) ->
+       case apply(Module, Function, Args) of
+               {ok, Req2, Env2} ->
+                       execute(Req2, State, Env2, Tail);
+               {suspend, Module2, Function2, Args2} ->
+                       erlang:hibernate(?MODULE, resume,
+                               [State, Env, Tail, Module2, Function2, Args2]);
+               {halt, Req2} ->
+                       next_request(Req2, State, ok);
+               {error, Code, Req2} ->
+                       error_terminate(Code, Req2, State)
+       end.
+
+-spec next_request(cowboy_req:req(), #state{}, any()) -> ok.
+next_request(Req, State=#state{req_keepalive=Keepalive, timeout=Timeout},
+               HandlerRes) ->
+       cowboy_req:ensure_response(Req, 204),
+       %% If we are going to close the connection,
+       %% we do not want to attempt to skip the body.
+       case cowboy_req:get(connection, Req) of
+               close ->
+                       terminate(State);
+               _ ->
+                       %% Skip the body if it is reasonably sized. Close otherwise.
+                       Buffer = case cowboy_req:body(Req) of
+                               {ok, _, Req2} -> cowboy_req:get(buffer, Req2);
+                               _ -> close
+                       end,
+                       %% Flush the resp_sent message before moving on.
+                       if HandlerRes =:= ok, Buffer =/= close ->
+                                       receive {cowboy_req, resp_sent} -> ok after 0 -> ok end,
+                                       ?MODULE:parse_request(Buffer,
+                                               State#state{req_keepalive=Keepalive + 1,
+                                               until=until(Timeout)}, 0);
+                               true ->
+                                       terminate(State)
+                       end
+       end.
+
+-spec error_terminate(cowboy:http_status(), #state{}) -> ok.
+error_terminate(Status, State=#state{socket=Socket, transport=Transport,
+               compress=Compress, onresponse=OnResponse}) ->
+       error_terminate(Status, cowboy_req:new(Socket, Transport,
+               undefined, <<"GET">>, <<>>, <<>>, 'HTTP/1.1', [], <<>>,
+               undefined, <<>>, false, Compress, OnResponse), State).
+
+-spec error_terminate(cowboy:http_status(), cowboy_req:req(), #state{}) -> ok.
+error_terminate(Status, Req, State) ->
+       _ = cowboy_req:reply(Status, Req),
+       terminate(State).
+
+-spec terminate(#state{}) -> ok.
+terminate(#state{socket=Socket, transport=Transport}) ->
+       Transport:close(Socket),
+       ok.
diff --git a/deps/cowboy/src/cowboy_req.erl b/deps/cowboy/src/cowboy_req.erl
new file mode 100644 (file)
index 0000000..fcc9744
--- /dev/null
@@ -0,0 +1,1385 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_req).
+
+%% Request API.
+-export([new/14]).
+-export([method/1]).
+-export([version/1]).
+-export([peer/1]).
+-export([host/1]).
+-export([host_info/1]).
+-export([port/1]).
+-export([path/1]).
+-export([path_info/1]).
+-export([qs/1]).
+-export([qs_val/2]).
+-export([qs_val/3]).
+-export([qs_vals/1]).
+-export([host_url/1]).
+-export([url/1]).
+-export([binding/2]).
+-export([binding/3]).
+-export([bindings/1]).
+-export([header/2]).
+-export([header/3]).
+-export([headers/1]).
+-export([parse_header/2]).
+-export([parse_header/3]).
+-export([cookie/2]).
+-export([cookie/3]).
+-export([cookies/1]).
+-export([meta/2]).
+-export([meta/3]).
+-export([set_meta/3]).
+
+%% Request body API.
+-export([has_body/1]).
+-export([body_length/1]).
+-export([body/1]).
+-export([body/2]).
+-export([body_qs/1]).
+-export([body_qs/2]).
+
+%% Multipart API.
+-export([part/1]).
+-export([part/2]).
+-export([part_body/1]).
+-export([part_body/2]).
+
+%% Response API.
+-export([set_resp_cookie/4]).
+-export([set_resp_header/3]).
+-export([set_resp_body/2]).
+-export([set_resp_body_fun/2]).
+-export([set_resp_body_fun/3]).
+-export([has_resp_header/2]).
+-export([has_resp_body/1]).
+-export([delete_resp_header/2]).
+-export([reply/2]).
+-export([reply/3]).
+-export([reply/4]).
+-export([chunked_reply/2]).
+-export([chunked_reply/3]).
+-export([chunk/2]).
+-export([upgrade_reply/3]).
+-export([continue/1]).
+-export([maybe_reply/2]).
+-export([ensure_response/2]).
+
+%% Private setter/getter API.
+-export([append_buffer/2]).
+-export([get/2]).
+-export([set/2]).
+-export([set_bindings/4]).
+
+%% Misc API.
+-export([compact/1]).
+-export([lock/1]).
+-export([to_list/1]).
+
+-type cookie_opts() :: cow_cookie:cookie_opts().
+-export_type([cookie_opts/0]).
+
+-type content_decode_fun() :: fun((binary())
+       -> {ok, binary()}
+       | {error, atom()}).
+-type transfer_decode_fun() :: fun((binary(), any())
+       -> cow_http_te:decode_ret()).
+
+-type body_opts() :: [{continue, boolean()}
+       | {length, non_neg_integer()}
+       | {read_length, non_neg_integer()}
+       | {read_timeout, timeout()}
+       | {transfer_decode, transfer_decode_fun(), any()}
+       | {content_decode, content_decode_fun()}].
+-export_type([body_opts/0]).
+
+-type resp_body_fun() :: fun((any(), module()) -> ok).
+-type send_chunk_fun() :: fun((iodata()) -> ok | {error, atom()}).
+-type resp_chunked_fun() :: fun((send_chunk_fun()) -> ok).
+
+-record(http_req, {
+       %% Transport.
+       socket = undefined :: any(),
+       transport = undefined :: undefined | module(),
+       connection = keepalive :: keepalive | close,
+
+       %% Request.
+       pid = undefined :: pid(),
+       method = <<"GET">> :: binary(),
+       version = 'HTTP/1.1' :: cowboy:http_version(),
+       peer = undefined :: undefined | {inet:ip_address(), inet:port_number()},
+       host = undefined :: undefined | binary(),
+       host_info = undefined :: undefined | cowboy_router:tokens(),
+       port = undefined :: undefined | inet:port_number(),
+       path = undefined :: binary(),
+       path_info = undefined :: undefined | cowboy_router:tokens(),
+       qs = undefined :: binary(),
+       qs_vals = undefined :: undefined | list({binary(), binary() | true}),
+       bindings = undefined :: undefined | cowboy_router:bindings(),
+       headers = [] :: cowboy:http_headers(),
+       p_headers = [] :: [any()],
+       cookies = undefined :: undefined | [{binary(), binary()}],
+       meta = [] :: [{atom(), any()}],
+
+       %% Request body.
+       body_state = waiting :: waiting | done | {stream, non_neg_integer(),
+               transfer_decode_fun(), any(), content_decode_fun()},
+       buffer = <<>> :: binary(),
+       multipart = undefined :: undefined | {binary(), binary()},
+
+       %% Response.
+       resp_compress = false :: boolean(),
+       resp_state = waiting :: locked | waiting | waiting_stream
+               | chunks | stream | done,
+       resp_headers = [] :: cowboy:http_headers(),
+       resp_body = <<>> :: iodata() | resp_body_fun()
+               | {non_neg_integer(), resp_body_fun()}
+               | {chunked, resp_chunked_fun()},
+
+       %% Functions.
+       onresponse = undefined :: undefined | already_called
+               | cowboy:onresponse_fun()
+}).
+
+-opaque req() :: #http_req{}.
+-export_type([req/0]).
+
+%% Request API.
+
+%% Build a fresh request object for a newly accepted connection.
+%% The 'connection' field (keepalive vs close) is derived from the
+%% CanKeepalive flag, the HTTP version and any "connection" request
+%% header; when that header is present its parsed token list is
+%% cached in p_headers so parse_header/2 won't re-parse it.
+-spec new(any(), module(),
+	undefined | {inet:ip_address(), inet:port_number()},
+	binary(), binary(), binary(),
+	cowboy:http_version(), cowboy:http_headers(), binary(),
+	inet:port_number() | undefined, binary(), boolean(), boolean(),
+	undefined | cowboy:onresponse_fun())
+	-> req().
+new(Socket, Transport, Peer, Method, Path, Query,
+		Version, Headers, Host, Port, Buffer, CanKeepalive,
+		Compress, OnResponse) ->
+	Req = #http_req{socket=Socket, transport=Transport, pid=self(), peer=Peer,
+		method=Method, path=Path, qs=Query, version=Version,
+		headers=Headers, host=Host, port=Port, buffer=Buffer,
+		resp_compress=Compress, onresponse=OnResponse},
+	case CanKeepalive of
+		false ->
+			Req#http_req{connection=close};
+		true ->
+			case lists:keyfind(<<"connection">>, 1, Headers) of
+				false ->
+					%% No header: HTTP/1.1 defaults to keepalive,
+					%% HTTP/1.0 defaults to close.
+					case Version of
+						'HTTP/1.1' -> Req; %% keepalive
+						'HTTP/1.0' -> Req#http_req{connection=close}
+					end;
+				{_, ConnectionHeader} ->
+					Tokens = cow_http_hd:parse_connection(ConnectionHeader),
+					Connection = connection_to_atom(Tokens),
+					Req#http_req{connection=Connection,
+						p_headers=[{<<"connection">>, Tokens}]}
+			end
+	end.
+
+%% Simple field accessors. Each returns {Value, Req}, threading the
+%% request through so callers can chain accessor calls.
+
+%% Request method as a binary, e.g. <<"GET">>.
+-spec method(Req) -> {binary(), Req} when Req::req().
+method(Req) ->
+	{Req#http_req.method, Req}.
+
+%% HTTP protocol version of the request.
+-spec version(Req) -> {cowboy:http_version(), Req} when Req::req().
+version(Req) ->
+	{Req#http_req.version, Req}.
+
+%% Remote {IP, Port} of the connection peer.
+-spec peer(Req)
+	-> {{inet:ip_address(), inet:port_number()}, Req}
+	when Req::req().
+peer(Req) ->
+	{Req#http_req.peer, Req}.
+
+%% Requested host (binary).
+-spec host(Req) -> {binary(), Req} when Req::req().
+host(Req) ->
+	{Req#http_req.host, Req}.
+
+%% Extra host tokens matched by the router, if any.
+-spec host_info(Req)
+	-> {cowboy_router:tokens() | undefined, Req} when Req::req().
+host_info(Req) ->
+	{Req#http_req.host_info, Req}.
+
+%% Port the request was received on.
+-spec port(Req) -> {inet:port_number(), Req} when Req::req().
+port(Req) ->
+	{Req#http_req.port, Req}.
+
+%% Raw request path (binary).
+-spec path(Req) -> {binary(), Req} when Req::req().
+path(Req) ->
+	{Req#http_req.path, Req}.
+
+%% Extra path tokens matched by the router, if any.
+-spec path_info(Req)
+	-> {cowboy_router:tokens() | undefined, Req} when Req::req().
+path_info(Req) ->
+	{Req#http_req.path_info, Req}.
+
+%% Raw query string (binary), without the leading "?".
+-spec qs(Req) -> {binary(), Req} when Req::req().
+qs(Req) ->
+	{Req#http_req.qs, Req}.
+
+%% Look up a single query-string value; undefined when absent.
+-spec qs_val(binary(), Req)
+	-> {binary() | true | undefined, Req} when Req::req().
+qs_val(Name, Req) when is_binary(Name) ->
+	qs_val(Name, Req, undefined).
+
+%% Same as qs_val/2 but with a caller-supplied default. The raw query
+%% string is parsed lazily on first access and the result is cached in
+%% the qs_vals field of the returned request.
+-spec qs_val(binary(), Req, Default)
+	-> {binary() | true | Default, Req} when Req::req(), Default::any().
+qs_val(Name, Req=#http_req{qs=RawQs, qs_vals=undefined}, Default)
+		when is_binary(Name) ->
+	QsVals = cow_qs:parse_qs(RawQs),
+	qs_val(Name, Req#http_req{qs_vals=QsVals}, Default);
+qs_val(Name, Req, Default) ->
+	case lists:keyfind(Name, 1, Req#http_req.qs_vals) of
+		{Name, Value} -> {Value, Req};
+		false -> {Default, Req}
+	end.
+
+%% All query-string pairs; parses lazily with the same caching.
+-spec qs_vals(Req) -> {list({binary(), binary() | true}), Req} when Req::req().
+qs_vals(Req=#http_req{qs=RawQs, qs_vals=undefined}) ->
+	QsVals = cow_qs:parse_qs(RawQs),
+	qs_vals(Req#http_req{qs_vals=QsVals});
+qs_vals(Req=#http_req{qs_vals=QsVals}) ->
+	{QsVals, Req}.
+
+%% The URL includes the scheme, host and port only.
+%% Returns undefined when the port field was never set. Default ports
+%% (443 for ssl, 80 for tcp) are elided from the result.
+-spec host_url(Req) -> {undefined | binary(), Req} when Req::req().
+host_url(Req=#http_req{port=undefined}) ->
+	{undefined, Req};
+host_url(Req=#http_req{transport=Transport, host=Host, port=Port}) ->
+	TransportName = Transport:name(),
+	%% "https" scheme when the transport reports itself as ssl.
+	Secure = case TransportName of
+		ssl -> <<"s">>;
+		_ -> <<>>
+	end,
+	PortBin = case {TransportName, Port} of
+		{ssl, 443} -> <<>>;
+		{tcp, 80} -> <<>>;
+		_ -> << ":", (integer_to_binary(Port))/binary >>
+	end,
+	{<< "http", Secure/binary, "://", Host/binary, PortBin/binary >>, Req}.
+
+%% The URL includes the scheme, host, port, path and query string.
+%% Propagates undefined from host_url/1.
+-spec url(Req) -> {undefined | binary(), Req} when Req::req().
+url(Req=#http_req{}) ->
+	{HostURL, Req2} = host_url(Req),
+	url(HostURL, Req2).
+
+%% Internal: append path and (when non-empty) "?" ++ query string.
+url(undefined, Req=#http_req{}) ->
+	{undefined, Req};
+url(HostURL, Req=#http_req{path=Path, qs=QS}) ->
+	QS2 = case QS of
+		<<>> -> <<>>;
+		_ -> << "?", QS/binary >>
+	end,
+	{<< HostURL/binary, Path/binary, QS2/binary >>, Req}.
+
+%% Look up a single router binding by atom name; undefined when absent.
+-spec binding(atom(), Req) -> {any() | undefined, Req} when Req::req().
+binding(Name, Req) when is_atom(Name) ->
+	binding(Name, Req, undefined).
+
+%% Same as binding/2 with a caller-supplied default.
+-spec binding(atom(), Req, Default)
+	-> {any() | Default, Req} when Req::req(), Default::any().
+binding(Name, Req, Default) when is_atom(Name) ->
+	case lists:keyfind(Name, 1, Req#http_req.bindings) of
+		{Name, Value} -> {Value, Req};
+		false -> {Default, Req}
+	end.
+
+%% All router bindings as a property list.
+-spec bindings(Req) -> {[{atom(), any()}], Req} when Req::req().
+bindings(Req) ->
+	{Req#http_req.bindings, Req}.
+
+%% Raw (unparsed) value of a request header; undefined when absent.
+%% Header names are expected in lowercase binary form.
+-spec header(binary(), Req)
+	-> {binary() | undefined, Req} when Req::req().
+header(Name, Req) ->
+	header(Name, Req, undefined).
+
+%% Same as header/2 with a caller-supplied default.
+-spec header(binary(), Req, Default)
+	-> {binary() | Default, Req} when Req::req(), Default::any().
+header(Name, Req, Default) ->
+	case lists:keyfind(Name, 1, Req#http_req.headers) of
+		{Name, Value} -> {Value, Req};
+		false -> {Default, Req}
+	end.
+
+%% The full raw header list.
+-spec headers(Req) -> {cowboy:http_headers(), Req} when Req::req().
+headers(Req) ->
+	{Req#http_req.headers, Req}.
+
+%% Parsed value of a header. The parse cache (p_headers) is checked
+%% first; on a miss the header is parsed via parse_header/3 with a
+%% header-specific default.
+-spec parse_header(binary(), Req)
+	-> {ok, any(), Req} | {undefined, binary(), Req}
+	| {error, badarg} when Req::req().
+parse_header(Name, Req=#http_req{p_headers=PHeaders}) ->
+	case lists:keyfind(Name, 1, PHeaders) of
+		false -> parse_header(Name, Req, parse_header_default(Name));
+		{Name, Value} -> {ok, Value, Req}
+	end.
+
+%% Default parsed value used when a header is absent. An absent
+%% transfer-encoding is treated as [<<"identity">>] per RFC 7230.
+-spec parse_header_default(binary()) -> any().
+parse_header_default(<<"transfer-encoding">>) -> [<<"identity">>];
+parse_header_default(_Name) -> undefined.
+
+%% Dispatch table mapping each known header name to its parser
+%% function (from cowboy_http, cow_http_hd or cow_cookie). Each clause
+%% delegates to parse_header/4 which runs the parser and caches the
+%% result. The final catch-all clause returns the raw header value
+%% tagged 'undefined' for headers this module doesn't know how to
+%% parse.
+-spec parse_header(binary(), Req, any())
+	-> {ok, any(), Req} | {undefined, binary(), Req}
+	| {error, badarg} when Req::req().
+parse_header(Name = <<"accept">>, Req, Default) ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:list(Value, fun cowboy_http:media_range/2)
+		end);
+parse_header(Name = <<"accept-charset">>, Req, Default) ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:nonempty_list(Value, fun cowboy_http:conneg/2)
+		end);
+parse_header(Name = <<"accept-encoding">>, Req, Default) ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:list(Value, fun cowboy_http:conneg/2)
+		end);
+parse_header(Name = <<"accept-language">>, Req, Default) ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:nonempty_list(Value, fun cowboy_http:language_range/2)
+		end);
+parse_header(Name = <<"authorization">>, Req, Default) ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:token_ci(Value, fun cowboy_http:authorization/2)
+		end);
+parse_header(Name = <<"content-length">>, Req, Default) ->
+	parse_header(Name, Req, Default, fun cow_http_hd:parse_content_length/1);
+parse_header(Name = <<"content-type">>, Req, Default) ->
+	parse_header(Name, Req, Default, fun cowboy_http:content_type/1);
+parse_header(Name = <<"cookie">>, Req, Default) ->
+	parse_header(Name, Req, Default, fun cow_cookie:parse_cookie/1);
+parse_header(Name = <<"expect">>, Req, Default) ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:nonempty_list(Value, fun cowboy_http:expectation/2)
+		end);
+parse_header(Name, Req, Default)
+		when Name =:= <<"if-match">>;
+			Name =:= <<"if-none-match">> ->
+	parse_header(Name, Req, Default, fun cowboy_http:entity_tag_match/1);
+parse_header(Name, Req, Default)
+		when Name =:= <<"if-modified-since">>;
+			Name =:= <<"if-unmodified-since">> ->
+	parse_header(Name, Req, Default, fun cowboy_http:http_date/1);
+parse_header(Name = <<"range">>, Req, Default) ->
+	parse_header(Name, Req, Default, fun cowboy_http:range/1);
+parse_header(Name, Req, Default)
+		when Name =:= <<"sec-websocket-protocol">>;
+			Name =:= <<"x-forwarded-for">> ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:nonempty_list(Value, fun cowboy_http:token/2)
+		end);
+parse_header(Name = <<"transfer-encoding">>, Req, Default) ->
+	parse_header(Name, Req, Default, fun cow_http_hd:parse_transfer_encoding/1);
+%% @todo Product version.
+parse_header(Name = <<"upgrade">>, Req, Default) ->
+	parse_header(Name, Req, Default,
+		fun (Value) ->
+			cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2)
+		end);
+parse_header(Name = <<"sec-websocket-extensions">>, Req, Default) ->
+	parse_header(Name, Req, Default, fun cowboy_http:parameterized_tokens/1);
+%% Unknown header: hand back the raw value without caching.
+parse_header(Name, Req, Default) ->
+	{Value, Req2} = header(Name, Req, Default),
+	{undefined, Value, Req2}.
+
+%% Core of header parsing: fetch the raw value, run the parser Fun,
+%% and cache the outcome in p_headers. A missing header caches the
+%% Default as-is (it is not run through Fun). Parse failures are
+%% reported as {error, badarg} and are NOT cached.
+parse_header(Name, Req=#http_req{p_headers=PHeaders}, Default, Fun) ->
+	case header(Name, Req) of
+		{undefined, Req2} ->
+			{ok, Default, Req2#http_req{p_headers=[{Name, Default}|PHeaders]}};
+		{Value, Req2} ->
+			case Fun(Value) of
+				{error, badarg} ->
+					{error, badarg};
+				P ->
+					{ok, P, Req2#http_req{p_headers=[{Name, P}|PHeaders]}}
+			end
+	end.
+
+%% Look up a single request cookie; undefined when absent.
+-spec cookie(binary(), Req)
+	-> {binary() | undefined, Req} when Req::req().
+cookie(Name, Req) when is_binary(Name) ->
+	cookie(Name, Req, undefined).
+
+%% Same as cookie/2 with a default. Cookies are parsed lazily from the
+%% "cookie" header on first access and cached in the cookies field.
+%% NOTE(review): unlike cookies/1 below, this clause has no
+%% {error, badarg} branch for the empty-Cookie-header case — a
+%% malformed header would crash here with a case_clause. Confirm this
+%% matches upstream before changing.
+-spec cookie(binary(), Req, Default)
+	-> {binary() | Default, Req} when Req::req(), Default::any().
+cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) ->
+	case parse_header(<<"cookie">>, Req) of
+		{ok, undefined, Req2} ->
+			{Default, Req2#http_req{cookies=[]}};
+		{ok, Cookies, Req2} ->
+			cookie(Name, Req2#http_req{cookies=Cookies}, Default)
+	end;
+cookie(Name, Req, Default) ->
+	case lists:keyfind(Name, 1, Req#http_req.cookies) of
+		{Name, Value} -> {Value, Req};
+		false -> {Default, Req}
+	end.
+
+%% All request cookies, parsed lazily with the same caching.
+-spec cookies(Req) -> {list({binary(), binary()}), Req} when Req::req().
+cookies(Req=#http_req{cookies=undefined}) ->
+	case parse_header(<<"cookie">>, Req) of
+		{ok, undefined, Req2} ->
+			{[], Req2#http_req{cookies=[]}};
+		{ok, Cookies, Req2} ->
+			cookies(Req2#http_req{cookies=Cookies});
+		%% Flash player incorrectly sends an empty Cookie header.
+		{error, badarg} ->
+			{[], Req#http_req{cookies=[]}}
+	end;
+cookies(Req=#http_req{cookies=Cookies}) ->
+	{Cookies, Req}.
+
+%% Read a per-request metadata value set by middlewares/handlers;
+%% undefined when absent.
+-spec meta(atom(), Req) -> {any() | undefined, Req} when Req::req().
+meta(Name, Req) ->
+	meta(Name, Req, undefined).
+
+%% Same as meta/2 with a caller-supplied default.
+-spec meta(atom(), Req, any()) -> {any(), Req} when Req::req().
+meta(Name, Req, Default) ->
+	case lists:keyfind(Name, 1, Req#http_req.meta) of
+		{Name, Value} -> {Value, Req};
+		false -> {Default, Req}
+	end.
+
+%% Insert or replace a metadata value (keystore semantics).
+-spec set_meta(atom(), any(), Req) -> Req when Req::req().
+set_meta(Name, Value, Req=#http_req{meta=Meta}) ->
+	Req#http_req{meta=lists:keystore(Name, 1, Meta, {Name, Value})}.
+
+%% Request Body API.
+
+%% Whether the request carries a body: true when content-length is
+%% present and not "0", otherwise true only when a transfer-encoding
+%% header exists. Works on the raw header list, so no parsing occurs.
+-spec has_body(req()) -> boolean().
+has_body(Req) ->
+	case lists:keyfind(<<"content-length">>, 1, Req#http_req.headers) of
+		{_, <<"0">>} ->
+			false;
+		{_, _} ->
+			true;
+		_ ->
+			lists:keymember(<<"transfer-encoding">>, 1, Req#http_req.headers)
+	end.
+
+%% The length may not be known if Transfer-Encoding is not identity,
+%% and the body hasn't been read at the time of the call.
+%% With identity encoding the parsed content-length is returned
+%% (0 when absent); any other encoding yields undefined.
+-spec body_length(Req) -> {undefined | non_neg_integer(), Req} when Req::req().
+body_length(Req) ->
+	case parse_header(<<"transfer-encoding">>, Req) of
+		{ok, [<<"identity">>], Req2} ->
+			{ok, Length, Req3} = parse_header(<<"content-length">>, Req2, 0),
+			{Length, Req3};
+		{ok, _, Req2} ->
+			{undefined, Req2}
+	end.
+
+%% Read the request body with default options.
+-spec body(Req)
+	-> {ok, binary(), Req} | {more, binary(), Req}
+	| {error, atom()} when Req::req().
+body(Req) ->
+	body(Req, []).
+
+%% Read the request body. Recognized options: continue (send 100
+%% Continue, default true), content_decode / transfer_decode
+%% (decoder funs), length (max bytes returned per call, default
+%% 8000000), read_length (bytes per socket read, default 1000000)
+%% and read_timeout (ms, default 15000). Returns {more, Data, Req}
+%% when the body exceeds the length option.
+-spec body(Req, body_opts())
+	-> {ok, binary(), Req} | {more, binary(), Req}
+	| {error, atom()} when Req::req().
+body(Req=#http_req{body_state=waiting}, Opts) ->
+	%% Send a 100 continue if needed (enabled by default).
+	%% NOTE(review): the {ok, _, _} match crashes on a malformed
+	%% expect header ({error, badarg}) — confirm intended.
+	Req1 = case lists:keyfind(continue, 1, Opts) of
+		{_, false} ->
+			Req;
+		_ ->
+			{ok, ExpectHeader, Req0} = parse_header(<<"expect">>, Req),
+			ok = case ExpectHeader of
+				[<<"100-continue">>] -> continue(Req0);
+				_ -> ok
+			end,
+			Req0
+	end,
+	%% Initialize body streaming state.
+	CFun = case lists:keyfind(content_decode, 1, Opts) of
+		false ->
+			fun cowboy_http:ce_identity/1;
+		{_, CFun0} ->
+			CFun0
+	end,
+	%% Pick the transfer decoder: caller-supplied, or derived from
+	%% the transfer-encoding header (chunked vs identity).
+	case lists:keyfind(transfer_decode, 1, Opts) of
+		false ->
+			case parse_header(<<"transfer-encoding">>, Req1) of
+				{ok, [<<"chunked">>], Req2} ->
+					body(Req2#http_req{body_state={stream, 0,
+						fun cow_http_te:stream_chunked/2, {0, 0}, CFun}}, Opts);
+				{ok, [<<"identity">>], Req2} ->
+					{Len, Req3} = body_length(Req2),
+					case Len of
+						0 ->
+							{ok, <<>>, Req3#http_req{body_state=done}};
+						_ ->
+							body(Req3#http_req{body_state={stream, Len,
+								fun cow_http_te:stream_identity/2, {0, Len},
+								CFun}}, Opts)
+					end
+			end;
+		{_, TFun, TState} ->
+			body(Req1#http_req{body_state={stream, 0,
+				TFun, TState, CFun}}, Opts)
+	end;
+%% Body already fully consumed.
+body(Req=#http_req{body_state=done}, _) ->
+	{ok, <<>>, Req};
+%% Streaming state initialized: resolve option defaults and loop.
+body(Req, Opts) ->
+	ChunkLen = case lists:keyfind(length, 1, Opts) of
+		false -> 8000000;
+		{_, ChunkLen0} -> ChunkLen0
+	end,
+	ReadLen = case lists:keyfind(read_length, 1, Opts) of
+		false -> 1000000;
+		{_, ReadLen0} -> ReadLen0
+	end,
+	ReadTimeout = case lists:keyfind(read_timeout, 1, Opts) of
+		false -> 15000;
+		{_, ReadTimeout0} -> ReadTimeout0
+	end,
+	body_loop(Req, ReadTimeout, ReadLen, ChunkLen, <<>>).
+
+%% Accumulate decoded body data until the decoder reports done (ok)
+%% or the accumulator reaches ChunkLength (more). Drains any buffered
+%% bytes before reading from the socket again.
+body_loop(Req=#http_req{buffer=Buffer, body_state={stream, Length, _, _, _}},
+		ReadTimeout, ReadLength, ChunkLength, Acc) ->
+	{Tag, Res, Req2} = case Buffer of
+		<<>> ->
+			body_recv(Req, ReadTimeout, min(Length, ReadLength));
+		_ ->
+			body_decode(Req, ReadTimeout)
+	end,
+	case {Tag, Res} of
+		{ok, {ok, Data}} ->
+			%% Decoder finished: return everything accumulated.
+			{ok, << Acc/binary, Data/binary >>, Req2};
+		{more, {ok, Data}} ->
+			Acc2 = << Acc/binary, Data/binary >>,
+			case byte_size(Acc2) >= ChunkLength of
+				true -> {more, Acc2, Req2};
+				false -> body_loop(Req2, ReadTimeout, ReadLength, ChunkLength, Acc2)
+			end;
+		_ -> %% Error.
+			Res
+	end.
+
+%% Read up to ReadLength bytes from the socket, append them to the
+%% request buffer and decode. Transport errors (including timeout)
+%% are surfaced as {error, {error, Reason}, Req}.
+body_recv(Req=#http_req{transport=Transport, socket=Socket, buffer=Buffer},
+		ReadTimeout, ReadLength) ->
+	case Transport:recv(Socket, ReadLength, ReadTimeout) of
+		{ok, Data} ->
+			body_decode(Req#http_req{buffer= << Buffer/binary, Data/binary >>},
+				ReadTimeout);
+		Error = {error, _} ->
+			{error, Error, Req}
+	end.
+
+%% Two decodings happen. First a decoding function is applied to the
+%% transferred data, and then another is applied to the actual content.
+%%
+%% Transfer encoding is generally used for chunked bodies. The decoding
+%% function uses a state to keep track of how much it has read, which is
+%% also initialized through this function.
+%%
+%% Content encoding is generally used for compression.
+%%
+%% @todo Handle chunked after-the-facts headers.
+%% @todo Depending on the length returned we might want to 0 or +5 it.
+%% Maps the transfer decoder's return shapes onto {Tag, Res, Req}
+%% triples consumed by body_loop/5, threading buffer and decoder
+%% state through the request.
+body_decode(Req=#http_req{buffer=Data, body_state={stream, _,
+		TDecode, TState, CDecode}}, ReadTimeout) ->
+	case TDecode(Data, TState) of
+		more ->
+			%% Need more bytes before anything can be decoded.
+			body_recv(Req#http_req{body_state={stream, 0,
+				TDecode, TState, CDecode}}, ReadTimeout, 0);
+		{more, Data2, TState2} ->
+			{more, CDecode(Data2), Req#http_req{body_state={stream, 0,
+				TDecode, TState2, CDecode}, buffer= <<>>}};
+		{more, Data2, Length, TState2} when is_integer(Length) ->
+			{more, CDecode(Data2), Req#http_req{body_state={stream, Length,
+				TDecode, TState2, CDecode}, buffer= <<>>}};
+		{more, Data2, Rest, TState2} ->
+			{more, CDecode(Data2), Req#http_req{body_state={stream, 0,
+				TDecode, TState2, CDecode}, buffer=Rest}};
+		{done, TotalLength, Rest} ->
+			{ok, {ok, <<>>}, body_decode_end(Req, TotalLength, Rest)};
+		{done, Data2, TotalLength, Rest} ->
+			{ok, CDecode(Data2), body_decode_end(Req, TotalLength, Rest)}
+	end.
+
+%% Finalize the request after the body is fully decoded: record the
+%% decoded length as content-length in both the raw and parsed header
+%% lists, drop transfer-encoding, keep any leftover bytes (pipelined
+%% data) in the buffer, and mark the body done.
+body_decode_end(Req=#http_req{headers=Headers, p_headers=PHeaders},
+		TotalLength, Rest) ->
+	Headers2 = lists:keystore(<<"content-length">>, 1, Headers,
+		{<<"content-length">>, integer_to_binary(TotalLength)}),
+	%% At this point we just assume TEs were all decoded.
+	Headers3 = lists:keydelete(<<"transfer-encoding">>, 1, Headers2),
+	PHeaders2 = lists:keystore(<<"content-length">>, 1, PHeaders,
+		{<<"content-length">>, TotalLength}),
+	PHeaders3 = lists:keydelete(<<"transfer-encoding">>, 1, PHeaders2),
+	Req#http_req{buffer=Rest, body_state=done,
+		headers=Headers3, p_headers=PHeaders3}.
+
+%% Read and parse an application/x-www-form-urlencoded body using
+%% conservative limits (64KB, 5s timeout).
+%% NOTE(review): this spec omits the {badlength, Req} return that
+%% body_qs/2 can produce — the /1 and /2 specs disagree; confirm
+%% against upstream before tightening.
+-spec body_qs(Req)
+	-> {ok, [{binary(), binary() | true}], Req} | {error, atom()}
+	when Req::req().
+body_qs(Req) ->
+	body_qs(Req, [
+		{length, 64000},
+		{read_length, 64000},
+		{read_timeout, 5000}]).
+
+%% Same with caller-supplied body options. {badlength, Req} is
+%% returned when the body exceeds the length option.
+-spec body_qs(Req, body_opts()) -> {ok, [{binary(), binary() | true}], Req}
+	| {badlength, Req} | {error, atom()} when Req::req().
+body_qs(Req, Opts) ->
+	case body(Req, Opts) of
+		{ok, Body, Req2} ->
+			{ok, cow_qs:parse_qs(Body), Req2};
+		{more, _, Req2} ->
+			{badlength, Req2};
+		{error, Reason} ->
+			{error, Reason}
+	end.
+
+%% Multipart API.
+
+%% Read the headers of the next multipart part, with default limits.
+-spec part(Req)
+	-> {ok, cow_multipart:headers(), Req} | {done, Req}
+	when Req::req().
+part(Req) ->
+	part(Req, [
+		{length, 64000},
+		{read_length, 64000},
+		{read_timeout, 5000}]).
+
+%% Same with caller-supplied body options. Multipart state (boundary
+%% plus unconsumed buffer) is initialized lazily from the
+%% content-type header on first use.
+-spec part(Req, body_opts())
+	-> {ok, cow_multipart:headers(), Req} | {done, Req}
+	when Req::req().
+part(Req=#http_req{multipart=undefined}, Opts) ->
+	part(init_multipart(Req), Opts);
+part(Req, Opts) ->
+	{Data, Req2} = stream_multipart(Req, Opts),
+	part(Data, Opts, Req2).
+
+%% Feed buffered data to cow_multipart until a full set of part
+%% headers is parsed ({ok, ...}) or the final boundary is seen
+%% ({done, _}), pulling more body data as needed.
+part(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}) ->
+	case cow_multipart:parse_headers(Buffer, Boundary) of
+		more ->
+			{Data, Req2} = stream_multipart(Req, Opts),
+			part(<< Buffer/binary, Data/binary >>, Opts, Req2);
+		{more, Buffer2} ->
+			{Data, Req2} = stream_multipart(Req, Opts),
+			part(<< Buffer2/binary, Data/binary >>, Opts, Req2);
+		{ok, Headers, Rest} ->
+			{ok, Headers, Req#http_req{multipart={Boundary, Rest}}};
+		%% Ignore epilogue.
+		{done, _} ->
+			{done, Req#http_req{multipart=undefined}}
+	end.
+
+%% Read the body of the current multipart part with default options.
+-spec part_body(Req)
+	-> {ok, binary(), Req} | {more, binary(), Req}
+	when Req::req().
+part_body(Req) ->
+	part_body(Req, []).
+
+%% Same with caller-supplied body options.
+-spec part_body(Req, body_opts())
+	-> {ok, binary(), Req} | {more, binary(), Req}
+	when Req::req().
+part_body(Req=#http_req{multipart=undefined}, Opts) ->
+	part_body(init_multipart(Req), Opts);
+part_body(Req, Opts) ->
+	part_body(<<>>, Opts, Req, <<>>).
+
+%% Accumulate the part body up to the length option (default 8000000
+%% bytes, strictly exceeded before returning {more, ...}); any bytes
+%% past the part boundary are stashed back into the multipart buffer.
+part_body(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}, Acc) ->
+	ChunkLen = case lists:keyfind(length, 1, Opts) of
+		false -> 8000000;
+		{_, ChunkLen0} -> ChunkLen0
+	end,
+	case byte_size(Acc) > ChunkLen of
+		true ->
+			{more, Acc, Req#http_req{multipart={Boundary, Buffer}}};
+		false ->
+			{Data, Req2} = stream_multipart(Req, Opts),
+			case cow_multipart:parse_body(<< Buffer/binary, Data/binary >>, Boundary) of
+				{ok, Body} ->
+					part_body(<<>>, Opts, Req2, << Acc/binary, Body/binary >>);
+				{ok, Body, Rest} ->
+					part_body(Rest, Opts, Req2, << Acc/binary, Body/binary >>);
+				done ->
+					{ok, Acc, Req2};
+				{done, Body} ->
+					{ok, << Acc/binary, Body/binary >>, Req2};
+				{done, Body, Rest} ->
+					{ok, << Acc/binary, Body/binary >>,
+						Req2#http_req{multipart={Boundary, Rest}}}
+			end
+	end.
+
+%% Initialize multipart state from the content-type header. Crashes
+%% (badmatch) when the request is not multipart or has no boundary
+%% parameter.
+init_multipart(Req) ->
+	{ok, {<<"multipart">>, _, Params}, Req2}
+		= parse_header(<<"content-type">>, Req),
+	{_, Boundary} = lists:keyfind(<<"boundary">>, 1, Params),
+	Req2#http_req{multipart={Boundary, <<>>}}.
+
+%% Hand out the next slice of multipart data: drain the stashed
+%% buffer first, otherwise read more of the request body. Asserts the
+%% body is not already fully consumed.
+stream_multipart(Req=#http_req{body_state=BodyState, multipart={_, <<>>}}, Opts) ->
+	true = BodyState =/= done,
+	{_, Data, Req2} = body(Req, Opts),
+	{Data, Req2};
+stream_multipart(Req=#http_req{multipart={Boundary, Buffer}}, _) ->
+	{Buffer, Req#http_req{multipart={Boundary, <<>>}}}.
+
+%% Response API.
+
+%% The cookie name cannot contain any of the following characters:
+%%   =,;\s\t\r\n\013\014
+%%
+%% The cookie value cannot contain any of the following characters:
+%%   ,; \t\r\n\013\014
+%% Add a Set-Cookie response header built by cow_cookie:setcookie/3.
+-spec set_resp_cookie(iodata(), iodata(), cookie_opts(), Req)
+	-> Req when Req::req().
+set_resp_cookie(Name, Value, Opts, Req) ->
+	Cookie = cow_cookie:setcookie(Name, Value, Opts),
+	set_resp_header(<<"set-cookie">>, Cookie, Req).
+
+%% Prepend a header to the response header list. Note this does not
+%% replace an existing header of the same name — it adds another.
+-spec set_resp_header(binary(), iodata(), Req)
+	-> Req when Req::req().
+set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) ->
+	Req#http_req{resp_headers=[{Name, Value}|RespHeaders]}.
+
+%% Set the response body to an iodata value.
+-spec set_resp_body(iodata(), Req) -> Req when Req::req().
+set_resp_body(Body, Req) ->
+	Req#http_req{resp_body=Body}.
+
+%% Set the response body to a streaming fun (length unknown).
+-spec set_resp_body_fun(resp_body_fun(), Req) -> Req when Req::req().
+set_resp_body_fun(StreamFun, Req) when is_function(StreamFun) ->
+	Req#http_req{resp_body=StreamFun}.
+
+%% If the body function crashes while writing the response body or writes
+%% fewer bytes than declared the behaviour is undefined.
+%% Two forms: a known-length streaming fun, or 'chunked' with a fun
+%% receiving a chunk-sending callback.
+-spec set_resp_body_fun(non_neg_integer(), resp_body_fun(), Req)
+	-> Req when Req::req();
+	(chunked, resp_chunked_fun(), Req)
+	-> Req when Req::req().
+set_resp_body_fun(StreamLen, StreamFun, Req)
+		when is_integer(StreamLen), is_function(StreamFun) ->
+	Req#http_req{resp_body={StreamLen, StreamFun}};
+set_resp_body_fun(chunked, StreamFun, Req)
+		when is_function(StreamFun) ->
+	Req#http_req{resp_body={chunked, StreamFun}}.
+
+%% Whether a response header of this name has been set.
+-spec has_resp_header(binary(), req()) -> boolean().
+has_resp_header(Name, #http_req{resp_headers=RespHeaders}) ->
+	lists:keymember(Name, 1, RespHeaders).
+
+%% Whether a non-empty response body has been set. Clause order
+%% matters: fun and tuple bodies are recognized before the iodata
+%% fallback.
+-spec has_resp_body(req()) -> boolean().
+has_resp_body(#http_req{resp_body=RespBody}) when is_function(RespBody) ->
+	true;
+has_resp_body(#http_req{resp_body={chunked, _}}) ->
+	true;
+has_resp_body(#http_req{resp_body={Length, _}}) ->
+	Length > 0;
+has_resp_body(#http_req{resp_body=RespBody}) ->
+	iolist_size(RespBody) > 0.
+
+%% Remove all response headers with the given name.
+-spec delete_resp_header(binary(), Req)
+	-> Req when Req::req().
+delete_resp_header(Name, Req=#http_req{resp_headers=RespHeaders}) ->
+	RespHeaders2 = lists:keydelete(Name, 1, RespHeaders),
+	Req#http_req{resp_headers=RespHeaders2}.
+
+%% Send a reply with the previously set response body and headers.
+-spec reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req().
+reply(Status, Req=#http_req{resp_body=Body}) ->
+	reply(Status, [], Body, Req).
+
+%% Send a reply with extra headers and the previously set body.
+-spec reply(cowboy:http_status(), cowboy:http_headers(), Req)
+	-> {ok, Req} when Req::req().
+reply(Status, Headers, Req=#http_req{resp_body=Body}) ->
+	reply(Status, Headers, Body, Req).
+
+%% Send the full response. Only allowed while resp_state is waiting or
+%% waiting_stream (i.e. nothing has been sent yet). Dispatches on the
+%% body shape: streaming fun, chunked fun, {Length, Fun}, or iodata
+%% (optionally gzip-compressed when resp_compress is set). HEAD
+%% requests skip body transmission. Afterwards resp_state is done and
+%% the stored response headers/body are cleared.
+-spec reply(cowboy:http_status(), cowboy:http_headers(),
+	iodata() | {non_neg_integer() | resp_body_fun()}, Req)
+	-> {ok, Req} when Req::req().
+reply(Status, Headers, Body, Req=#http_req{
+		socket=Socket, transport=Transport,
+		version=Version, connection=Connection,
+		method=Method, resp_compress=Compress,
+		resp_state=RespState, resp_headers=RespHeaders})
+		when RespState =:= waiting; RespState =:= waiting_stream ->
+	%% Emit an explicit "connection" header only when the decision
+	%% differs from the version's default (and not over SPDY).
+	HTTP11Headers = if
+		Transport =/= cowboy_spdy, Version =:= 'HTTP/1.0', Connection =:= keepalive ->
+			[{<<"connection">>, atom_to_connection(Connection)}];
+		Transport =/= cowboy_spdy, Version =:= 'HTTP/1.1', Connection =:= close ->
+			[{<<"connection">>, atom_to_connection(Connection)}];
+		true ->
+			[]
+	end,
+	Req3 = case Body of
+		BodyFun when is_function(BodyFun) ->
+			%% We stream the response body until we close the connection.
+			RespConn = close,
+			{RespType, Req2} = if
+				Transport =:= cowboy_spdy ->
+					response(Status, Headers, RespHeaders, [
+						{<<"date">>, cowboy_clock:rfc1123()},
+						{<<"server">>, <<"Cowboy">>}
+					], stream, Req);
+				true ->
+					response(Status, Headers, RespHeaders, [
+						{<<"connection">>, <<"close">>},
+						{<<"date">>, cowboy_clock:rfc1123()},
+						{<<"server">>, <<"Cowboy">>},
+						{<<"transfer-encoding">>, <<"identity">>}
+					], <<>>, Req)
+			end,
+			%% A response hook may have replied already; also skip
+			%% the body for HEAD requests.
+			if	RespType =/= hook, Method =/= <<"HEAD">> ->
+					BodyFun(Socket, Transport);
+				true -> ok
+			end,
+			Req2#http_req{connection=RespConn};
+		{chunked, BodyFun} ->
+			%% We stream the response body in chunks.
+			{RespType, Req2} = chunked_response(Status, Headers, Req),
+			if	RespType =/= hook, Method =/= <<"HEAD">> ->
+					ChunkFun = fun(IoData) -> chunk(IoData, Req2) end,
+					BodyFun(ChunkFun),
+					%% Send the last chunk if chunked encoding was used.
+					if
+						Version =:= 'HTTP/1.0'; RespState =:= waiting_stream ->
+							Req2;
+						true ->
+							last_chunk(Req2)
+					end;
+				true -> Req2
+			end;
+		{ContentLength, BodyFun} ->
+			%% We stream the response body for ContentLength bytes.
+			RespConn = response_connection(Headers, Connection),
+			{RespType, Req2} = response(Status, Headers, RespHeaders, [
+					{<<"content-length">>, integer_to_list(ContentLength)},
+					{<<"date">>, cowboy_clock:rfc1123()},
+					{<<"server">>, <<"Cowboy">>}
+				|HTTP11Headers], stream, Req),
+			if	RespType =/= hook, Method =/= <<"HEAD">> ->
+					BodyFun(Socket, Transport);
+				true -> ok
+			end,
+			Req2#http_req{connection=RespConn};
+		_ when Compress ->
+			%% Plain iodata body; gzip when the client accepts it.
+			RespConn = response_connection(Headers, Connection),
+			Req2 = reply_may_compress(Status, Headers, Body, Req,
+				RespHeaders, HTTP11Headers, Method),
+			Req2#http_req{connection=RespConn};
+		_ ->
+			%% Plain iodata body, no compression.
+			RespConn = response_connection(Headers, Connection),
+			Req2 = reply_no_compress(Status, Headers, Body, Req,
+				RespHeaders, HTTP11Headers, Method, iolist_size(Body)),
+			Req2#http_req{connection=RespConn}
+	end,
+	{ok, Req3#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}.
+
+%% Send the reply, gzip-compressing the body when permitted: the body
+%% must exceed 300 bytes, neither the handler headers nor the stored
+%% response headers may already set content-encoding or
+%% transfer-encoding, and the parsed accept-encoding must list gzip.
+%% Otherwise (including an unparseable accept-encoding header) fall
+%% back to reply_no_compress/8.
+reply_may_compress(Status, Headers, Body, Req,
+               RespHeaders, HTTP11Headers, Method) ->
+       BodySize = iolist_size(Body),
+       case parse_header(<<"accept-encoding">>, Req) of
+               {ok, Encodings, Req2} ->
+                       CanGzip = (BodySize > 300)
+                               andalso (false =:= lists:keyfind(<<"content-encoding">>,
+                                       1, Headers))
+                               andalso (false =:= lists:keyfind(<<"content-encoding">>,
+                                       1, RespHeaders))
+                               andalso (false =:= lists:keyfind(<<"transfer-encoding">>,
+                                       1, Headers))
+                               andalso (false =:= lists:keyfind(<<"transfer-encoding">>,
+                                       1, RespHeaders))
+                               andalso (Encodings =/= undefined)
+                               andalso (false =/= lists:keyfind(<<"gzip">>, 1, Encodings)),
+                       case CanGzip of
+                               true ->
+                                       GzBody = zlib:gzip(Body),
+                                       {_, Req3} = response(Status, Headers, RespHeaders, [
+                                                       {<<"content-length">>, integer_to_list(byte_size(GzBody))},
+                                                       {<<"content-encoding">>, <<"gzip">>},
+                                                       {<<"date">>, cowboy_clock:rfc1123()},
+                                                       {<<"server">>, <<"Cowboy">>}
+                                               |HTTP11Headers],
+                                               %% HEAD gets the real content-length but no body.
+                                               case Method of <<"HEAD">> -> <<>>; _ -> GzBody end,
+                                               Req2),
+                                       Req3;
+                               false ->
+                                       reply_no_compress(Status, Headers, Body, Req,
+                                               RespHeaders, HTTP11Headers, Method, BodySize)
+                       end;
+               {error, badarg} ->
+                       reply_no_compress(Status, Headers, Body, Req,
+                               RespHeaders, HTTP11Headers, Method, BodySize)
+       end.
+
+%% Send the reply uncompressed. BodySize is precomputed by the caller;
+%% HEAD requests advertise it in content-length but send an empty body.
+reply_no_compress(Status, Headers, Body, Req,
+               RespHeaders, HTTP11Headers, Method, BodySize) ->
+       {_, Req2} = response(Status, Headers, RespHeaders, [
+                       {<<"content-length">>, integer_to_list(BodySize)},
+                       {<<"date">>, cowboy_clock:rfc1123()},
+                       {<<"server">>, <<"Cowboy">>}
+               |HTTP11Headers],
+               case Method of <<"HEAD">> -> <<>>; _ -> Body end,
+               Req),
+       Req2.
+
+%% Initiate a chunked (or streamed, depending on protocol version)
+%% reply with the given status and no extra headers.
+-spec chunked_reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req().
+chunked_reply(Status, Req) ->
+       chunked_reply(Status, [], Req).
+
+%% Same, with handler-supplied headers. Delegates the protocol-specific
+%% header/state handling to chunked_response/3.
+-spec chunked_reply(cowboy:http_status(), cowboy:http_headers(), Req)
+       -> {ok, Req} when Req::req().
+chunked_reply(Status, Headers, Req) ->
+       {_, Req2} = chunked_response(Status, Headers, Req),
+       {ok, Req2}.
+
+%% Send one chunk of the body. Clause order matters:
+%% - HEAD requests send nothing;
+%% - SPDY delegates framing to cowboy_spdy:stream_data/2;
+%% - resp_state=stream sends the data raw, without chunked framing;
+%% - resp_state=chunks wraps the data in HTTP/1.1 chunked encoding
+%%   (hex size, CRLF, data, CRLF).
+-spec chunk(iodata(), req()) -> ok | {error, atom()}.
+chunk(_Data, #http_req{method= <<"HEAD">>}) ->
+       ok;
+chunk(Data, #http_req{socket=Socket, transport=cowboy_spdy,
+               resp_state=chunks}) ->
+       cowboy_spdy:stream_data(Socket, Data);
+chunk(Data, #http_req{socket=Socket, transport=Transport,
+               resp_state=stream}) ->
+       Transport:send(Socket, Data);
+chunk(Data, #http_req{socket=Socket, transport=Transport,
+               resp_state=chunks}) ->
+       Transport:send(Socket, [integer_to_list(iolist_size(Data), 16),
+               <<"\r\n">>, Data, <<"\r\n">>]).
+
+%% Terminate the body: close the SPDY stream, or send the zero-length
+%% final chunk for HTTP/1.1, and mark the response done.
+%% If ever made public, need to send nothing if HEAD.
+-spec last_chunk(Req) -> Req when Req::req().
+last_chunk(Req=#http_req{socket=Socket, transport=cowboy_spdy}) ->
+       _ = cowboy_spdy:stream_close(Socket),
+       Req#http_req{resp_state=done};
+last_chunk(Req=#http_req{socket=Socket, transport=Transport}) ->
+       _ = Transport:send(Socket, <<"0\r\n\r\n">>),
+       Req#http_req{resp_state=done}.
+
+%% Send the response that completes a protocol upgrade (e.g. to
+%% Websocket), adding a "connection: Upgrade" header. Only matches
+%% non-SPDY transports with an untouched (resp_state=waiting) response.
+-spec upgrade_reply(cowboy:http_status(), cowboy:http_headers(), Req)
+       -> {ok, Req} when Req::req().
+upgrade_reply(Status, Headers, Req=#http_req{transport=Transport,
+               resp_state=waiting, resp_headers=RespHeaders})
+               when Transport =/= cowboy_spdy ->
+       {_, Req2} = response(Status, Headers, RespHeaders, [
+               {<<"connection">>, <<"Upgrade">>}
+       ], <<>>, Req),
+       {ok, Req2#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}.
+
+%% Send a bare "100 Continue" interim status line (no headers, no body)
+%% using the request's HTTP version.
+-spec continue(req()) -> ok | {error, atom()}.
+continue(#http_req{socket=Socket, transport=Transport,
+               version=Version}) ->
+       HTTPVer = atom_to_binary(Version, latin1),
+       Transport:send(Socket,
+               << HTTPVer/binary, " ", (status(100))/binary, "\r\n\r\n" >>).
+
+%% Meant to be used internally for sending errors after crashes.
+%% Checks the process mailbox for the resp_sent notification (sent by
+%% response/6) so we never send a second response after one already
+%% went out; otherwise reply based on the crash stacktrace.
+-spec maybe_reply([{module(), atom(), arity() | [term()], _}], req()) -> ok.
+maybe_reply(Stacktrace, Req) ->
+       receive
+               {cowboy_req, resp_sent} -> ok
+       after 0 ->
+               _ = do_maybe_reply(Stacktrace, Req),
+               ok
+       end.
+
+%% A crash inside cow_http_hd via parse_header means the client sent a
+%% malformed header -> 400; any other crash is a server error -> 500.
+do_maybe_reply([
+               {cow_http_hd, _, _, _},
+               {cowboy_req, parse_header, _, _}|_], Req) ->
+       cowboy_req:reply(400, Req);
+do_maybe_reply(_, Req) ->
+       cowboy_req:reply(500, Req).
+
+%% Guarantee that some response reaches the client, whatever state the
+%% request terminated in. Clause order is significant: the HEAD clause
+%% is only reached when resp_state is neither done nor waiting*.
+-spec ensure_response(req(), cowboy:http_status()) -> ok.
+%% The response has already been fully sent to the client.
+ensure_response(#http_req{resp_state=done}, _) ->
+       ok;
+%% No response has been sent but everything apparently went fine.
+%% Reply with the status code found in the second argument.
+ensure_response(Req=#http_req{resp_state=RespState}, Status)
+               when RespState =:= waiting; RespState =:= waiting_stream ->
+       _ = reply(Status, [], [], Req),
+       ok;
+%% Terminate the chunked body for HTTP/1.1 only.
+ensure_response(#http_req{method= <<"HEAD">>}, _) ->
+       ok;
+ensure_response(Req=#http_req{resp_state=chunks}, _) ->
+       _ = last_chunk(Req),
+       ok;
+%% Any other resp_state needs no action here.
+ensure_response(#http_req{}, _) ->
+       ok.
+
+%% Private setter/getter API.
+
+%% Append raw received bytes to the request's internal read buffer.
+-spec append_buffer(binary(), Req) -> Req when Req::req().
+append_buffer(Suffix, Req=#http_req{buffer=Buffer}) ->
+       Req#http_req{buffer= << Buffer/binary, Suffix/binary >>}.
+
+%% Read one field (atom) or several fields (list of atoms, returning a
+%% list of values in the same order) from the #http_req record.
+-spec get(atom(), req()) -> any(); ([atom()], req()) -> any().
+get(List, Req) when is_list(List) ->
+       [g(Atom, Req) || Atom <- List];
+get(Atom, Req) when is_atom(Atom) ->
+       g(Atom, Req).
+
+%% One-field accessor backing get/2: maps a field name atom to the
+%% corresponding #http_req record field. Unknown atoms crash with
+%% function_clause, which is intentional.
+g(bindings, #http_req{bindings=Ret}) -> Ret;
+g(body_state, #http_req{body_state=Ret}) -> Ret;
+g(buffer, #http_req{buffer=Ret}) -> Ret;
+g(connection, #http_req{connection=Ret}) -> Ret;
+g(cookies, #http_req{cookies=Ret}) -> Ret;
+g(headers, #http_req{headers=Ret}) -> Ret;
+g(host, #http_req{host=Ret}) -> Ret;
+g(host_info, #http_req{host_info=Ret}) -> Ret;
+g(meta, #http_req{meta=Ret}) -> Ret;
+g(method, #http_req{method=Ret}) -> Ret;
+g(multipart, #http_req{multipart=Ret}) -> Ret;
+g(onresponse, #http_req{onresponse=Ret}) -> Ret;
+g(p_headers, #http_req{p_headers=Ret}) -> Ret;
+g(path, #http_req{path=Ret}) -> Ret;
+g(path_info, #http_req{path_info=Ret}) -> Ret;
+g(peer, #http_req{peer=Ret}) -> Ret;
+g(pid, #http_req{pid=Ret}) -> Ret;
+g(port, #http_req{port=Ret}) -> Ret;
+g(qs, #http_req{qs=Ret}) -> Ret;
+g(qs_vals, #http_req{qs_vals=Ret}) -> Ret;
+g(resp_body, #http_req{resp_body=Ret}) -> Ret;
+g(resp_compress, #http_req{resp_compress=Ret}) -> Ret;
+g(resp_headers, #http_req{resp_headers=Ret}) -> Ret;
+g(resp_state, #http_req{resp_state=Ret}) -> Ret;
+g(socket, #http_req{socket=Ret}) -> Ret;
+g(transport, #http_req{transport=Ret}) -> Ret;
+g(version, #http_req{version=Ret}) -> Ret.
+
+%% Apply a list of {field, value} updates to the #http_req record.
+%% Note: resp_compress has a getter in g/2 but no setter clause here.
+-spec set([{atom(), any()}], Req) -> Req when Req::req().
+set([], Req) -> Req;
+set([{bindings, Val}|Tail], Req) -> set(Tail, Req#http_req{bindings=Val});
+set([{body_state, Val}|Tail], Req) -> set(Tail, Req#http_req{body_state=Val});
+set([{buffer, Val}|Tail], Req) -> set(Tail, Req#http_req{buffer=Val});
+set([{connection, Val}|Tail], Req) -> set(Tail, Req#http_req{connection=Val});
+set([{cookies, Val}|Tail], Req) -> set(Tail, Req#http_req{cookies=Val});
+set([{headers, Val}|Tail], Req) -> set(Tail, Req#http_req{headers=Val});
+set([{host, Val}|Tail], Req) -> set(Tail, Req#http_req{host=Val});
+set([{host_info, Val}|Tail], Req) -> set(Tail, Req#http_req{host_info=Val});
+set([{meta, Val}|Tail], Req) -> set(Tail, Req#http_req{meta=Val});
+set([{method, Val}|Tail], Req) -> set(Tail, Req#http_req{method=Val});
+set([{multipart, Val}|Tail], Req) -> set(Tail, Req#http_req{multipart=Val});
+set([{onresponse, Val}|Tail], Req) -> set(Tail, Req#http_req{onresponse=Val});
+set([{p_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{p_headers=Val});
+set([{path, Val}|Tail], Req) -> set(Tail, Req#http_req{path=Val});
+set([{path_info, Val}|Tail], Req) -> set(Tail, Req#http_req{path_info=Val});
+set([{peer, Val}|Tail], Req) -> set(Tail, Req#http_req{peer=Val});
+set([{pid, Val}|Tail], Req) -> set(Tail, Req#http_req{pid=Val});
+set([{port, Val}|Tail], Req) -> set(Tail, Req#http_req{port=Val});
+set([{qs, Val}|Tail], Req) -> set(Tail, Req#http_req{qs=Val});
+set([{qs_vals, Val}|Tail], Req) -> set(Tail, Req#http_req{qs_vals=Val});
+set([{resp_body, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_body=Val});
+set([{resp_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_headers=Val});
+set([{resp_state, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_state=Val});
+set([{socket, Val}|Tail], Req) -> set(Tail, Req#http_req{socket=Val});
+set([{transport, Val}|Tail], Req) -> set(Tail, Req#http_req{transport=Val});
+set([{version, Val}|Tail], Req) -> set(Tail, Req#http_req{version=Val}).
+
+%% Store the routing results (set by cowboy_router after a match).
+-spec set_bindings(cowboy_router:tokens(), cowboy_router:tokens(),
+       cowboy_router:bindings(), Req) -> Req when Req::req().
+set_bindings(HostInfo, PathInfo, Bindings, Req) ->
+       Req#http_req{host_info=HostInfo, path_info=PathInfo,
+               bindings=Bindings}.
+
+%% Misc API.
+
+%% Shrink the request record by dropping routing results and cached
+%% parsed values that can be recomputed or are no longer needed.
+-spec compact(Req) -> Req when Req::req().
+compact(Req) ->
+       Req#http_req{host_info=undefined,
+               path_info=undefined, qs_vals=undefined,
+               bindings=undefined, headers=[],
+               p_headers=[], cookies=[]}.
+
+%% Mark the response as locked so no further reply can be sent.
+-spec lock(Req) -> Req when Req::req().
+lock(Req) ->
+       Req#http_req{resp_state=locked}.
+
+%% Dump the request record as a {field, value} proplist, pairing the
+%% record's field names with its values (the record tag is dropped).
+-spec to_list(req()) -> [{atom(), any()}].
+to_list(Req) ->
+       lists:zip(record_info(fields, http_req), tl(tuple_to_list(Req))).
+
+%% Internal.
+
+%% Start a chunked/streamed response. SPDY has its own framing, so its
+%% clause always streams and moves straight to resp_state=chunks. For
+%% HTTP, resp_state becomes chunks only for HTTP/1.1 with a fresh
+%% (waiting) response — transfer-encoding: chunked is advertised — and
+%% stream otherwise (HTTP/1.0, or waiting_stream), where the body is
+%% sent raw until the connection closes.
+-spec chunked_response(cowboy:http_status(), cowboy:http_headers(), Req) ->
+       {normal | hook, Req} when Req::req().
+chunked_response(Status, Headers, Req=#http_req{
+               transport=cowboy_spdy, resp_state=waiting,
+               resp_headers=RespHeaders}) ->
+       {RespType, Req2} = response(Status, Headers, RespHeaders, [
+               {<<"date">>, cowboy_clock:rfc1123()},
+               {<<"server">>, <<"Cowboy">>}
+       ], stream, Req),
+       {RespType, Req2#http_req{resp_state=chunks,
+               resp_headers=[], resp_body= <<>>}};
+chunked_response(Status, Headers, Req=#http_req{
+               version=Version, connection=Connection,
+               resp_state=RespState, resp_headers=RespHeaders})
+               when RespState =:= waiting; RespState =:= waiting_stream ->
+       RespConn = response_connection(Headers, Connection),
+       %% HTTP/1.0 never gets transfer-encoding; it only gets an explicit
+       %% "connection: keep-alive" when the connection is kept open.
+       HTTP11Headers = if
+               Version =:= 'HTTP/1.0', Connection =:= keepalive ->
+                       [{<<"connection">>, atom_to_connection(Connection)}];
+               Version =:= 'HTTP/1.0' -> [];
+               true ->
+                       MaybeTE = if
+                               RespState =:= waiting_stream -> [];
+                               true -> [{<<"transfer-encoding">>, <<"chunked">>}]
+                       end,
+                       if
+                               Connection =:= close ->
+                                       [{<<"connection">>, atom_to_connection(Connection)}|MaybeTE];
+                               true ->
+                                       MaybeTE
+                       end
+       end,
+       RespState2 = if
+               Version =:= 'HTTP/1.1', RespState =:= 'waiting' -> chunks;
+               true -> stream
+       end,
+       {RespType, Req2} = response(Status, Headers, RespHeaders, [
+               {<<"date">>, cowboy_clock:rfc1123()},
+               {<<"server">>, <<"Cowboy">>}
+       |HTTP11Headers], <<>>, Req),
+       {RespType, Req2#http_req{connection=RespConn, resp_state=RespState2,
+                       resp_headers=[], resp_body= <<>>}}.
+
+%% Low-level response sender used by all reply paths. Merges the three
+%% header lists, runs the onresponse hook (if any), then writes the
+%% status line, headers and body to the socket — or delegates to
+%% cowboy_spdy. Returns {normal, Req} when a response was actually
+%% sent, or {hook, Req} when the onresponse hook already handled it
+%% (resp_state no longer waiting*). The resp_sent message lets
+%% maybe_reply/2 detect that a response went out.
+-spec response(cowboy:http_status(), cowboy:http_headers(),
+       cowboy:http_headers(), cowboy:http_headers(), stream | iodata(), Req)
+       -> {normal | hook, Req} when Req::req().
+response(Status, Headers, RespHeaders, DefaultHeaders, Body, Req=#http_req{
+               socket=Socket, transport=Transport, version=Version,
+               pid=ReqPid, onresponse=OnResponse}) ->
+       FullHeaders = case OnResponse of
+               already_called -> Headers;
+               _ -> response_merge_headers(Headers, RespHeaders, DefaultHeaders)
+       end,
+       Body2 = case Body of stream -> <<>>; _ -> Body end,
+       %% The hook may return either {Status, Headers, Req} to override
+       %% them, or just a Req. 'already_called' prevents recursion when
+       %% the hook itself replies.
+       {Status2, FullHeaders2, Req2} = case OnResponse of
+               already_called -> {Status, FullHeaders, Req};
+               undefined -> {Status, FullHeaders, Req};
+               OnResponse ->
+                       case OnResponse(Status, FullHeaders, Body2,
+                                       %% Don't call 'onresponse' from the hook itself.
+                                       Req#http_req{resp_headers=[], resp_body= <<>>,
+                                               onresponse=already_called}) of
+                               StHdReq = {_, _, _} ->
+                                       StHdReq;
+                               Req1 ->
+                                       {Status, FullHeaders, Req1}
+                       end
+       end,
+       ReplyType = case Req2#http_req.resp_state of
+               waiting when Transport =:= cowboy_spdy, Body =:= stream ->
+                       cowboy_spdy:stream_reply(Socket, status(Status2), FullHeaders2),
+                       ReqPid ! {?MODULE, resp_sent},
+                       normal;
+               waiting when Transport =:= cowboy_spdy ->
+                       cowboy_spdy:reply(Socket, status(Status2), FullHeaders2, Body),
+                       ReqPid ! {?MODULE, resp_sent},
+                       normal;
+               RespState when RespState =:= waiting; RespState =:= waiting_stream ->
+                       HTTPVer = atom_to_binary(Version, latin1),
+                       StatusLine = << HTTPVer/binary, " ",
+                               (status(Status2))/binary, "\r\n" >>,
+                       HeaderLines = [[Key, <<": ">>, Value, <<"\r\n">>]
+                               || {Key, Value} <- FullHeaders2],
+                       Transport:send(Socket, [StatusLine, HeaderLines, <<"\r\n">>, Body2]),
+                       ReqPid ! {?MODULE, resp_sent},
+                       normal;
+               _ ->
+                       hook
+       end,
+       {ReplyType, Req2}.
+
+%% Determine the connection state after the reply: the first
+%% "connection" header found in the handler headers wins (its tokens
+%% are parsed and reduced to an atom); otherwise keep the current one.
+-spec response_connection(cowboy:http_headers(), keepalive | close)
+       -> keepalive | close.
+response_connection([], Connection) ->
+       Connection;
+response_connection([{Name, Value}|Tail], Connection) ->
+       case Name of
+               <<"connection">> ->
+                       Tokens = cow_http_hd:parse_connection(Value),
+                       connection_to_atom(Tokens);
+               _ ->
+                       response_connection(Tail, Connection)
+       end.
+
+%% Merge handler headers, stored response headers and cowboy defaults,
+%% in decreasing priority (earlier lists shadow later ones).
+-spec response_merge_headers(cowboy:http_headers(), cowboy:http_headers(),
+       cowboy:http_headers()) -> cowboy:http_headers().
+response_merge_headers(Headers, RespHeaders, DefaultHeaders) ->
+       Headers2 = [{Key, Value} || {Key, Value} <- Headers],
+       merge_headers(
+               merge_headers(Headers2, RespHeaders),
+               DefaultHeaders).
+
+-spec merge_headers(cowboy:http_headers(), cowboy:http_headers())
+       -> cowboy:http_headers().
+
+%% Merge headers by prepending the tuples in the second list to the
+%% first list. It also handles Set-Cookie properly, which supports
+%% duplicated entries. Notice that, while the RFC2109 does allow more
+%% than one cookie to be set per Set-Cookie header, we are following
+%% the implementation of common web servers and applications which
+%% return many distinct headers per each Set-Cookie entry to avoid
+%% issues with clients/browser which may not support it.
+merge_headers(Headers, []) ->
+       Headers;
+merge_headers(Headers, [{<<"set-cookie">>, Value}|Tail]) ->
+       merge_headers([{<<"set-cookie">>, Value}|Headers], Tail);
+merge_headers(Headers, [{Name, Value}|Tail]) ->
+       Headers2 = case lists:keymember(Name, 1, Headers) of
+               true -> Headers;
+               false -> [{Name, Value}|Headers]
+       end,
+       merge_headers(Headers2, Tail).
+
+%% Render the connection atom as the header value to send on the wire.
+-spec atom_to_connection(keepalive) -> <<_:80>>;
+                                               (close) -> <<_:40>>.
+atom_to_connection(keepalive) ->
+       <<"keep-alive">>;
+atom_to_connection(close) ->
+       <<"close">>.
+
+%% We don't match on "keep-alive" since it is the default value.
+%% Scan the parsed connection tokens; any "close" token wins.
+-spec connection_to_atom([binary()]) -> keepalive | close.
+connection_to_atom([]) ->
+       keepalive;
+connection_to_atom([<<"close">>|_]) ->
+       close;
+connection_to_atom([_|Tail]) ->
+       connection_to_atom(Tail).
+
+%% Map a numeric status code to its full "CODE Reason-Phrase" line.
+%% A binary argument is passed through unchanged, allowing custom
+%% status lines.
+-spec status(cowboy:http_status()) -> binary().
+status(100) -> <<"100 Continue">>;
+status(101) -> <<"101 Switching Protocols">>;
+status(102) -> <<"102 Processing">>;
+status(200) -> <<"200 OK">>;
+status(201) -> <<"201 Created">>;
+status(202) -> <<"202 Accepted">>;
+status(203) -> <<"203 Non-Authoritative Information">>;
+status(204) -> <<"204 No Content">>;
+status(205) -> <<"205 Reset Content">>;
+status(206) -> <<"206 Partial Content">>;
+status(207) -> <<"207 Multi-Status">>;
+status(226) -> <<"226 IM Used">>;
+status(300) -> <<"300 Multiple Choices">>;
+status(301) -> <<"301 Moved Permanently">>;
+status(302) -> <<"302 Found">>;
+status(303) -> <<"303 See Other">>;
+status(304) -> <<"304 Not Modified">>;
+status(305) -> <<"305 Use Proxy">>;
+status(306) -> <<"306 Switch Proxy">>;
+status(307) -> <<"307 Temporary Redirect">>;
+status(400) -> <<"400 Bad Request">>;
+status(401) -> <<"401 Unauthorized">>;
+status(402) -> <<"402 Payment Required">>;
+status(403) -> <<"403 Forbidden">>;
+status(404) -> <<"404 Not Found">>;
+status(405) -> <<"405 Method Not Allowed">>;
+status(406) -> <<"406 Not Acceptable">>;
+status(407) -> <<"407 Proxy Authentication Required">>;
+status(408) -> <<"408 Request Timeout">>;
+status(409) -> <<"409 Conflict">>;
+status(410) -> <<"410 Gone">>;
+status(411) -> <<"411 Length Required">>;
+status(412) -> <<"412 Precondition Failed">>;
+status(413) -> <<"413 Request Entity Too Large">>;
+status(414) -> <<"414 Request-URI Too Long">>;
+status(415) -> <<"415 Unsupported Media Type">>;
+status(416) -> <<"416 Requested Range Not Satisfiable">>;
+status(417) -> <<"417 Expectation Failed">>;
+status(418) -> <<"418 I'm a teapot">>;
+status(422) -> <<"422 Unprocessable Entity">>;
+status(423) -> <<"423 Locked">>;
+status(424) -> <<"424 Failed Dependency">>;
+status(425) -> <<"425 Unordered Collection">>;
+status(426) -> <<"426 Upgrade Required">>;
+status(428) -> <<"428 Precondition Required">>;
+status(429) -> <<"429 Too Many Requests">>;
+status(431) -> <<"431 Request Header Fields Too Large">>;
+status(500) -> <<"500 Internal Server Error">>;
+status(501) -> <<"501 Not Implemented">>;
+status(502) -> <<"502 Bad Gateway">>;
+status(503) -> <<"503 Service Unavailable">>;
+status(504) -> <<"504 Gateway Timeout">>;
+status(505) -> <<"505 HTTP Version Not Supported">>;
+status(506) -> <<"506 Variant Also Negotiates">>;
+status(507) -> <<"507 Insufficient Storage">>;
+status(510) -> <<"510 Not Extended">>;
+status(511) -> <<"511 Network Authentication Required">>;
+status(B) when is_binary(B) -> B.
+
+%% Tests.
+
+-ifdef(TEST).
+%% url/1 must elide the default port for each scheme (80 for ranch_tcp,
+%% 443 for ranch_ssl) and append the query string only when non-empty.
+url_test() ->
+       {undefined, _} =
+               url(#http_req{transport=ranch_tcp, host= <<>>, port= undefined,
+                       path= <<>>, qs= <<>>, pid=self()}),
+       {<<"http://localhost/path">>, _ } =
+               url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=80,
+                       path= <<"/path">>, qs= <<>>, pid=self()}),
+       %% 443 over plain TCP is not the default http port, so it is kept.
+       {<<"http://localhost:443/path">>, _} =
+               url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=443,
+                       path= <<"/path">>, qs= <<>>, pid=self()}),
+       {<<"http://localhost:8080/path">>, _} =
+               url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080,
+                       path= <<"/path">>, qs= <<>>, pid=self()}),
+       {<<"http://localhost:8080/path?dummy=2785">>, _} =
+               url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080,
+                       path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}),
+       {<<"https://localhost/path">>, _} =
+               url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=443,
+                       path= <<"/path">>, qs= <<>>, pid=self()}),
+       {<<"https://localhost:8443/path">>, _} =
+               url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=8443,
+                       path= <<"/path">>, qs= <<>>, pid=self()}),
+       {<<"https://localhost:8443/path?dummy=2785">>, _} =
+               url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=8443,
+                       path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}),
+       ok.
+
+%% eunit test generator: one named test per {input, expected} pair.
+connection_to_atom_test_() ->
+       Tests = [
+               {[<<"close">>], close},
+               {[<<"keep-alive">>], keepalive},
+               {[<<"keep-alive">>, <<"upgrade">>], keepalive}
+       ],
+       [{lists:flatten(io_lib:format("~p", [T])),
+               fun() -> R = connection_to_atom(T) end} || {T, R} <- Tests].
+
+%% eunit test generator checking merge precedence and that duplicate
+%% set-cookie entries are all preserved.
+merge_headers_test_() ->
+       Tests = [
+               {[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}],
+                [{<<"set-cookie">>,<<"foo=bar">>},{<<"content-length">>,<<"11">>}],
+                [{<<"set-cookie">>,<<"foo=bar">>},
+                 {<<"content-length">>,<<"13">>},
+                 {<<"server">>,<<"Cowboy">>}]},
+               {[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}],
+                [{<<"set-cookie">>,<<"foo=bar">>},{<<"set-cookie">>,<<"bar=baz">>}],
+                [{<<"set-cookie">>,<<"bar=baz">>},
+                 {<<"set-cookie">>,<<"foo=bar">>},
+                 {<<"content-length">>,<<"13">>},
+                 {<<"server">>,<<"Cowboy">>}]}
+       ],
+       [fun() -> Res = merge_headers(L,R) end || {L, R, Res} <- Tests].
+-endif.
similarity index 54%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_rest.erl
rename to deps/cowboy/src/cowboy_rest.erl
index e6cc6ffd10383b7014a938ea360534d87691d762..fe72583768e048ee87f6b3b6b18b04d6d402195e 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
-%% @doc Experimental REST protocol implementation.
-%%
-%% Based on the Webmachine Diagram from Alan Dean and Justin Sheehy, which
-%% can be found in the Webmachine source tree, and on the Webmachine
-%% documentation available at http://wiki.basho.com/Webmachine.html
-%% at the time of writing.
--module(cowboy_http_rest).
+%% Originally based on the Webmachine Diagram from Alan Dean and
+%% Justin Sheehy.
+-module(cowboy_rest).
+-behaviour(cowboy_sub_protocol).
+
 -export([upgrade/4]).
 
 -record(state, {
+       env :: cowboy_middleware:env(),
+       method = undefined :: binary(),
+
        %% Handler.
        handler :: atom(),
        handler_state :: any(),
 
+       %% Allowed methods. Only used for OPTIONS requests.
+       allowed_methods :: [binary()],
+
        %% Media type.
        content_types_p = [] ::
-               [{{binary(), binary(), [{binary(), binary()}]}, atom()}],
+               [{binary() | {binary(), binary(), [{binary(), binary()}] | '*'},
+                       atom()}],
        content_type_a :: undefined
-               | {{binary(), binary(), [{binary(), binary()}]}, atom()},
+               | {binary() | {binary(), binary(), [{binary(), binary()}] | '*'},
+                       atom()},
 
        %% Language.
        languages_p = [] :: [binary()],
        charsets_p = [] :: [binary()],
        charset_a :: undefined | binary(),
 
+       %% Whether the resource exists.
+       exists = false :: boolean(),
+
        %% Cached resource calls.
-       etag :: undefined | no_call | binary(),
+       etag :: undefined | no_call | {strong | weak, binary()},
        last_modified :: undefined | no_call | calendar:datetime(),
-       expires :: undefined | no_call | calendar:datetime()
+       expires :: undefined | no_call | calendar:datetime() | binary()
 }).
 
--include("include/http.hrl").
-
-%% @doc Upgrade a HTTP request to the REST protocol.
-%%
-%% You do not need to call this function manually. To upgrade to the REST
-%% protocol, you simply need to return <em>{upgrade, protocol, {@module}}</em>
-%% in your <em>cowboy_http_handler:init/3</em> handler function.
--spec upgrade(pid(), module(), any(), #http_req{})
-       -> {ok, #http_req{}} | close.
-upgrade(_ListenerPid, Handler, Opts, Req) ->
-       try
-               case erlang:function_exported(Handler, rest_init, 2) of
-                       true ->
-                               case Handler:rest_init(Req, Opts) of
-                                       {ok, Req2, HandlerState} ->
-                                               service_available(Req2, #state{handler=Handler,
-                                                       handler_state=HandlerState})
-                               end;
-                       false ->
-                               service_available(Req, #state{handler=Handler})
-               end
-       catch Class:Reason ->
-               error_logger:error_msg(
-                       "** Handler ~p terminating in rest_init/3~n"
-                       "   for the reason ~p:~p~n** Options were ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Class, Reason, Opts, Req, erlang:get_stacktrace()]),
-               {ok, _Req2} = cowboy_http_req:reply(500, Req),
-               close
+-spec upgrade(Req, Env, module(), any())
+       -> {ok, Req, Env} when Req::cowboy_req:req(), Env::cowboy_middleware:env().
+upgrade(Req, Env, Handler, HandlerOpts) ->
+       Method = cowboy_req:get(method, Req),
+       case erlang:function_exported(Handler, rest_init, 2) of
+               true ->
+                       try Handler:rest_init(Req, HandlerOpts) of
+                               {ok, Req2, HandlerState} ->
+                                       service_available(Req2, #state{env=Env, method=Method,
+                                               handler=Handler, handler_state=HandlerState})
+                       catch Class:Reason ->
+                               Stacktrace = erlang:get_stacktrace(),
+                               cowboy_req:maybe_reply(Stacktrace, Req),
+                               erlang:Class([
+                                       {reason, Reason},
+                                       {mfa, {Handler, rest_init, 2}},
+                                       {stacktrace, Stacktrace},
+                                       {req, cowboy_req:to_list(Req)},
+                                       {opts, HandlerOpts}
+                               ])
+                       end;
+               false ->
+                       service_available(Req, #state{env=Env, method=Method,
+                               handler=Handler})
        end.
 
 service_available(Req, State) ->
        expect(Req, State, service_available, true, fun known_methods/2, 503).
 
-%% known_methods/2 should return a list of atoms or binary methods.
-known_methods(Req=#http_req{method=Method}, State) ->
+%% known_methods/2 should return a list of binary methods.
+known_methods(Req, State=#state{method=Method}) ->
        case call(Req, State, known_methods) of
-               no_call when Method =:= 'HEAD'; Method =:= 'GET'; Method =:= 'POST';
-                                       Method =:= 'PUT'; Method =:= 'DELETE'; Method =:= 'TRACE';
-                                       Method =:= 'CONNECT'; Method =:= 'OPTIONS' ->
+               no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">>;
+                               Method =:= <<"POST">>; Method =:= <<"PUT">>;
+                               Method =:= <<"PATCH">>; Method =:= <<"DELETE">>;
+                               Method =:= <<"OPTIONS">> ->
                        next(Req, State, fun uri_too_long/2);
                no_call ->
                        next(Req, State, 501);
@@ -102,38 +107,41 @@ known_methods(Req=#http_req{method=Method}, State) ->
 uri_too_long(Req, State) ->
        expect(Req, State, uri_too_long, false, fun allowed_methods/2, 414).
 
-%% allowed_methods/2 should return a list of atoms or binary methods.
-allowed_methods(Req=#http_req{method=Method}, State) ->
+%% allowed_methods/2 should return a list of binary methods.
+allowed_methods(Req, State=#state{method=Method}) ->
        case call(Req, State, allowed_methods) of
-               no_call when Method =:= 'HEAD'; Method =:= 'GET' ->
+               no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">> ->
                        next(Req, State, fun malformed_request/2);
+               no_call when Method =:= <<"OPTIONS">> ->
+                       next(Req, State#state{allowed_methods=
+                               [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]},
+                               fun malformed_request/2);
                no_call ->
-                       method_not_allowed(Req, State, ['GET', 'HEAD']);
+                       method_not_allowed(Req, State,
+                               [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]);
                {halt, Req2, HandlerState} ->
                        terminate(Req2, State#state{handler_state=HandlerState});
                {List, Req2, HandlerState} ->
                        State2 = State#state{handler_state=HandlerState},
                        case lists:member(Method, List) of
-                               true -> next(Req2, State2, fun malformed_request/2);
-                               false -> method_not_allowed(Req2, State2, List)
+                               true when Method =:= <<"OPTIONS">> ->
+                                       next(Req2, State2#state{allowed_methods=List},
+                                               fun malformed_request/2);
+                               true ->
+                                       next(Req2, State2, fun malformed_request/2);
+                               false ->
+                                       method_not_allowed(Req2, State2, List)
                        end
        end.
 
+method_not_allowed(Req, State, []) ->
+       Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req),
+       respond(Req2, State, 405);
 method_not_allowed(Req, State, Methods) ->
-       {ok, Req2} = cowboy_http_req:set_resp_header(
-               <<"Allow">>, method_not_allowed_build(Methods, []), Req),
+       << ", ", Allow/binary >> = << << ", ", M/binary >> || M <- Methods >>,
+       Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req),
        respond(Req2, State, 405).
 
-method_not_allowed_build([], []) ->
-       <<>>;
-method_not_allowed_build([], [_Ignore|Acc]) ->
-       lists:reverse(Acc);
-method_not_allowed_build([Method|Tail], Acc) when is_atom(Method) ->
-       Method2 = list_to_binary(atom_to_list(Method)),
-       method_not_allowed_build(Tail, [<<", ">>, Method2|Acc]);
-method_not_allowed_build([Method|Tail], Acc) ->
-       method_not_allowed_build(Tail, [<<", ">>, Method|Acc]).
-
 malformed_request(Req, State) ->
        expect(Req, State, malformed_request, false, fun is_authorized/2, 400).
 
@@ -147,8 +155,8 @@ is_authorized(Req, State) ->
                {true, Req2, HandlerState} ->
                        forbidden(Req2, State#state{handler_state=HandlerState});
                {{false, AuthHead}, Req2, HandlerState} ->
-                       {ok, Req3} = cowboy_http_req:set_resp_header(
-                               <<"Www-Authenticate">>, AuthHead, Req2),
+                       Req3 = cowboy_req:set_resp_header(
+                               <<"www-authenticate">>, AuthHead, Req2),
                        respond(Req3, State#state{handler_state=HandlerState}, 401)
        end.
 
@@ -161,15 +169,23 @@ valid_content_headers(Req, State) ->
 
 known_content_type(Req, State) ->
        expect(Req, State, known_content_type, true,
-               fun valid_entity_length/2, 413).
+               fun valid_entity_length/2, 415).
 
 valid_entity_length(Req, State) ->
        expect(Req, State, valid_entity_length, true, fun options/2, 413).
 
 %% If you need to add additional headers to the response at this point,
 %% you should do it directly in the options/2 call using set_resp_headers.
-options(Req=#http_req{method='OPTIONS'}, State) ->
+options(Req, State=#state{allowed_methods=Methods, method= <<"OPTIONS">>}) ->
        case call(Req, State, options) of
+               no_call when Methods =:= [] ->
+                       Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req),
+                       respond(Req2, State, 200);
+               no_call ->
+                       << ", ", Allow/binary >>
+                               = << << ", ", M/binary >> || M <- Methods >>,
+                       Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req),
+                       respond(Req2, State, 200);
                {halt, Req2, HandlerState} ->
                        terminate(Req2, State#state{handler_state=HandlerState});
                {ok, Req2, HandlerState} ->
@@ -190,39 +206,53 @@ options(Req, State) ->
 %%
 %% Note that it is also possible to return a binary content type that will
 %% then be parsed by Cowboy. However note that while this may make your
-%% resources a little more readable, this is a lot less efficient. An example
-%% of such a return value would be:
+%% resources a little more readable, this is a lot less efficient.
+%%
+%% An example of such return value would be:
 %%    {<<"text/html">>, to_html}
-content_types_provided(Req=#http_req{meta=Meta}, State) ->
+content_types_provided(Req, State) ->
        case call(Req, State, content_types_provided) of
                no_call ->
-                       not_acceptable(Req, State);
+                       State2 = State#state{
+                               content_types_p=[{{<<"text">>, <<"html">>, '*'}, to_html}]},
+                       case cowboy_req:parse_header(<<"accept">>, Req) of
+                               {error, badarg} ->
+                                       respond(Req, State2, 400);
+                               {ok, undefined, Req2} ->
+                                       languages_provided(
+                                               cowboy_req:set_meta(media_type, {<<"text">>, <<"html">>, []}, Req2),
+                                               State2#state{content_type_a={{<<"text">>, <<"html">>, []}, to_html}});
+                               {ok, Accept, Req2} ->
+                                       Accept2 = prioritize_accept(Accept),
+                                       choose_media_type(Req2, State2, Accept2)
+                       end;
                {halt, Req2, HandlerState} ->
                        terminate(Req2, State#state{handler_state=HandlerState});
                {[], Req2, HandlerState} ->
                        not_acceptable(Req2, State#state{handler_state=HandlerState});
                {CTP, Req2, HandlerState} ->
-                   CTP2 = [normalize_content_types_provided(P) || P <- CTP],
+                       CTP2 = [normalize_content_types(P) || P <- CTP],
                        State2 = State#state{
                                handler_state=HandlerState, content_types_p=CTP2},
-                       {Accept, Req3} = cowboy_http_req:parse_header('Accept', Req2),
-                       case Accept of
-                               undefined ->
+                       case cowboy_req:parse_header(<<"accept">>, Req2) of
+                               {error, badarg} ->
+                                       respond(Req2, State2, 400);
+                               {ok, undefined, Req3} ->
                                        {PMT, _Fun} = HeadCTP = hd(CTP2),
                                        languages_provided(
-                                               Req3#http_req{meta=[{media_type, PMT}|Meta]},
+                                               cowboy_req:set_meta(media_type, PMT, Req3),
                                                State2#state{content_type_a=HeadCTP});
-                               Accept ->
+                               {ok, Accept, Req3} ->
                                        Accept2 = prioritize_accept(Accept),
                                        choose_media_type(Req3, State2, Accept2)
                        end
        end.
 
-normalize_content_types_provided({ContentType, Handler})
+normalize_content_types({ContentType, Callback})
                when is_binary(ContentType) ->
-    {cowboy_http:content_type(ContentType), Handler};
-normalize_content_types_provided(Provided) ->
-       Provided.
+       {cowboy_http:content_type(ContentType), Callback};
+normalize_content_types(Normalized) ->
+       Normalized.
 
 prioritize_accept(Accept) ->
        lists:sort(
@@ -273,12 +303,18 @@ match_media_type(Req, State, Accept,
 match_media_type(Req, State, Accept, [_Any|Tail], MediaType) ->
        match_media_type(Req, State, Accept, Tail, MediaType).
 
-match_media_type_params(Req=#http_req{meta=Meta}, State, Accept,
+match_media_type_params(Req, State, _Accept,
+               [Provided = {{TP, STP, '*'}, _Fun}|_Tail],
+               {{_TA, _STA, Params_A}, _QA, _APA}) ->
+       PMT = {TP, STP, Params_A},
+       languages_provided(cowboy_req:set_meta(media_type, PMT, Req),
+               State#state{content_type_a=Provided});
+match_media_type_params(Req, State, Accept,
                [Provided = {PMT = {_TP, _STP, Params_P}, _Fun}|Tail],
                MediaType = {{_TA, _STA, Params_A}, _QA, _APA}) ->
        case lists:sort(Params_P) =:= lists:sort(Params_A) of
                true ->
-                       languages_provided(Req#http_req{meta=[{media_type, PMT}|Meta]},
+                       languages_provided(cowboy_req:set_meta(media_type, PMT, Req),
                                State#state{content_type_a=Provided});
                false ->
                        match_media_type(Req, State, Accept, Tail, MediaType)
@@ -299,8 +335,8 @@ languages_provided(Req, State) ->
                        not_acceptable(Req2, State#state{handler_state=HandlerState});
                {LP, Req2, HandlerState} ->
                        State2 = State#state{handler_state=HandlerState, languages_p=LP},
-                       {AcceptLanguage, Req3} =
-                               cowboy_http_req:parse_header('Accept-Language', Req2),
+                       {ok, AcceptLanguage, Req3} =
+                               cowboy_req:parse_header(<<"accept-language">>, Req2),
                        case AcceptLanguage of
                                undefined ->
                                        set_language(Req3, State2#state{language_a=hd(LP)});
@@ -345,10 +381,9 @@ match_language(Req, State, Accept, [Provided|Tail],
                        match_language(Req, State, Accept, Tail, Language)
        end.
 
-set_language(Req=#http_req{meta=Meta}, State=#state{language_a=Language}) ->
-       {ok, Req2} = cowboy_http_req:set_resp_header(
-               <<"Content-Language">>, Language, Req),
-       charsets_provided(Req2#http_req{meta=[{language, Language}|Meta]}, State).
+set_language(Req, State=#state{language_a=Language}) ->
+       Req2 = cowboy_req:set_resp_header(<<"content-language">>, Language, Req),
+       charsets_provided(cowboy_req:set_meta(language, Language, Req2), State).
 
 %% charsets_provided should return a list of binary values indicating
 %% which charsets are accepted by the resource.
@@ -362,8 +397,8 @@ charsets_provided(Req, State) ->
                        not_acceptable(Req2, State#state{handler_state=HandlerState});
                {CP, Req2, HandlerState} ->
                        State2 = State#state{handler_state=HandlerState, charsets_p=CP},
-                       {AcceptCharset, Req3} =
-                               cowboy_http_req:parse_header('Accept-Charset', Req2),
+                       {ok, AcceptCharset, Req3} =
+                               cowboy_req:parse_header(<<"accept-charset">>, Req2),
                        case AcceptCharset of
                                undefined ->
                                        set_content_type(Req3, State2#state{charset_a=hd(CP)});
@@ -386,7 +421,11 @@ prioritize_charsets(AcceptCharsets) ->
                end, AcceptCharsets),
        case lists:keymember(<<"*">>, 1, AcceptCharsets2) of
                true -> AcceptCharsets2;
-               false -> [{<<"iso-8859-1">>, 1000}|AcceptCharsets2]
+               false ->
+                       case lists:keymember(<<"iso-8859-1">>, 1, AcceptCharsets2) of
+                               true -> AcceptCharsets2;
+                               false -> [{<<"iso-8859-1">>, 1000}|AcceptCharsets2]
+                       end
        end.
 
 choose_charset(Req, State, []) ->
@@ -396,13 +435,12 @@ choose_charset(Req, State=#state{charsets_p=CP}, [Charset|Tail]) ->
 
 match_charset(Req, State, Accept, [], _Charset) ->
        choose_charset(Req, State, Accept);
-match_charset(Req, State, _Accept, [Provided|_Tail],
-               {Provided, _Quality}) ->
+match_charset(Req, State, _Accept, [Provided|_], {Provided, _}) ->
        set_content_type(Req, State#state{charset_a=Provided});
-match_charset(Req, State, Accept, [_Provided|Tail], Charset) ->
+match_charset(Req, State, Accept, [_|Tail], Charset) ->
        match_charset(Req, State, Accept, Tail, Charset).
 
-set_content_type(Req=#http_req{meta=Meta}, State=#state{
+set_content_type(Req, State=#state{
                content_type_a={{Type, SubType, Params}, _Fun},
                charset_a=Charset}) ->
        ParamsBin = set_content_type_build_params(Params, []),
@@ -411,10 +449,11 @@ set_content_type(Req=#http_req{meta=Meta}, State=#state{
                undefined -> ContentType;
                Charset -> [ContentType, <<"; charset=">>, Charset]
        end,
-       {ok, Req2} = cowboy_http_req:set_resp_header(
-               <<"Content-Type">>, ContentType2, Req),
-       encodings_provided(Req2#http_req{meta=[{charset, Charset}|Meta]}, State).
+       Req2 = cowboy_req:set_resp_header(<<"content-type">>, ContentType2, Req),
+       encodings_provided(cowboy_req:set_meta(charset, Charset, Req2), State).
 
+set_content_type_build_params('*', []) ->
+       <<>>;
 set_content_type_build_params([], []) ->
        <<>>;
 set_content_type_build_params([], Acc) ->
@@ -443,120 +482,140 @@ variances(Req, State=#state{content_types_p=CTP,
        Variances = case CTP of
                [] -> [];
                [_] -> [];
-               [_|_] -> [<<"Accept">>]
+               [_|_] -> [<<"accept">>]
        end,
        Variances2 = case LP of
                [] -> Variances;
                [_] -> Variances;
-               [_|_] -> [<<"Accept-Language">>|Variances]
+               [_|_] -> [<<"accept-language">>|Variances]
        end,
        Variances3 = case CP of
                [] -> Variances2;
                [_] -> Variances2;
-               [_|_] -> [<<"Accept-Charset">>|Variances2]
+               [_|_] -> [<<"accept-charset">>|Variances2]
        end,
-       {Variances4, Req3, State2} = case call(Req, State, variances) of
+       try variances(Req, State, Variances3) of
+               {Variances4, Req2, State2} ->
+                       case [[<<", ">>, V] || V <- Variances4] of
+                               [] ->
+                                       resource_exists(Req2, State2);
+                               [[<<", ">>, H]|Variances5] ->
+                                       Req3 = cowboy_req:set_resp_header(
+                                               <<"vary">>, [H|Variances5], Req2),
+                                       resource_exists(Req3, State2)
+                       end
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, variances)
+       end.
+
+variances(Req, State, Variances) ->
+       case unsafe_call(Req, State, variances) of
                no_call ->
-                       {Variances3, Req, State};
+                       {Variances, Req, State};
                {HandlerVariances, Req2, HandlerState} ->
-                       {Variances3 ++ HandlerVariances, Req2,
+                       {Variances ++ HandlerVariances, Req2,
                                State#state{handler_state=HandlerState}}
-       end,
-       case [[<<", ">>, V] || V <- Variances4] of
-               [] ->
-                       resource_exists(Req3, State2);
-               [[<<", ">>, H]|Variances5] ->
-                       {ok, Req4} = cowboy_http_req:set_resp_header(
-                               <<"Variances">>, [H|Variances5], Req3),
-                       resource_exists(Req4, State2)
        end.
 
 resource_exists(Req, State) ->
        expect(Req, State, resource_exists, true,
-               fun if_match_exists/2, fun if_match_musnt_exist/2).
+               fun if_match_exists/2, fun if_match_must_not_exist/2).
 
 if_match_exists(Req, State) ->
-       case cowboy_http_req:parse_header('If-Match', Req) of
-               {undefined, Req2} ->
-                       if_unmodified_since_exists(Req2, State);
-               {'*', Req2} ->
-                       if_unmodified_since_exists(Req2, State);
-               {ETagsList, Req2} ->
-                       if_match(Req2, State, ETagsList)
+       State2 = State#state{exists=true},
+       case cowboy_req:parse_header(<<"if-match">>, Req) of
+               {ok, undefined, Req2} ->
+                       if_unmodified_since_exists(Req2, State2);
+               {ok, '*', Req2} ->
+                       if_unmodified_since_exists(Req2, State2);
+               {ok, ETagsList, Req2} ->
+                       if_match(Req2, State2, ETagsList);
+               {error, badarg} ->
+                       respond(Req, State2, 400)
        end.
 
 if_match(Req, State, EtagsList) ->
-       {Etag, Req2, State2} = generate_etag(Req, State),
-       case Etag of
-               no_call ->
-                       precondition_failed(Req2, State2);
-               Etag ->
+       try generate_etag(Req, State) of
+               {Etag, Req2, State2} ->
                        case lists:member(Etag, EtagsList) of
                                true -> if_unmodified_since_exists(Req2, State2);
+                               %% Etag may be `undefined' which cannot be a member.
                                false -> precondition_failed(Req2, State2)
                        end
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, generate_etag)
        end.
 
-if_match_musnt_exist(Req, State) ->
-       case cowboy_http_req:header('If-Match', Req) of
+if_match_must_not_exist(Req, State) ->
+       case cowboy_req:header(<<"if-match">>, Req) of
                {undefined, Req2} -> is_put_to_missing_resource(Req2, State);
                {_Any, Req2} -> precondition_failed(Req2, State)
        end.
 
 if_unmodified_since_exists(Req, State) ->
-       case cowboy_http_req:parse_header('If-Unmodified-Since', Req) of
-               {undefined, Req2} ->
-                       if_none_match_exists(Req2, State);
-               {{error, badarg}, Req2} ->
+       case cowboy_req:parse_header(<<"if-unmodified-since">>, Req) of
+               {ok, undefined, Req2} ->
                        if_none_match_exists(Req2, State);
-               {IfUnmodifiedSince, Req2} ->
-                       if_unmodified_since(Req2, State, IfUnmodifiedSince)
+               {ok, IfUnmodifiedSince, Req2} ->
+                       if_unmodified_since(Req2, State, IfUnmodifiedSince);
+               {error, badarg} ->
+                       if_none_match_exists(Req, State)
        end.
 
 %% If LastModified is the atom 'no_call', we continue.
 if_unmodified_since(Req, State, IfUnmodifiedSince) ->
-       {LastModified, Req2, State2} = last_modified(Req, State),
-       case LastModified > IfUnmodifiedSince of
-               true -> precondition_failed(Req2, State2);
-               false -> if_none_match_exists(Req2, State2)
+       try last_modified(Req, State) of
+               {LastModified, Req2, State2} ->
+                       case LastModified > IfUnmodifiedSince of
+                               true -> precondition_failed(Req2, State2);
+                               false -> if_none_match_exists(Req2, State2)
+                       end
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, last_modified)
        end.
 
 if_none_match_exists(Req, State) ->
-       case cowboy_http_req:parse_header('If-None-Match', Req) of
-               {undefined, Req2} ->
+       case cowboy_req:parse_header(<<"if-none-match">>, Req) of
+               {ok, undefined, Req2} ->
                        if_modified_since_exists(Req2, State);
-               {'*', Req2} ->
+               {ok, '*', Req2} ->
                        precondition_is_head_get(Req2, State);
-               {EtagsList, Req2} ->
-                       if_none_match(Req2, State, EtagsList)
+               {ok, EtagsList, Req2} ->
+                       if_none_match(Req2, State, EtagsList);
+               {error, badarg} ->
+                       respond(Req, State, 400)
        end.
 
 if_none_match(Req, State, EtagsList) ->
-       {Etag, Req2, State2} = generate_etag(Req, State),
-       case Etag of
-               no_call ->
-                       precondition_failed(Req2, State2);
-               Etag ->
-                       case lists:member(Etag, EtagsList) of
-                               true -> precondition_is_head_get(Req2, State2);
-                               false -> if_modified_since_exists(Req2, State2)
+       try generate_etag(Req, State) of
+               {Etag, Req2, State2} ->
+                       case Etag of
+                               undefined ->
+                                       precondition_failed(Req2, State2);
+                               Etag ->
+                                       case lists:member(Etag, EtagsList) of
+                                               true -> precondition_is_head_get(Req2, State2);
+                                               false -> if_modified_since_exists(Req2, State2)
+                                       end
                        end
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, generate_etag)
        end.
 
-precondition_is_head_get(Req=#http_req{method=Method}, State)
-               when Method =:= 'HEAD'; Method =:= 'GET' ->
+precondition_is_head_get(Req, State=#state{method=Method})
+               when Method =:= <<"HEAD">>; Method =:= <<"GET">> ->
        not_modified(Req, State);
 precondition_is_head_get(Req, State) ->
        precondition_failed(Req, State).
 
 if_modified_since_exists(Req, State) ->
-       case cowboy_http_req:parse_header('If-Modified-Since', Req) of
-               {undefined, Req2} ->
+       case cowboy_req:parse_header(<<"if-modified-since">>, Req) of
+               {ok, undefined, Req2} ->
                        method(Req2, State);
-               {{error, badarg}, Req2} ->
-                       method(Req2, State);
-               {IfModifiedSince, Req2} ->
-                       if_modified_since_now(Req2, State, IfModifiedSince)
+               {ok, IfModifiedSince, Req2} ->
+                       if_modified_since_now(Req2, State, IfModifiedSince);
+               {error, badarg} ->
+                       method(Req, State)
        end.
 
 if_modified_since_now(Req, State, IfModifiedSince) ->
@@ -566,28 +625,36 @@ if_modified_since_now(Req, State, IfModifiedSince) ->
        end.
 
 if_modified_since(Req, State, IfModifiedSince) ->
-       {LastModified, Req2, State2} = last_modified(Req, State),
-       case LastModified of
-               no_call ->
+       try last_modified(Req, State) of
+               {no_call, Req2, State2} ->
                        method(Req2, State2);
-               LastModified ->
+               {LastModified, Req2, State2} ->
                        case LastModified > IfModifiedSince of
                                true -> method(Req2, State2);
                                false -> not_modified(Req2, State2)
                        end
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, last_modified)
+       end.
+
+not_modified(Req, State) ->
+       Req2 = cowboy_req:delete_resp_header(<<"content-type">>, Req),
+       try set_resp_etag(Req2, State) of
+               {Req3, State2} ->
+                       try set_resp_expires(Req3, State2) of
+                               {Req4, State3} ->
+                                       respond(Req4, State3, 304)
+                       catch Class:Reason ->
+                               error_terminate(Req, State, Class, Reason, expires)
+                       end
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, generate_etag)
        end.
 
-not_modified(Req=#http_req{resp_headers=RespHeaders}, State) ->
-       RespHeaders2 = lists:keydelete(<<"Content-Type">>, 1, RespHeaders),
-       Req2 = Req#http_req{resp_headers=RespHeaders2},
-       {Req3, State2} = set_resp_etag(Req2, State),
-       {Req4, State3} = set_resp_expires(Req3, State2),
-       respond(Req4, State3, 304).
-
 precondition_failed(Req, State) ->
        respond(Req, State, 412).
 
-is_put_to_missing_resource(Req=#http_req{method='PUT'}, State) ->
+is_put_to_missing_resource(Req, State=#state{method= <<"PUT">>}) ->
        moved_permanently(Req, State, fun is_conflict/2);
 is_put_to_missing_resource(Req, State) ->
        previously_existed(Req, State).
@@ -597,8 +664,8 @@ is_put_to_missing_resource(Req, State) ->
 moved_permanently(Req, State, OnFalse) ->
        case call(Req, State, moved_permanently) of
                {{true, Location}, Req2, HandlerState} ->
-                       {ok, Req3} = cowboy_http_req:set_resp_header(
-                               <<"Location">>, Location, Req2),
+                       Req3 = cowboy_req:set_resp_header(
+                               <<"location">>, Location, Req2),
                        respond(Req3, State#state{handler_state=HandlerState}, 301);
                {false, Req2, HandlerState} ->
                        OnFalse(Req2, State#state{handler_state=HandlerState});
@@ -618,8 +685,8 @@ previously_existed(Req, State) ->
 moved_temporarily(Req, State) ->
        case call(Req, State, moved_temporarily) of
                {{true, Location}, Req2, HandlerState} ->
-                       {ok, Req3} = cowboy_http_req:set_resp_header(
-                               <<"Location">>, Location, Req2),
+                       Req3 = cowboy_req:set_resp_header(
+                               <<"location">>, Location, Req2),
                        respond(Req3, State#state{handler_state=HandlerState}, 307);
                {false, Req2, HandlerState} ->
                        is_post_to_missing_resource(Req2, State#state{handler_state=HandlerState}, 410);
@@ -629,174 +696,184 @@ moved_temporarily(Req, State) ->
                        is_post_to_missing_resource(Req, State, 410)
        end.
 
-is_post_to_missing_resource(Req=#http_req{method='POST'}, State, OnFalse) ->
+is_post_to_missing_resource(Req, State=#state{method= <<"POST">>}, OnFalse) ->
        allow_missing_post(Req, State, OnFalse);
 is_post_to_missing_resource(Req, State, OnFalse) ->
        respond(Req, State, OnFalse).
 
 allow_missing_post(Req, State, OnFalse) ->
-       expect(Req, State, allow_missing_post, true, fun post_is_create/2, OnFalse).
+       expect(Req, State, allow_missing_post, true, fun accept_resource/2, OnFalse).
 
-method(Req=#http_req{method='DELETE'}, State) ->
+method(Req, State=#state{method= <<"DELETE">>}) ->
        delete_resource(Req, State);
-method(Req=#http_req{method='POST'}, State) ->
-       post_is_create(Req, State);
-method(Req=#http_req{method='PUT'}, State) ->
+method(Req, State=#state{method= <<"PUT">>}) ->
        is_conflict(Req, State);
+method(Req, State=#state{method=Method})
+               when Method =:= <<"POST">>; Method =:= <<"PATCH">> ->
+       accept_resource(Req, State);
+method(Req, State=#state{method=Method})
+               when Method =:= <<"GET">>; Method =:= <<"HEAD">> ->
+       set_resp_body_etag(Req, State);
 method(Req, State) ->
-       set_resp_body(Req, State).
+       multiple_choices(Req, State).
 
 %% delete_resource/2 should start deleting the resource and return.
 delete_resource(Req, State) ->
-       expect(Req, State, delete_resource, true, fun delete_completed/2, 500).
+       expect(Req, State, delete_resource, false, 500, fun delete_completed/2).
 
 %% delete_completed/2 indicates whether the resource has been deleted yet.
 delete_completed(Req, State) ->
        expect(Req, State, delete_completed, true, fun has_resp_body/2, 202).
 
-%% post_is_create/2 indicates whether the POST method can create new resources.
-post_is_create(Req, State) ->
-       expect(Req, State, post_is_create, false, fun process_post/2, fun create_path/2).
-
-%% When the POST method can create new resources, create_path/2 will be called
-%% and is expected to return the full path to the new resource
-%% (including the leading /).
-create_path(Req=#http_req{meta=Meta}, State) ->
-       case call(Req, State, create_path) of
-               {halt, Req2, HandlerState} ->
-                       terminate(Req2, State#state{handler_state=HandlerState});
-               {Path, Req2, HandlerState} ->
-                       Location = create_path_location(Req2, Path),
-                       State2 = State#state{handler_state=HandlerState},
-                       {ok, Req3} = cowboy_http_req:set_resp_header(
-                               <<"Location">>, Location, Req2),
-                       put_resource(Req3#http_req{meta=[{put_path, Path}|Meta]},
-                               State2, 303)
-       end.
-
-create_path_location(#http_req{transport=Transport, raw_host=Host,
-               port=Port}, Path) ->
-       TransportName = Transport:name(),
-       << (create_path_location_protocol(TransportName))/binary, "://",
-               Host/binary, (create_path_location_port(TransportName, Port))/binary,
-               Path/binary >>.
-
-create_path_location_protocol(ssl) -> <<"https">>;
-create_path_location_protocol(_) -> <<"http">>.
-
-create_path_location_port(ssl, 443) ->
-       <<>>;
-create_path_location_port(tcp, 80) ->
-       <<>>;
-create_path_location_port(_, Port) ->
-       <<":", (list_to_binary(integer_to_list(Port)))/binary>>.
-
-%% process_post should return true when the POST body could be processed
-%% and false when it hasn't, in which case a 500 error is sent.
-process_post(Req, State) ->
-       case call(Req, State, process_post) of
-               {halt, Req2, HandlerState} ->
-                       terminate(Req2, State#state{handler_state=HandlerState});
-               {true, Req2, HandlerState} ->
-                       State2 = State#state{handler_state=HandlerState},
-                       next(Req2, State2, 201);
-               {false, Req2, HandlerState} ->
-                       State2 = State#state{handler_state=HandlerState},
-                       respond(Req2, State2, 500)
-       end.
-
 is_conflict(Req, State) ->
-       expect(Req, State, is_conflict, false, fun put_resource/2, 409).
-
-put_resource(Req=#http_req{raw_path=RawPath, meta=Meta}, State) ->
-       Req2 = Req#http_req{meta=[{put_path, RawPath}|Meta]},
-       put_resource(Req2, State, fun is_new_resource/2).
+       expect(Req, State, is_conflict, false, fun accept_resource/2, 409).
 
 %% content_types_accepted should return a list of media types and their
 %% associated callback functions in the same format as content_types_provided.
 %%
 %% The callback will then be called and is expected to process the content
-%% pushed to the resource in the request body. The path to the new resource
-%% may be different from the request path, and is stored as request metadata.
-%% It is always defined past this point. It can be retrieved as demonstrated:
-%%     {PutPath, Req2} = cowboy_http_req:meta(put_path, Req)
-put_resource(Req, State, OnTrue) ->
+%% pushed to the resource in the request body.
+%%
+%% content_types_accepted SHOULD return a different list
+%% for each HTTP method.
+accept_resource(Req, State) ->
        case call(Req, State, content_types_accepted) of
                no_call ->
                        respond(Req, State, 415);
                {halt, Req2, HandlerState} ->
                        terminate(Req2, State#state{handler_state=HandlerState});
                {CTA, Req2, HandlerState} ->
+                       CTA2 = [normalize_content_types(P) || P <- CTA],
                        State2 = State#state{handler_state=HandlerState},
-                       {ContentType, Req3}
-                               = cowboy_http_req:parse_header('Content-Type', Req2),
-                       choose_content_type(Req3, State2, OnTrue, ContentType, CTA)
+                       case cowboy_req:parse_header(<<"content-type">>, Req2) of
+                               {ok, ContentType, Req3} ->
+                                       choose_content_type(Req3, State2, ContentType, CTA2);
+                               {error, badarg} ->
+                                       respond(Req2, State2, 415)
+                       end
        end.
 
-choose_content_type(Req, State, _OnTrue, _ContentType, []) ->
+%% The special content type '*' will always match. It can be used as a
+%% catch-all content type for accepting any kind of request content.
+%% Note that because it will always match, it should be the last of the
+%% list of content types, otherwise it'll shadow the ones following.
+choose_content_type(Req, State, _ContentType, []) ->
        respond(Req, State, 415);
-choose_content_type(Req, State, OnTrue, ContentType,
-               [{Accepted, Fun}|_Tail]) when ContentType =:= Accepted ->
-       case call(Req, State, Fun) of
-               {halt, Req2, HandlerState} ->
-                       terminate(Req2, State#state{handler_state=HandlerState});
-               {true, Req2, HandlerState} ->
-                       State2 = State#state{handler_state=HandlerState},
-                       next(Req2, State2, OnTrue);
-               {false, Req2, HandlerState} ->
-                       State2 = State#state{handler_state=HandlerState},
-                       respond(Req2, State2, 500)
-       end;
-choose_content_type(Req, State, OnTrue, ContentType, [_Any|Tail]) ->
-       choose_content_type(Req, State, OnTrue, ContentType, Tail).
-
-%% Whether we created a new resource, either through PUT or POST.
-%% This is easily testable because we would have set the Location
-%% header by this point if we did so.
-is_new_resource(Req, State) ->
-       case cowboy_http_req:has_resp_header(<<"Location">>, Req) of
+choose_content_type(Req, State, ContentType, [{Accepted, Fun}|_Tail])
+               when Accepted =:= '*'; Accepted =:= ContentType ->
+       process_content_type(Req, State, Fun);
+%% The special parameter '*' will always match any kind of content type
+%% parameters.
+%% Note that because it will always match, it should be the last of the
+%% list for specific content type, otherwise it'll shadow the ones following.
+choose_content_type(Req, State, {Type, SubType, Param},
+               [{{Type, SubType, AcceptedParam}, Fun}|_Tail])
+               when AcceptedParam =:= '*'; AcceptedParam =:= Param ->
+       process_content_type(Req, State, Fun);
+choose_content_type(Req, State, ContentType, [_Any|Tail]) ->
+       choose_content_type(Req, State, ContentType, Tail).
+
+process_content_type(Req, State=#state{method=Method, exists=Exists}, Fun) ->
+       try case call(Req, State, Fun) of
+               {halt, Req2, HandlerState2} ->
+                       terminate(Req2, State#state{handler_state=HandlerState2});
+               {true, Req2, HandlerState2} when Exists ->
+                       State2 = State#state{handler_state=HandlerState2},
+                       next(Req2, State2, fun has_resp_body/2);
+               {true, Req2, HandlerState2} ->
+                       State2 = State#state{handler_state=HandlerState2},
+                       next(Req2, State2, fun maybe_created/2);
+               {false, Req2, HandlerState2} ->
+                       State2 = State#state{handler_state=HandlerState2},
+                       respond(Req2, State2, 400);
+               {{true, ResURL}, Req2, HandlerState2} when Method =:= <<"POST">> ->
+                       State2 = State#state{handler_state=HandlerState2},
+                       Req3 = cowboy_req:set_resp_header(
+                               <<"location">>, ResURL, Req2),
+                       if
+                               Exists -> respond(Req3, State2, 303);
+                               true -> respond(Req3, State2, 201)
+                       end
+       end catch Class:Reason = {case_clause, no_call} ->
+               error_terminate(Req, State, Class, Reason, Fun)
+       end.
+
+%% If PUT was used then the resource has been created at the current URL.
+%% Otherwise, if a location header has been set then the resource has been
+%% created at a new URL. If not, send a 200 or 204 as expected from a
+%% POST or PATCH request.
+maybe_created(Req, State=#state{method= <<"PUT">>}) ->
+       respond(Req, State, 201);
+maybe_created(Req, State) ->
+       case cowboy_req:has_resp_header(<<"location">>, Req) of
                true -> respond(Req, State, 201);
                false -> has_resp_body(Req, State)
        end.
 
 has_resp_body(Req, State) ->
-       case cowboy_http_req:has_resp_body(Req) of
+       case cowboy_req:has_resp_body(Req) of
                true -> multiple_choices(Req, State);
                false -> respond(Req, State, 204)
        end.
 
+%% Set the Etag header if any for the response provided.
+set_resp_body_etag(Req, State) ->
+       try set_resp_etag(Req, State) of
+               {Req2, State2} ->
+                       set_resp_body_last_modified(Req2, State2)
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, generate_etag)
+       end.
+
+%% Set the Last-Modified header if any for the response provided.
+set_resp_body_last_modified(Req, State) ->
+       try last_modified(Req, State) of
+               {LastModified, Req2, State2} ->
+                       case LastModified of
+                               LastModified when is_atom(LastModified) ->
+                                       set_resp_body_expires(Req2, State2);
+                               LastModified ->
+                                       LastModifiedBin = cowboy_clock:rfc1123(LastModified),
+                                       Req3 = cowboy_req:set_resp_header(
+                                               <<"last-modified">>, LastModifiedBin, Req2),
+                                       set_resp_body_expires(Req3, State2)
+                       end
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, last_modified)
+       end.
+
+%% Set the Expires header if any for the response provided.
+set_resp_body_expires(Req, State) ->
+       try set_resp_expires(Req, State) of
+               {Req2, State2} ->
+                       set_resp_body(Req2, State2)
+       catch Class:Reason ->
+               error_terminate(Req, State, Class, Reason, expires)
+       end.
+
 %% Set the response headers and call the callback found using
 %% content_types_provided/2 to obtain the request body and add
 %% it to the response.
-set_resp_body(Req=#http_req{method=Method},
-               State=#state{content_type_a={_Type, Fun}})
-               when Method =:= 'GET'; Method =:= 'HEAD' ->
-       {Req2, State2} = set_resp_etag(Req, State),
-       {LastModified, Req3, State3} = last_modified(Req2, State2),
-       case LastModified of
-               LastModified when is_atom(LastModified) ->
-                       Req4 = Req3;
-               LastModified ->
-                       LastModifiedStr = httpd_util:rfc1123_date(LastModified),
-                       {ok, Req4} = cowboy_http_req:set_resp_header(
-                               <<"Last-Modified">>, LastModifiedStr, Req3)
-       end,
-       {Req5, State4} = set_resp_expires(Req4, State3),
-       case call(Req5, State4, Fun) of
-               {halt, Req6, HandlerState} ->
-                       terminate(Req6, State4#state{handler_state=HandlerState});
-               {Body, Req6, HandlerState} ->
-                       State5 = State4#state{handler_state=HandlerState},
-                       {ok, Req7} = case Body of
-                               {stream, Len, Fun1} ->
-                                       cowboy_http_req:set_resp_body_fun(Len, Fun1, Req6);
+set_resp_body(Req, State=#state{content_type_a={_, Callback}}) ->
+       try case call(Req, State, Callback) of
+               {halt, Req2, HandlerState2} ->
+                       terminate(Req2, State#state{handler_state=HandlerState2});
+               {Body, Req2, HandlerState2} ->
+                       State2 = State#state{handler_state=HandlerState2},
+                       Req3 = case Body of
+                               {stream, StreamFun} ->
+                                       cowboy_req:set_resp_body_fun(StreamFun, Req2);
+                               {stream, Len, StreamFun} ->
+                                       cowboy_req:set_resp_body_fun(Len, StreamFun, Req2);
+                               {chunked, StreamFun} ->
+                                       cowboy_req:set_resp_body_fun(chunked, StreamFun, Req2);
                                _Contents ->
-                                       cowboy_http_req:set_resp_body(Body, Req6)
+                                       cowboy_req:set_resp_body(Body, Req2)
                        end,
-                       multiple_choices(Req7, State5)
-       end;
-set_resp_body(Req, State) ->
-       multiple_choices(Req, State).
+                       multiple_choices(Req3, State2)
+       end catch Class:Reason = {case_clause, no_call} ->
+               error_terminate(Req, State, Class, Reason, Callback)
+       end.
 
 multiple_choices(Req, State) ->
        expect(Req, State, multiple_choices, false, 200, 300).
@@ -809,20 +886,28 @@ set_resp_etag(Req, State) ->
                undefined ->
                        {Req2, State2};
                Etag ->
-                       {ok, Req3} = cowboy_http_req:set_resp_header(
-                               <<"Etag">>, Etag, Req2),
+                       Req3 = cowboy_req:set_resp_header(
+                               <<"etag">>, encode_etag(Etag), Req2),
                        {Req3, State2}
        end.
 
+-spec encode_etag({strong | weak, binary()}) -> iolist().
+encode_etag({strong, Etag}) -> [$",Etag,$"];
+encode_etag({weak, Etag}) -> ["W/\"",Etag,$"].
+
 set_resp_expires(Req, State) ->
        {Expires, Req2, State2} = expires(Req, State),
        case Expires of
                Expires when is_atom(Expires) ->
                        {Req2, State2};
+               Expires when is_binary(Expires) ->
+                       Req3 = cowboy_req:set_resp_header(
+                               <<"expires">>, Expires, Req2),
+                       {Req3, State2};
                Expires ->
-                       ExpiresStr = httpd_util:rfc1123_date(Expires),
-                       {ok, Req3} = cowboy_http_req:set_resp_header(
-                               <<"Expires">>, ExpiresStr, Req2),
+                       ExpiresBin = cowboy_clock:rfc1123(Expires),
+                       Req3 = cowboy_req:set_resp_header(
+                               <<"expires">>, ExpiresBin, Req2),
                        {Req3, State2}
        end.
 
@@ -831,9 +916,12 @@ set_resp_expires(Req, State) ->
 generate_etag(Req, State=#state{etag=no_call}) ->
        {undefined, Req, State};
 generate_etag(Req, State=#state{etag=undefined}) ->
-       case call(Req, State, generate_etag) of
+       case unsafe_call(Req, State, generate_etag) of
                no_call ->
                        {undefined, Req, State#state{etag=no_call}};
+               {Etag, Req2, HandlerState} when is_binary(Etag) ->
+                       [Etag2] = cowboy_http:entity_tag_match(Etag),
+                       {Etag2, Req2, State#state{handler_state=HandlerState, etag=Etag2}};
                {Etag, Req2, HandlerState} ->
                        {Etag, Req2, State#state{handler_state=HandlerState, etag=Etag}}
        end;
@@ -843,7 +931,7 @@ generate_etag(Req, State=#state{etag=Etag}) ->
 last_modified(Req, State=#state{last_modified=no_call}) ->
        {undefined, Req, State};
 last_modified(Req, State=#state{last_modified=undefined}) ->
-       case call(Req, State, last_modified) of
+       case unsafe_call(Req, State, last_modified) of
                no_call ->
                        {undefined, Req, State#state{last_modified=no_call}};
                {LastModified, Req2, HandlerState} ->
@@ -856,7 +944,7 @@ last_modified(Req, State=#state{last_modified=LastModified}) ->
 expires(Req, State=#state{expires=no_call}) ->
        {undefined, Req, State};
 expires(Req, State=#state{expires=undefined}) ->
-       case call(Req, State, expires) of
+       case unsafe_call(Req, State, expires) of
                no_call ->
                        {undefined, Req, State#state{expires=no_call}};
                {Expires, Req2, HandlerState} ->
@@ -880,9 +968,23 @@ expect(Req, State, Callback, Expected, OnTrue, OnFalse) ->
                        next(Req2, State#state{handler_state=HandlerState}, OnFalse)
        end.
 
-call(Req, #state{handler=Handler, handler_state=HandlerState}, Fun) ->
-       case erlang:function_exported(Handler, Fun, 2) of
-               true -> Handler:Fun(Req, HandlerState);
+call(Req, State=#state{handler=Handler, handler_state=HandlerState},
+               Callback) ->
+       case erlang:function_exported(Handler, Callback, 2) of
+               true ->
+                       try
+                               Handler:Callback(Req, HandlerState)
+                       catch Class:Reason ->
+                               error_terminate(Req, State, Class, Reason, Callback)
+                       end;
+               false ->
+                       no_call
+       end.
+
+unsafe_call(Req, #state{handler=Handler, handler_state=HandlerState},
+               Callback) ->
+       case erlang:function_exported(Handler, Callback, 2) of
+               true -> Handler:Callback(Req, HandlerState);
                false -> no_call
        end.
 
@@ -891,15 +993,30 @@ next(Req, State, Next) when is_function(Next) ->
 next(Req, State, StatusCode) when is_integer(StatusCode) ->
        respond(Req, State, StatusCode).
 
-%% @todo Allow some sort of callback for custom error pages.
 respond(Req, State, StatusCode) ->
-       {ok, Req2} = cowboy_http_req:reply(StatusCode, Req),
+       {ok, Req2} = cowboy_req:reply(StatusCode, Req),
        terminate(Req2, State).
 
-terminate(Req, #state{handler=Handler, handler_state=HandlerState}) ->
+terminate(Req, State=#state{env=Env}) ->
+       rest_terminate(Req, State),
+       {ok, Req, [{result, ok}|Env]}.
+
+error_terminate(Req, State=#state{handler=Handler, handler_state=HandlerState},
+               Class, Reason, Callback) ->
+       Stacktrace = erlang:get_stacktrace(),
+       rest_terminate(Req, State),
+       cowboy_req:maybe_reply(Stacktrace, Req),
+       erlang:Class([
+               {reason, Reason},
+               {mfa, {Handler, Callback, 2}},
+               {stacktrace, Stacktrace},
+               {req, cowboy_req:to_list(Req)},
+               {state, HandlerState}
+       ]).
+
+rest_terminate(Req, #state{handler=Handler, handler_state=HandlerState}) ->
        case erlang:function_exported(Handler, rest_terminate, 2) of
                true -> ok = Handler:rest_terminate(
-                       Req#http_req{resp_state=locked}, HandlerState);
+                       cowboy_req:lock(Req), HandlerState);
                false -> ok
-       end,
-       {ok, Req}.
+       end.
diff --git a/deps/cowboy/src/cowboy_router.erl b/deps/cowboy/src/cowboy_router.erl
new file mode 100644 (file)
index 0000000..ef91c6d
--- /dev/null
@@ -0,0 +1,572 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% Routing middleware.
+%%
+%% Resolve the handler to be used for the request based on the
+%% routing information found in the <em>dispatch</em> environment value.
+%% When found, the handler module and associated data are added to
+%% the environment as the <em>handler</em> and <em>handler_opts</em> values
+%% respectively.
+%%
+%% If the route cannot be found, processing stops with either
+%% a 400 or a 404 reply.
+-module(cowboy_router).
+-behaviour(cowboy_middleware).
+
+-export([compile/1]).
+-export([execute/2]).
+
+-type bindings() :: [{atom(), binary()}].
+-type tokens() :: [binary()].
+-export_type([bindings/0]).
+-export_type([tokens/0]).
+
+-type constraints() :: [{atom(), int}
+       | {atom(), function, fun ((binary()) -> true | {true, any()} | false)}].
+-export_type([constraints/0]).
+
+-type route_match() :: '_' | iodata().
+-type route_path() :: {Path::route_match(), Handler::module(), Opts::any()}
+       | {Path::route_match(), constraints(), Handler::module(), Opts::any()}.
+-type route_rule() :: {Host::route_match(), Paths::[route_path()]}
+       | {Host::route_match(), constraints(), Paths::[route_path()]}.
+-type routes() :: [route_rule()].
+-export_type([routes/0]).
+
+-type dispatch_match() :: '_' | <<_:8>> | [binary() | '_' | '...' | atom()].
+-type dispatch_path() :: {dispatch_match(), module(), any()}.
+-type dispatch_rule() :: {Host::dispatch_match(), Paths::[dispatch_path()]}.
+-opaque dispatch_rules() :: [dispatch_rule()].
+-export_type([dispatch_rules/0]).
+
+-spec compile(routes()) -> dispatch_rules().
+compile(Routes) ->
+       compile(Routes, []).
+
+compile([], Acc) ->
+       lists:reverse(Acc);
+compile([{Host, Paths}|Tail], Acc) ->
+       compile([{Host, [], Paths}|Tail], Acc);
+compile([{HostMatch, Constraints, Paths}|Tail], Acc) ->
+       HostRules = case HostMatch of
+               '_' -> '_';
+               _ -> compile_host(HostMatch)
+       end,
+       PathRules = compile_paths(Paths, []),
+       Hosts = case HostRules of
+               '_' -> [{'_', Constraints, PathRules}];
+               _ -> [{R, Constraints, PathRules} || R <- HostRules]
+       end,
+       compile(Tail, Hosts ++ Acc).
+
+compile_host(HostMatch) when is_list(HostMatch) ->
+       compile_host(list_to_binary(HostMatch));
+compile_host(HostMatch) when is_binary(HostMatch) ->
+       compile_rules(HostMatch, $., [], [], <<>>).
+
+compile_paths([], Acc) ->
+       lists:reverse(Acc);
+compile_paths([{PathMatch, Handler, Opts}|Tail], Acc) ->
+       compile_paths([{PathMatch, [], Handler, Opts}|Tail], Acc);
+compile_paths([{PathMatch, Constraints, Handler, Opts}|Tail], Acc)
+               when is_list(PathMatch) ->
+       compile_paths([{iolist_to_binary(PathMatch),
+               Constraints, Handler, Opts}|Tail], Acc);
+compile_paths([{'_', Constraints, Handler, Opts}|Tail], Acc) ->
+       compile_paths(Tail, [{'_', Constraints, Handler, Opts}] ++ Acc);
+compile_paths([{<< $/, PathMatch/binary >>, Constraints, Handler, Opts}|Tail],
+               Acc) ->
+       PathRules = compile_rules(PathMatch, $/, [], [], <<>>),
+       Paths = [{lists:reverse(R), Constraints, Handler, Opts} || R <- PathRules],
+       compile_paths(Tail, Paths ++ Acc);
+compile_paths([{PathMatch, _, _, _}|_], _) ->
+       error({badarg, "The following route MUST begin with a slash: "
+               ++ binary_to_list(PathMatch)}).
+
+compile_rules(<<>>, _, Segments, Rules, <<>>) ->
+       [Segments|Rules];
+compile_rules(<<>>, _, Segments, Rules, Acc) ->
+       [[Acc|Segments]|Rules];
+compile_rules(<< S, Rest/binary >>, S, Segments, Rules, <<>>) ->
+       compile_rules(Rest, S, Segments, Rules, <<>>);
+compile_rules(<< S, Rest/binary >>, S, Segments, Rules, Acc) ->
+       compile_rules(Rest, S, [Acc|Segments], Rules, <<>>);
+compile_rules(<< $:, Rest/binary >>, S, Segments, Rules, <<>>) ->
+       {NameBin, Rest2} = compile_binding(Rest, S, <<>>),
+       Name = binary_to_atom(NameBin, utf8),
+       compile_rules(Rest2, S, Segments, Rules, Name);
+compile_rules(<< $:, _/binary >>, _, _, _, _) ->
+       erlang:error(badarg);
+compile_rules(<< $[, $., $., $., $], Rest/binary >>, S, Segments, Rules, Acc)
+               when Acc =:= <<>> ->
+       compile_rules(Rest, S, ['...'|Segments], Rules, Acc);
+compile_rules(<< $[, $., $., $., $], Rest/binary >>, S, Segments, Rules, Acc) ->
+       compile_rules(Rest, S, ['...', Acc|Segments], Rules, Acc);
+compile_rules(<< $[, S, Rest/binary >>, S, Segments, Rules, Acc) ->
+       compile_brackets(Rest, S, [Acc|Segments], Rules);
+compile_rules(<< $[, Rest/binary >>, S, Segments, Rules, <<>>) ->
+       compile_brackets(Rest, S, Segments, Rules);
+%% Open bracket in the middle of a segment.
+compile_rules(<< $[, _/binary >>, _, _, _, _) ->
+       erlang:error(badarg);
+%% Missing an open bracket.
+compile_rules(<< $], _/binary >>, _, _, _, _) ->
+       erlang:error(badarg);
+compile_rules(<< C, Rest/binary >>, S, Segments, Rules, Acc) ->
+       compile_rules(Rest, S, Segments, Rules, << Acc/binary, C >>).
+
+%% Everything past $: until the segment separator ($. for hosts,
+%% $/ for paths) or $[ or $] or end of binary is the binding name.
+compile_binding(<<>>, _, <<>>) ->
+       erlang:error(badarg);
+compile_binding(Rest = <<>>, _, Acc) ->
+       {Acc, Rest};
+compile_binding(Rest = << C, _/binary >>, S, Acc)
+               when C =:= S; C =:= $[; C =:= $] ->
+       {Acc, Rest};
+compile_binding(<< C, Rest/binary >>, S, Acc) ->
+       compile_binding(Rest, S, << Acc/binary, C >>).
+
+compile_brackets(Rest, S, Segments, Rules) ->
+       {Bracket, Rest2} = compile_brackets_split(Rest, <<>>, 0),
+       Rules1 = compile_rules(Rest2, S, Segments, [], <<>>),
+       Rules2 = compile_rules(<< Bracket/binary, Rest2/binary >>,
+               S, Segments, [], <<>>),
+       Rules ++ Rules2 ++ Rules1.
+
+%% Missing a close bracket.
+compile_brackets_split(<<>>, _, _) ->
+       erlang:error(badarg);
+%% Make sure we don't confuse the closing bracket we're looking for.
+compile_brackets_split(<< C, Rest/binary >>, Acc, N) when C =:= $[ ->
+       compile_brackets_split(Rest, << Acc/binary, C >>, N + 1);
+compile_brackets_split(<< C, Rest/binary >>, Acc, N) when C =:= $], N > 0 ->
+       compile_brackets_split(Rest, << Acc/binary, C >>, N - 1);
+%% That's the right one.
+compile_brackets_split(<< $], Rest/binary >>, Acc, 0) ->
+       {Acc, Rest};
+compile_brackets_split(<< C, Rest/binary >>, Acc, N) ->
+       compile_brackets_split(Rest, << Acc/binary, C >>, N).
+
+-spec execute(Req, Env)
+       -> {ok, Req, Env} | {error, 400 | 404, Req}
+       when Req::cowboy_req:req(), Env::cowboy_middleware:env().
+execute(Req, Env) ->
+       {_, Dispatch} = lists:keyfind(dispatch, 1, Env),
+       [Host, Path] = cowboy_req:get([host, path], Req),
+       case match(Dispatch, Host, Path) of
+               {ok, Handler, HandlerOpts, Bindings, HostInfo, PathInfo} ->
+                       Req2 = cowboy_req:set_bindings(HostInfo, PathInfo, Bindings, Req),
+                       {ok, Req2, [{handler, Handler}, {handler_opts, HandlerOpts}|Env]};
+               {error, notfound, host} ->
+                       {error, 400, Req};
+               {error, badrequest, path} ->
+                       {error, 400, Req};
+               {error, notfound, path} ->
+                       {error, 404, Req}
+       end.
+
+%% Internal.
+
+%% Match hostname tokens and path tokens against dispatch rules.
+%%
+%% It is typically used for matching tokens for the hostname and path of
+%% the request against a global dispatch rule for your listener.
+%%
+%% Dispatch rules are a list of <em>{Hostname, PathRules}</em> tuples, with
+%% <em>PathRules</em> being a list of <em>{Path, HandlerMod, HandlerOpts}</em>.
+%%
+%% <em>Hostname</em> and <em>Path</em> are match rules and can be either the
+%% atom <em>'_'</em>, which matches everything, `<<"*">>', which match the
+%% wildcard path, or a list of tokens.
+%%
+%% Each token can be either a binary, the atom <em>'_'</em>,
+%% the atom '...' or a named atom. A binary token must match exactly,
+%% <em>'_'</em> matches everything for a single token, <em>'...'</em> matches
+%% everything for the rest of the tokens and a named atom will bind the
+%% corresponding token value and return it.
+%%
+%% The list of hostname tokens is reversed before matching. For example, if
+%% we were to match "www.ninenines.eu", we would first match "eu", then
+%% "ninenines", then "www". This means that in the context of hostnames,
+%% the <em>'...'</em> atom matches properly the lower levels of the domain
+%% as would be expected.
+%%
+%% When a result is found, this function will return the handler module and
+%% options found in the dispatch list, a key-value list of bindings and
+%% the tokens that were matched by the <em>'...'</em> atom for both the
+%% hostname and path.
+-spec match(dispatch_rules(), Host::binary() | tokens(), Path::binary())
+       -> {ok, module(), any(), bindings(),
+               HostInfo::undefined | tokens(),
+               PathInfo::undefined | tokens()}
+       | {error, notfound, host} | {error, notfound, path}
+       | {error, badrequest, path}.
+match([], _, _) ->
+       {error, notfound, host};
+%% If the host is '_' then there can be no constraints.
+match([{'_', [], PathMatchs}|_Tail], _, Path) ->
+       match_path(PathMatchs, undefined, Path, []);
+match([{HostMatch, Constraints, PathMatchs}|Tail], Tokens, Path)
+               when is_list(Tokens) ->
+       case list_match(Tokens, HostMatch, []) of
+               false ->
+                       match(Tail, Tokens, Path);
+               {true, Bindings, HostInfo} ->
+                       HostInfo2 = case HostInfo of
+                               undefined -> undefined;
+                               _ -> lists:reverse(HostInfo)
+                       end,
+                       case check_constraints(Constraints, Bindings) of
+                               {ok, Bindings2} ->
+                                       match_path(PathMatchs, HostInfo2, Path, Bindings2);
+                               nomatch ->
+                                       match(Tail, Tokens, Path)
+                       end
+       end;
+match(Dispatch, Host, Path) ->
+       match(Dispatch, split_host(Host), Path).
+
+-spec match_path([dispatch_path()],
+       HostInfo::undefined | tokens(), binary() | tokens(), bindings())
+       -> {ok, module(), any(), bindings(),
+               HostInfo::undefined | tokens(),
+               PathInfo::undefined | tokens()}
+       | {error, notfound, path} | {error, badrequest, path}.
+match_path([], _, _, _) ->
+       {error, notfound, path};
+%% If the path is '_' then there can be no constraints.
+match_path([{'_', [], Handler, Opts}|_Tail], HostInfo, _, Bindings) ->
+       {ok, Handler, Opts, Bindings, HostInfo, undefined};
+match_path([{<<"*">>, _Constraints, Handler, Opts}|_Tail], HostInfo, <<"*">>, Bindings) ->
+       {ok, Handler, Opts, Bindings, HostInfo, undefined};
+match_path([{PathMatch, Constraints, Handler, Opts}|Tail], HostInfo, Tokens,
+               Bindings) when is_list(Tokens) ->
+       case list_match(Tokens, PathMatch, Bindings) of
+               false ->
+                       match_path(Tail, HostInfo, Tokens, Bindings);
+               {true, PathBinds, PathInfo} ->
+                       case check_constraints(Constraints, PathBinds) of
+                               {ok, PathBinds2} ->
+                                       {ok, Handler, Opts, PathBinds2, HostInfo, PathInfo};
+                               nomatch ->
+                                       match_path(Tail, HostInfo, Tokens, Bindings)
+                       end
+       end;
+match_path(_Dispatch, _HostInfo, badrequest, _Bindings) ->
+       {error, badrequest, path};
+match_path(Dispatch, HostInfo, Path, Bindings) ->
+       match_path(Dispatch, HostInfo, split_path(Path), Bindings).
+
+check_constraints([], Bindings) ->
+       {ok, Bindings};
+check_constraints([Constraint|Tail], Bindings) ->
+       Name = element(1, Constraint),
+       case lists:keyfind(Name, 1, Bindings) of
+               false ->
+                       check_constraints(Tail, Bindings);
+               {_, Value} ->
+                       case check_constraint(Constraint, Value) of
+                               true ->
+                                       check_constraints(Tail, Bindings);
+                               {true, Value2} ->
+                                       Bindings2 = lists:keyreplace(Name, 1, Bindings,
+                                               {Name, Value2}),
+                                       check_constraints(Tail, Bindings2);
+                               false ->
+                                       nomatch
+                       end
+       end.
+
+check_constraint({_, int}, Value) ->
+       try {true, list_to_integer(binary_to_list(Value))}
+       catch _:_ -> false
+       end;
+check_constraint({_, function, Fun}, Value) ->
+       Fun(Value).
+
+-spec split_host(binary()) -> tokens().
+split_host(Host) ->
+       split_host(Host, []).
+
+split_host(Host, Acc) ->
+       case binary:match(Host, <<".">>) of
+               nomatch when Host =:= <<>> ->
+                       Acc;
+               nomatch ->
+                       [Host|Acc];
+               {Pos, _} ->
+                       << Segment:Pos/binary, _:8, Rest/bits >> = Host,
+                       false = byte_size(Segment) == 0,
+                       split_host(Rest, [Segment|Acc])
+       end.
+
+%% Following RFC2396, this function may return path segments containing any
+%% character, including <em>/</em> if, and only if, a <em>/</em> was escaped
+%% and part of a path segment.
+-spec split_path(binary()) -> tokens().
+split_path(<< $/, Path/bits >>) ->
+       split_path(Path, []);
+split_path(_) ->
+       badrequest.
+
+split_path(Path, Acc) ->
+       try
+               case binary:match(Path, <<"/">>) of
+                       nomatch when Path =:= <<>> ->
+                               lists:reverse([cow_qs:urldecode(S) || S <- Acc]);
+                       nomatch ->
+                               lists:reverse([cow_qs:urldecode(S) || S <- [Path|Acc]]);
+                       {Pos, _} ->
+                               << Segment:Pos/binary, _:8, Rest/bits >> = Path,
+                               split_path(Rest, [Segment|Acc])
+               end
+       catch
+               error:badarg ->
+                       badrequest
+       end.
+
+-spec list_match(tokens(), dispatch_match(), bindings())
+       -> {true, bindings(), undefined | tokens()} | false.
+%% Atom '...' matches any trailing path, stop right now.
+list_match(List, ['...'], Binds) ->
+       {true, Binds, List};
+%% Atom '_' matches anything, continue.
+list_match([_E|Tail], ['_'|TailMatch], Binds) ->
+       list_match(Tail, TailMatch, Binds);
+%% Both values match, continue.
+list_match([E|Tail], [E|TailMatch], Binds) ->
+       list_match(Tail, TailMatch, Binds);
+%% Bind E to the variable name V and continue,
+%% unless V was already defined and E isn't identical to the previous value.
+list_match([E|Tail], [V|TailMatch], Binds) when is_atom(V) ->
+       case lists:keyfind(V, 1, Binds) of
+               {_, E} ->
+                       list_match(Tail, TailMatch, Binds);
+               {_, _} ->
+                       false;
+               false ->
+                       list_match(Tail, TailMatch, [{V, E}|Binds])
+       end;
+%% Match complete.
+list_match([], [], Binds) ->
+       {true, Binds, undefined};
+%% Values don't match, stop.
+list_match(_List, _Match, _Binds) ->
+       false.
+
+%% Tests.
+
+-ifdef(TEST).
+compile_test_() ->
+       Tests = [
+               %% Match any host and path.
+               {[{'_', [{'_', h, o}]}],
+                       [{'_', [], [{'_', [], h, o}]}]},
+               {[{"cowboy.example.org",
+                               [{"/", ha, oa}, {"/path/to/resource", hb, ob}]}],
+                       [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [
+                               {[], [], ha, oa},
+                               {[<<"path">>, <<"to">>, <<"resource">>], [], hb, ob}]}]},
+               {[{'_', [{"/path/to/resource/", h, o}]}],
+                       [{'_', [], [{[<<"path">>, <<"to">>, <<"resource">>], [], h, o}]}]},
+               % Cyrillic from a latin1 encoded file.
+               {[{'_', [{[47,208,191,209,131,209,130,209,140,47,208,186,47,209,128,
+                               208,181,209,129,209,131,209,128,209,129,209,131,47], h, o}]}],
+                       [{'_', [], [{[<<208,191,209,131,209,130,209,140>>, <<208,186>>,
+                               <<209,128,208,181,209,129,209,131,209,128,209,129,209,131>>],
+                               [], h, o}]}]},
+               {[{"cowboy.example.org.", [{'_', h, o}]}],
+                       [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [{'_', [], h, o}]}]},
+               {[{".cowboy.example.org", [{'_', h, o}]}],
+                       [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [{'_', [], h, o}]}]},
+               % Cyrillic from a latin1 encoded file.
+               {[{[208,189,208,181,208,186,208,184,208,185,46,209,129,208,176,
+                               208,185,209,130,46,209,128,209,132,46], [{'_', h, o}]}],
+                       [{[<<209,128,209,132>>, <<209,129,208,176,208,185,209,130>>,
+                               <<208,189,208,181,208,186,208,184,208,185>>],
+                               [], [{'_', [], h, o}]}]},
+               {[{":subdomain.example.org", [{"/hats/:name/prices", h, o}]}],
+                       [{[<<"org">>, <<"example">>, subdomain], [], [
+                               {[<<"hats">>, name, <<"prices">>], [], h, o}]}]},
+               {[{"ninenines.:_", [{"/hats/:_", h, o}]}],
+                       [{['_', <<"ninenines">>], [], [{[<<"hats">>, '_'], [], h, o}]}]},
+               {[{"[www.]ninenines.eu",
+                       [{"/horses", h, o}, {"/hats/[page/:number]", h, o}]}], [
+                               {[<<"eu">>, <<"ninenines">>], [], [
+                                       {[<<"horses">>], [], h, o},
+                                       {[<<"hats">>], [], h, o},
+                                       {[<<"hats">>, <<"page">>, number], [], h, o}]},
+                               {[<<"eu">>, <<"ninenines">>, <<"www">>], [], [
+                                       {[<<"horses">>], [], h, o},
+                                       {[<<"hats">>], [], h, o},
+                                       {[<<"hats">>, <<"page">>, number], [], h, o}]}]},
+               {[{'_', [{"/hats/[page/[:number]]", h, o}]}], [{'_', [], [
+                       {[<<"hats">>], [], h, o},
+                       {[<<"hats">>, <<"page">>], [], h, o},
+                       {[<<"hats">>, <<"page">>, number], [], h, o}]}]},
+               {[{"[...]ninenines.eu", [{"/hats/[...]", h, o}]}],
+                       [{[<<"eu">>, <<"ninenines">>, '...'], [], [
+                               {[<<"hats">>, '...'], [], h, o}]}]}
+       ],
+       [{lists:flatten(io_lib:format("~p", [Rt])),
+               fun() -> Rs = compile(Rt) end} || {Rt, Rs} <- Tests].
+
+split_host_test_() ->
+       Tests = [
+               {<<"">>, []},
+               {<<"*">>, [<<"*">>]},
+               {<<"cowboy.ninenines.eu">>,
+                       [<<"eu">>, <<"ninenines">>, <<"cowboy">>]},
+               {<<"ninenines.eu">>,
+                       [<<"eu">>, <<"ninenines">>]},
+               {<<"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z">>,
+                       [<<"z">>, <<"y">>, <<"x">>, <<"w">>, <<"v">>, <<"u">>, <<"t">>,
+                       <<"s">>, <<"r">>, <<"q">>, <<"p">>, <<"o">>, <<"n">>, <<"m">>,
+                       <<"l">>, <<"k">>, <<"j">>, <<"i">>, <<"h">>, <<"g">>, <<"f">>,
+                       <<"e">>, <<"d">>, <<"c">>, <<"b">>, <<"a">>]}
+       ],
+       [{H, fun() -> R = split_host(H) end} || {H, R} <- Tests].
+
+split_path_test_() ->
+       Tests = [
+               {<<"/">>, []},
+               {<<"/extend//cowboy">>, [<<"extend">>, <<>>, <<"cowboy">>]},
+               {<<"/users">>, [<<"users">>]},
+               {<<"/users/42/friends">>, [<<"users">>, <<"42">>, <<"friends">>]},
+               {<<"/users/a+b/c%21d">>, [<<"users">>, <<"a b">>, <<"c!d">>]}
+       ],
+       [{P, fun() -> R = split_path(P) end} || {P, R} <- Tests].
+
+match_test_() ->
+       Dispatch = [
+               {[<<"eu">>, <<"ninenines">>, '_', <<"www">>], [], [
+                       {[<<"users">>, '_', <<"mails">>], [], match_any_subdomain_users, []}
+               ]},
+               {[<<"eu">>, <<"ninenines">>], [], [
+                       {[<<"users">>, id, <<"friends">>], [], match_extend_users_friends, []},
+                       {'_', [], match_extend, []}
+               ]},
+               {[var, <<"ninenines">>], [], [
+                       {[<<"threads">>, var], [], match_duplicate_vars,
+                               [we, {expect, two}, var, here]}
+               ]},
+               {[ext, <<"erlang">>], [], [
+                       {'_', [], match_erlang_ext, []}
+               ]},
+               {'_', [], [
+                       {[<<"users">>, id, <<"friends">>], [], match_users_friends, []},
+                       {'_', [], match_any, []}
+               ]}
+       ],
+       Tests = [
+               {<<"any">>, <<"/">>, {ok, match_any, [], []}},
+               {<<"www.any.ninenines.eu">>, <<"/users/42/mails">>,
+                       {ok, match_any_subdomain_users, [], []}},
+               {<<"www.ninenines.eu">>, <<"/users/42/mails">>,
+                       {ok, match_any, [], []}},
+               {<<"www.ninenines.eu">>, <<"/">>,
+                       {ok, match_any, [], []}},
+               {<<"www.any.ninenines.eu">>, <<"/not_users/42/mails">>,
+                       {error, notfound, path}},
+               {<<"ninenines.eu">>, <<"/">>,
+                       {ok, match_extend, [], []}},
+               {<<"ninenines.eu">>, <<"/users/42/friends">>,
+                       {ok, match_extend_users_friends, [], [{id, <<"42">>}]}},
+               {<<"erlang.fr">>, '_',
+                       {ok, match_erlang_ext, [], [{ext, <<"fr">>}]}},
+               {<<"any">>, <<"/users/444/friends">>,
+                       {ok, match_users_friends, [], [{id, <<"444">>}]}}
+       ],
+       [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() ->
+               {ok, Handler, Opts, Binds, undefined, undefined}
+                       = match(Dispatch, H, P)
+       end} || {H, P, {ok, Handler, Opts, Binds}} <- Tests].
+
+match_info_test_() ->
+       Dispatch = [
+               {[<<"eu">>, <<"ninenines">>, <<"www">>], [], [
+                       {[<<"pathinfo">>, <<"is">>, <<"next">>, '...'], [], match_path, []}
+               ]},
+               {[<<"eu">>, <<"ninenines">>, '...'], [], [
+                       {'_', [], match_any, []}
+               ]},
+               % Cyrillic from a latin1 encoded file.
+               {[<<209,128,209,132>>, <<209,129,208,176,208,185,209,130>>], [], [
+                       {[<<208,191,209,131,209,130,209,140>>, '...'], [], match_path, []}
+               ]}
+       ],
+       Tests = [
+               {<<"ninenines.eu">>, <<"/">>,
+                       {ok, match_any, [], [], [], undefined}},
+               {<<"bugs.ninenines.eu">>, <<"/">>,
+                       {ok, match_any, [], [], [<<"bugs">>], undefined}},
+               {<<"cowboy.bugs.ninenines.eu">>, <<"/">>,
+                       {ok, match_any, [], [], [<<"cowboy">>, <<"bugs">>], undefined}},
+               {<<"www.ninenines.eu">>, <<"/pathinfo/is/next">>,
+                       {ok, match_path, [], [], undefined, []}},
+               {<<"www.ninenines.eu">>, <<"/pathinfo/is/next/path_info">>,
+                       {ok, match_path, [], [], undefined, [<<"path_info">>]}},
+               {<<"www.ninenines.eu">>, <<"/pathinfo/is/next/foo/bar">>,
+                       {ok, match_path, [], [], undefined, [<<"foo">>, <<"bar">>]}},
+               % Cyrillic from a latin1 encoded file.
+               {<<209,129,208,176,208,185,209,130,46,209,128,209,132>>,
+                       <<47,208,191,209,131,209,130,209,140,47,208,180,208,190,208,188,208,190,208,185>>,
+                       {ok, match_path, [], [], undefined, [<<208,180,208,190,208,188,208,190,208,185>>]}}
+       ],
+       [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() ->
+               R = match(Dispatch, H, P)
+       end} || {H, P, R} <- Tests].
+
+match_constraints_test() ->
+       Dispatch = [{'_', [],
+               [{[<<"path">>, value], [{value, int}], match, []}]}],
+       {ok, _, [], [{value, 123}], _, _} = match(Dispatch,
+               <<"ninenines.eu">>, <<"/path/123">>),
+       {ok, _, [], [{value, 123}], _, _} = match(Dispatch,
+               <<"ninenines.eu">>, <<"/path/123/">>),
+       {error, notfound, path} = match(Dispatch,
+               <<"ninenines.eu">>, <<"/path/NaN/">>),
+       Dispatch2 = [{'_', [],
+               [{[<<"path">>, username], [{username, function,
+               fun(Value) -> Value =:= cowboy_bstr:to_lower(Value) end}],
+               match, []}]}],
+       {ok, _, [], [{username, <<"essen">>}], _, _} = match(Dispatch2,
+               <<"ninenines.eu">>, <<"/path/essen">>),
+       {error, notfound, path} = match(Dispatch2,
+               <<"ninenines.eu">>, <<"/path/ESSEN">>),
+       ok.
+
+match_same_bindings_test() ->
+       Dispatch = [{[same, same], [], [{'_', [], match, []}]}],
+       {ok, _, [], [{same, <<"eu">>}], _, _} = match(Dispatch,
+               <<"eu.eu">>, <<"/">>),
+       {error, notfound, host} = match(Dispatch,
+               <<"ninenines.eu">>, <<"/">>),
+       Dispatch2 = [{[<<"eu">>, <<"ninenines">>, user], [],
+               [{[<<"path">>, user], [], match, []}]}],
+       {ok, _, [], [{user, <<"essen">>}], _, _} = match(Dispatch2,
+               <<"essen.ninenines.eu">>, <<"/path/essen">>),
+       {ok, _, [], [{user, <<"essen">>}], _, _} = match(Dispatch2,
+               <<"essen.ninenines.eu">>, <<"/path/essen/">>),
+       {error, notfound, path} = match(Dispatch2,
+               <<"essen.ninenines.eu">>, <<"/path/notessen">>),
+       Dispatch3 = [{'_', [], [{[same, same], [], match, []}]}],
+       {ok, _, [], [{same, <<"path">>}], _, _} = match(Dispatch3,
+               <<"ninenines.eu">>, <<"/path/path">>),
+       {error, notfound, path} = match(Dispatch3,
+               <<"ninenines.eu">>, <<"/path/to">>),
+       ok.
+-endif.
diff --git a/deps/cowboy/src/cowboy_spdy.erl b/deps/cowboy/src/cowboy_spdy.erl
new file mode 100644 (file)
index 0000000..8da9613
--- /dev/null
@@ -0,0 +1,505 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_spdy).
+
+%% API.
+-export([start_link/4]).
+
+%% Internal.
+-export([init/5]).
+-export([system_continue/3]).
+-export([system_terminate/4]).
+-export([system_code_change/4]).
+
+%% Internal request process.
+-export([request_init/11]).
+-export([resume/5]).
+-export([reply/4]).
+-export([stream_reply/3]).
+-export([stream_data/2]).
+-export([stream_close/1]).
+
+%% Internal transport functions.
+-export([name/0]).
+-export([messages/0]).
+-export([recv/3]).
+-export([send/2]).
+-export([sendfile/2]).
+-export([setopts/2]).
+
+-type streamid() :: non_neg_integer().
+-type socket() :: {pid(), streamid()}.
+
+-record(child, {
+       streamid :: streamid(),
+       pid :: pid(),
+       input = nofin :: fin | nofin,
+       in_buffer = <<>> :: binary(),
+       is_recv = false :: false | {active, socket(), pid()}
+               | {passive, socket(), pid(), non_neg_integer(), reference()},
+       output = nofin :: fin | nofin
+}).
+
+-record(state, {
+       parent = undefined :: pid(),
+       socket,
+       transport,
+       buffer = <<>> :: binary(),
+       middlewares,
+       env,
+       onrequest,
+       onresponse,
+       peer,
+       zdef,
+       zinf,
+       last_streamid = 0 :: streamid(),
+       children = [] :: [#child{}]
+}).
+
+-type opts() :: [{env, cowboy_middleware:env()}
+       | {middlewares, [module()]}
+       | {onrequest, cowboy:onrequest_fun()}
+       | {onresponse, cowboy:onresponse_fun()}].
+-export_type([opts/0]).
+
+%% API.
+
+-spec start_link(any(), inet:socket(), module(), any()) -> {ok, pid()}.
+start_link(Ref, Socket, Transport, Opts) ->
+       proc_lib:start_link(?MODULE, init,
+               [self(), Ref, Socket, Transport, Opts]).
+
+%% Internal.
+
+%% Faster alternative to proplists:get_value/3.
+get_value(Key, Opts, Default) ->
+       case lists:keyfind(Key, 1, Opts) of
+               {_, Value} -> Value;
+               _ -> Default
+       end.
+
+-spec init(pid(), ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Parent, Ref, Socket, Transport, Opts) ->
+       process_flag(trap_exit, true),
+       ok = proc_lib:init_ack(Parent, {ok, self()}),
+       {ok, Peer} = Transport:peername(Socket),
+       Middlewares = get_value(middlewares, Opts, [cowboy_router, cowboy_handler]),
+       Env = [{listener, Ref}|get_value(env, Opts, [])],
+       OnRequest = get_value(onrequest, Opts, undefined),
+       OnResponse = get_value(onresponse, Opts, undefined),
+       Zdef = cow_spdy:deflate_init(),
+       Zinf = cow_spdy:inflate_init(),
+       ok = ranch:accept_ack(Ref),
+       loop(#state{parent=Parent, socket=Socket, transport=Transport,
+               middlewares=Middlewares, env=Env, onrequest=OnRequest,
+               onresponse=OnResponse, peer=Peer, zdef=Zdef, zinf=Zinf}).
+
+loop(State=#state{parent=Parent, socket=Socket, transport=Transport,
+               buffer=Buffer, children=Children}) ->
+       {OK, Closed, Error} = Transport:messages(),
+       Transport:setopts(Socket, [{active, once}]),
+       receive
+               {OK, Socket, Data} ->
+                       parse_frame(State, << Buffer/binary, Data/binary >>);
+               {Closed, Socket} ->
+                       terminate(State);
+               {Error, Socket, _Reason} ->
+                       terminate(State);
+               {recv, FromSocket = {Pid, StreamID}, FromPid, Length, Timeout}
+                               when Pid =:= self() ->
+                       Child = #child{in_buffer=InBuffer, is_recv=false}
+                               = get_child(StreamID, State),
+                       if
+                               Length =:= 0, InBuffer =/= <<>> ->
+                                       FromPid ! {recv, FromSocket, {ok, InBuffer}},
+                                       loop(replace_child(Child#child{in_buffer= <<>>}, State));
+                               byte_size(InBuffer) >= Length ->
+                                       << Data:Length/binary, Rest/binary >> = InBuffer,
+                                       FromPid ! {recv, FromSocket, {ok, Data}},
+                                       loop(replace_child(Child#child{in_buffer=Rest}, State));
+                               true ->
+                                       TRef = erlang:send_after(Timeout, self(),
+                                               {recv_timeout, FromSocket}),
+                                       loop(replace_child(Child#child{
+                                               is_recv={passive, FromSocket, FromPid, Length, TRef}},
+                                               State))
+                       end;
+               {recv_timeout, {Pid, StreamID}}
+                               when Pid =:= self() ->
+                       Child = #child{is_recv={passive, FromSocket, FromPid, _, _}}
+                               = get_child(StreamID, State),
+                       FromPid ! {recv, FromSocket, {error, timeout}},
+                       loop(replace_child(Child, State));
+               {reply, {Pid, StreamID}, Status, Headers}
+                               when Pid =:= self() ->
+                       Child = #child{output=nofin} = get_child(StreamID, State),
+                       syn_reply(State, StreamID, true, Status, Headers),
+                       loop(replace_child(Child#child{output=fin}, State));
+               {reply, {Pid, StreamID}, Status, Headers, Body}
+                               when Pid =:= self() ->
+                       Child = #child{output=nofin} = get_child(StreamID, State),
+                       syn_reply(State, StreamID, false, Status, Headers),
+                       data(State, StreamID, true, Body),
+                       loop(replace_child(Child#child{output=fin}, State));
+               {stream_reply, {Pid, StreamID}, Status, Headers}
+                               when Pid =:= self() ->
+                       #child{output=nofin} = get_child(StreamID, State),
+                       syn_reply(State, StreamID, false, Status, Headers),
+                       loop(State);
+               {stream_data, {Pid, StreamID}, Data}
+                               when Pid =:= self() ->
+                       #child{output=nofin} = get_child(StreamID, State),
+                       data(State, StreamID, false, Data),
+                       loop(State);
+               {stream_close, {Pid, StreamID}}
+                               when Pid =:= self() ->
+                       Child = #child{output=nofin} = get_child(StreamID, State),
+                       data(State, StreamID, true, <<>>),
+                       loop(replace_child(Child#child{output=fin}, State));
+               {sendfile, {Pid, StreamID}, Filepath}
+                               when Pid =:= self() ->
+                       Child = #child{output=nofin} = get_child(StreamID, State),
+                       data_from_file(State, StreamID, Filepath),
+                       loop(replace_child(Child#child{output=fin}, State));
+               {active, FromSocket = {Pid, StreamID}, FromPid} when Pid =:= self() ->
+                       Child = #child{in_buffer=InBuffer, is_recv=false}
+                               = get_child(StreamID, State),
+                       case InBuffer of
+                               <<>> ->
+                                       loop(replace_child(Child#child{
+                                               is_recv={active, FromSocket, FromPid}}, State));
+                               _ ->
+                                       FromPid ! {spdy, FromSocket, InBuffer},
+                                       loop(replace_child(Child#child{in_buffer= <<>>}, State))
+                       end;
+               {passive, FromSocket = {Pid, StreamID}, FromPid} when Pid =:= self() ->
+                       Child = #child{is_recv=IsRecv} = get_child(StreamID, State),
+                       %% Make sure we aren't in the middle of a recv call.
+                       case IsRecv of false -> ok; {active, FromSocket, FromPid} -> ok end,
+                       loop(replace_child(Child#child{is_recv=false}, State));
+               {'EXIT', Parent, Reason} ->
+                       exit(Reason);
+               {'EXIT', Pid, _} ->
+                       %% @todo Report the error if any.
+                       loop(delete_child(Pid, State));
+               {system, From, Request} ->
+                       sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State);
+               %% Calls from the supervisor module.
+               {'$gen_call', {To, Tag}, which_children} ->
+                       Workers = [{?MODULE, Pid, worker, [?MODULE]}
+                               || #child{pid=Pid} <- Children],
+                       To ! {Tag, Workers},
+                       loop(State);
+               {'$gen_call', {To, Tag}, count_children} ->
+                       NbChildren = length(Children),
+                       Counts = [{specs, 1}, {active, NbChildren},
+                               {supervisors, 0}, {workers, NbChildren}],
+                       To ! {Tag, Counts},
+                       loop(State);
+               {'$gen_call', {To, Tag}, _} ->
+                       To ! {Tag, {error, ?MODULE}},
+                       loop(State)
+       after 60000 ->
+               goaway(State, ok),
+               terminate(State)
+       end.
+
+-spec system_continue(_, _, #state{}) -> ok.
+system_continue(_, _, State) ->
+       loop(State).
+
+-spec system_terminate(any(), _, _, _) -> no_return().
+system_terminate(Reason, _, _, _) ->
+       exit(Reason).
+
+-spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::#state{}.
+system_code_change(Misc, _, _, _) ->
+       {ok, Misc}.
+
+parse_frame(State=#state{zinf=Zinf}, Data) ->
+       case cow_spdy:split(Data) of
+               {true, Frame, Rest} ->
+                       P = cow_spdy:parse(Frame, Zinf),
+                       case handle_frame(State#state{buffer = Rest}, P) of
+                               error ->
+                                       terminate(State);
+                               State2 ->
+                                       parse_frame(State2, Rest)
+                       end;
+               false ->
+                       loop(State#state{buffer=Data})
+       end.
+
+%% FLAG_UNIDIRECTIONAL can only be set by the server.
+handle_frame(State, {syn_stream, StreamID, _, _, true,
+               _, _, _, _, _, _, _}) ->
+       rst_stream(State, StreamID, protocol_error),
+       State;
+%% We do not support Associated-To-Stream-ID.
+handle_frame(State, {syn_stream, StreamID, AssocToStreamID,
+               _, _, _, _, _, _, _, _, _}) when AssocToStreamID =/= 0 ->
+       rst_stream(State, StreamID, internal_error),
+       State;
+%% SYN_STREAM.
+%%
+%% Erlang does not allow us to control the priority of processes
+%% so we ignore that value entirely.
+handle_frame(State=#state{middlewares=Middlewares, env=Env,
+               onrequest=OnRequest, onresponse=OnResponse, peer=Peer},
+               {syn_stream, StreamID, _, IsFin, _, _,
+               Method, _, Host, Path, Version, Headers}) ->
+       Pid = spawn_link(?MODULE, request_init, [
+               {self(), StreamID}, Peer, OnRequest, OnResponse,
+               Env, Middlewares, Method, Host, Path, Version, Headers
+       ]),
+       new_child(State, StreamID, Pid, IsFin);
+%% RST_STREAM.
+handle_frame(State, {rst_stream, StreamID, Status}) ->
+       error_logger:error_msg("Received RST_STREAM frame ~p ~p",
+               [StreamID, Status]),
+       %% @todo Stop StreamID.
+       State;
+%% PING initiated by the server; ignore, we don't send any.
+handle_frame(State, {ping, PingID}) when PingID rem 2 =:= 0 ->
+       error_logger:error_msg("Ignored PING control frame: ~p~n", [PingID]),
+       State;
+%% PING initiated by the client; send it back.
+handle_frame(State=#state{socket=Socket, transport=Transport},
+               {ping, PingID}) ->
+       Transport:send(Socket, cow_spdy:ping(PingID)),
+       State;
+%% Data received for a stream.
+handle_frame(State, {data, StreamID, IsFin, Data}) ->
+       Child = #child{input=nofin, in_buffer=Buffer, is_recv=IsRecv}
+               = get_child(StreamID, State),
+       Data2 = << Buffer/binary, Data/binary >>,
+       IsFin2 = if IsFin -> fin; true -> nofin end,
+       Child2 = case IsRecv of
+               {active, FromSocket, FromPid} ->
+                       FromPid ! {spdy, FromSocket, Data},
+                       Child#child{input=IsFin2, is_recv=false};
+               {passive, FromSocket, FromPid, 0, TRef} ->
+                       FromPid ! {recv, FromSocket, {ok, Data2}},
+                       cancel_recv_timeout(StreamID, TRef),
+                       Child#child{input=IsFin2, in_buffer= <<>>, is_recv=false};
+               {passive, FromSocket, FromPid, Length, TRef}
+                               when byte_size(Data2) >= Length ->
+                       << Data3:Length/binary, Rest/binary >> = Data2,
+                       FromPid ! {recv, FromSocket, {ok, Data3}},
+                       cancel_recv_timeout(StreamID, TRef),
+                       Child#child{input=IsFin2, in_buffer=Rest, is_recv=false};
+               _ ->
+                       Child#child{input=IsFin2, in_buffer=Data2}
+       end,
+       replace_child(Child2, State);
+%% General error, can't recover.
+handle_frame(State, {error, badprotocol}) ->
+       goaway(State, protocol_error),
+       error;
+%% Ignore all other frames for now.
+handle_frame(State, Frame) ->
+       error_logger:error_msg("Ignored frame ~p", [Frame]),
+       State.
+
+cancel_recv_timeout(StreamID, TRef) ->
+       _ = erlang:cancel_timer(TRef),
+       receive
+               {recv_timeout, {Pid, StreamID}}
+                               when Pid =:= self() ->
+                       ok
+       after 0 ->
+               ok
+       end.
+
+%% @todo We must wait for the children to finish here,
+%% but only up to N milliseconds. Then we shutdown.
+terminate(_State) ->
+       ok.
+
+syn_reply(#state{socket=Socket, transport=Transport, zdef=Zdef},
+               StreamID, IsFin, Status, Headers) ->
+       Transport:send(Socket, cow_spdy:syn_reply(Zdef, StreamID, IsFin,
+               Status, <<"HTTP/1.1">>, Headers)).
+
+rst_stream(#state{socket=Socket, transport=Transport}, StreamID, Status) ->
+       Transport:send(Socket, cow_spdy:rst_stream(StreamID, Status)).
+
+goaway(#state{socket=Socket, transport=Transport, last_streamid=LastStreamID},
+               Status) ->
+       Transport:send(Socket, cow_spdy:goaway(LastStreamID, Status)).
+
+data(#state{socket=Socket, transport=Transport}, StreamID, IsFin, Data) ->
+       Transport:send(Socket, cow_spdy:data(StreamID, IsFin, Data)).
+
+data_from_file(#state{socket=Socket, transport=Transport},
+               StreamID, Filepath) ->
+       {ok, IoDevice} = file:open(Filepath, [read, binary, raw]),
+       data_from_file(Socket, Transport, StreamID, IoDevice).
+
+data_from_file(Socket, Transport, StreamID, IoDevice) ->
+       case file:read(IoDevice, 16#1fff) of
+               eof ->
+                       _ = Transport:send(Socket, cow_spdy:data(StreamID, true, <<>>)),
+                       ok;
+               {ok, Data} ->
+                       case Transport:send(Socket, cow_spdy:data(StreamID, false, Data)) of
+                               ok ->
+                                       data_from_file(Socket, Transport, StreamID, IoDevice);
+                               {error, _} ->
+                                       ok
+                       end
+       end.
+
+%% Children.
+
+new_child(State=#state{children=Children}, StreamID, Pid, IsFin) ->
+       IsFin2 = if IsFin -> fin; true -> nofin end,
+       State#state{last_streamid=StreamID,
+               children=[#child{streamid=StreamID,
+               pid=Pid, input=IsFin2}|Children]}.
+
+get_child(StreamID, #state{children=Children}) ->
+       lists:keyfind(StreamID, #child.streamid, Children).
+
+replace_child(Child=#child{streamid=StreamID},
+               State=#state{children=Children}) ->
+       Children2 = lists:keyreplace(StreamID, #child.streamid, Children, Child),
+       State#state{children=Children2}.
+
+delete_child(Pid, State=#state{children=Children}) ->
+       Children2 = lists:keydelete(Pid, #child.pid, Children),
+       State#state{children=Children2}.
+
+%% Request process.
+
+-spec request_init(socket(), {inet:ip_address(), inet:port_number()},
+               cowboy:onrequest_fun(), cowboy:onresponse_fun(),
+               cowboy_middleware:env(), [module()],
+               binary(), binary(), binary(), binary(), [{binary(), binary()}])
+       -> ok.
+request_init(FakeSocket, Peer, OnRequest, OnResponse,
+               Env, Middlewares, Method, Host, Path, Version, Headers) ->
+       {Host2, Port} = cow_http:parse_fullhost(Host),
+       {Path2, Qs} = cow_http:parse_fullpath(Path),
+       Version2 = cow_http:parse_version(Version),
+       Req = cowboy_req:new(FakeSocket, ?MODULE, Peer,
+               Method, Path2, Qs, Version2, Headers,
+               Host2, Port, <<>>, true, false, OnResponse),
+       case OnRequest of
+               undefined ->
+                       execute(Req, Env, Middlewares);
+               _ ->
+                       Req2 = OnRequest(Req),
+                       case cowboy_req:get(resp_state, Req2) of
+                               waiting -> execute(Req2, Env, Middlewares);
+                               _ -> ok
+                       end
+       end.
+
+-spec execute(cowboy_req:req(), cowboy_middleware:env(), [module()])
+       -> ok.
+execute(Req, _, []) ->
+       cowboy_req:ensure_response(Req, 204);
+execute(Req, Env, [Middleware|Tail]) ->
+       case Middleware:execute(Req, Env) of
+               {ok, Req2, Env2} ->
+                       execute(Req2, Env2, Tail);
+               {suspend, Module, Function, Args} ->
+                       erlang:hibernate(?MODULE, resume,
+                               [Env, Tail, Module, Function, Args]);
+               {halt, Req2} ->
+                       cowboy_req:ensure_response(Req2, 204);
+               {error, Status, Req2} ->
+                       cowboy_req:reply(Status, Req2)
+       end.
+
+-spec resume(cowboy_middleware:env(), [module()],
+       module(), module(), [any()]) -> ok.
+resume(Env, Tail, Module, Function, Args) ->
+       case apply(Module, Function, Args) of
+               {ok, Req2, Env2} ->
+                       execute(Req2, Env2, Tail);
+               {suspend, Module2, Function2, Args2} ->
+                       erlang:hibernate(?MODULE, resume,
+                               [Env, Tail, Module2, Function2, Args2]);
+               {halt, Req2} ->
+                       cowboy_req:ensure_response(Req2, 204);
+               {error, Status, Req2} ->
+                       cowboy_req:reply(Status, Req2)
+       end.
+
+%% Reply functions used by cowboy_req.
+
+-spec reply(socket(), binary(), cowboy:http_headers(), iodata()) -> ok.
+reply(Socket = {Pid, _}, Status, Headers, Body) ->
+       _ = case iolist_size(Body) of
+               0 -> Pid ! {reply, Socket, Status, Headers};
+               _ -> Pid ! {reply, Socket, Status, Headers, Body}
+       end,
+       ok.
+
+-spec stream_reply(socket(), binary(), cowboy:http_headers()) -> ok.
+stream_reply(Socket = {Pid, _}, Status, Headers) ->
+       _ = Pid ! {stream_reply, Socket, Status, Headers},
+       ok.
+
+-spec stream_data(socket(), iodata()) -> ok.
+stream_data(Socket = {Pid, _}, Data) ->
+       _ = Pid ! {stream_data, Socket, Data},
+       ok.
+
+-spec stream_close(socket()) -> ok.
+stream_close(Socket = {Pid, _}) ->
+       _ = Pid ! {stream_close, Socket},
+       ok.
+
+%% Internal transport functions.
+
+-spec name() -> spdy.
+name() ->
+       spdy.
+
+-spec messages() -> {spdy, spdy_closed, spdy_error}.
+messages() ->
+       {spdy, spdy_closed, spdy_error}.
+
+-spec recv(socket(), non_neg_integer(), timeout())
+       -> {ok, binary()} | {error, timeout}.
+recv(Socket = {Pid, _}, Length, Timeout) ->
+       _ = Pid ! {recv, Socket, self(), Length, Timeout},
+       receive
+               {recv, Socket, Ret} ->
+                       Ret
+       end.
+
+-spec send(socket(), iodata()) -> ok.
+send(Socket, Data) ->
+       stream_data(Socket, Data).
+
+%% We don't wait for the result of the actual sendfile call,
+%% therefore we can't know how much was actually sent.
+%% This isn't a problem as we don't use this value in Cowboy.
+-spec sendfile(socket(), file:name_all()) -> {ok, undefined}.
+sendfile(Socket = {Pid, _}, Filepath) ->
+       _ = Pid ! {sendfile, Socket, Filepath},
+       {ok, undefined}.
+
+-spec setopts({pid(), _}, list()) -> ok.
+setopts(Socket = {Pid, _}, [{active, once}]) ->
+       _ = Pid ! {active, Socket, self()},
+       ok;
+setopts(Socket = {Pid, _}, [{active, false}]) ->
+       _ = Pid ! {passive, Socket, self()},
+       ok.
diff --git a/deps/cowboy/src/cowboy_static.erl b/deps/cowboy/src/cowboy_static.erl
new file mode 100644 (file)
index 0000000..fae4568
--- /dev/null
@@ -0,0 +1,291 @@
+%% Copyright (c) 2011, Magnus Klaar <magnus.klaar@gmail.com>
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_static).
+
+-export([init/3]).
+-export([rest_init/2]).
+-export([malformed_request/2]).
+-export([forbidden/2]).
+-export([content_types_provided/2]).
+-export([resource_exists/2]).
+-export([last_modified/2]).
+-export([generate_etag/2]).
+-export([get_file/2]).
+
+-type extra_etag() :: {etag, module(), function()} | {etag, false}.
+-type extra_mimetypes() :: {mimetypes, module(), function()}
+       | {mimetypes, binary() | {binary(), binary(), [{binary(), binary()}]}}.
+-type extra() :: [extra_etag() | extra_mimetypes()].
+-type opts() :: {file | dir, string() | binary()}
+       | {file | dir, string() | binary(), extra()}
+       | {priv_file | priv_dir, atom(), string() | binary()}
+       | {priv_file | priv_dir, atom(), string() | binary(), extra()}.
+-export_type([opts/0]).
+
+-include_lib("kernel/include/file.hrl").
+
+-type state() :: {binary(), {ok, #file_info{}} | {error, atom()}, extra()}.
+
+-spec init(_, _, _) -> {upgrade, protocol, cowboy_rest}.
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_rest}.
+
+%% Resolve the file that will be sent and get its file information.
+%% If the handler is configured to manage a directory, check that the
+%% requested file is inside the configured directory.
+
+-spec rest_init(Req, opts())
+       -> {ok, Req, error | state()}
+       when Req::cowboy_req:req().
+rest_init(Req, {Name, Path}) ->
+       rest_init_opts(Req, {Name, Path, []});
+rest_init(Req, {Name, App, Path})
+               when Name =:= priv_file; Name =:= priv_dir ->
+       rest_init_opts(Req, {Name, App, Path, []});
+rest_init(Req, Opts) ->
+       rest_init_opts(Req, Opts).
+
+rest_init_opts(Req, {priv_file, App, Path, Extra}) ->
+       rest_init_info(Req, absname(priv_path(App, Path)), Extra);
+rest_init_opts(Req, {file, Path, Extra}) ->
+       rest_init_info(Req, absname(Path), Extra);
+rest_init_opts(Req, {priv_dir, App, Path, Extra}) ->
+       rest_init_dir(Req, priv_path(App, Path), Extra);
+rest_init_opts(Req, {dir, Path, Extra}) ->
+       rest_init_dir(Req, Path, Extra).
+
+priv_path(App, Path) ->
+       case code:priv_dir(App) of
+               {error, bad_name} ->
+                       error({badarg, "Can't resolve the priv_dir of application "
+                               ++ atom_to_list(App)});
+               PrivDir when is_list(Path) ->
+                       PrivDir ++ "/" ++ Path;
+               PrivDir when is_binary(Path) ->
+                       << (list_to_binary(PrivDir))/binary, $/, Path/binary >>
+       end.
+
+absname(Path) when is_list(Path) ->
+       filename:absname(list_to_binary(Path));
+absname(Path) when is_binary(Path) ->
+       filename:absname(Path).
+
+rest_init_dir(Req, Path, Extra) when is_list(Path) ->
+       rest_init_dir(Req, list_to_binary(Path), Extra);
+rest_init_dir(Req, Path, Extra) ->
+       Dir = fullpath(filename:absname(Path)),
+       {PathInfo, Req2} = cowboy_req:path_info(Req),
+       Filepath = filename:join([Dir|PathInfo]),
+       Len = byte_size(Dir),
+       case fullpath(Filepath) of
+               << Dir:Len/binary, $/, _/binary >> ->
+                       rest_init_info(Req2, Filepath, Extra);
+               _ ->
+                       {ok, Req2, error}
+       end.
+
+fullpath(Path) ->
+       fullpath(filename:split(Path), []).
+fullpath([], Acc) ->
+       filename:join(lists:reverse(Acc));
+fullpath([<<".">>|Tail], Acc) ->
+       fullpath(Tail, Acc);
+fullpath([<<"..">>|Tail], Acc=[_]) ->
+       fullpath(Tail, Acc);
+fullpath([<<"..">>|Tail], [_|Acc]) ->
+       fullpath(Tail, Acc);
+fullpath([Segment|Tail], Acc) ->
+       fullpath(Tail, [Segment|Acc]).
+
+rest_init_info(Req, Path, Extra) ->
+       Info = file:read_file_info(Path, [{time, universal}]),
+       {ok, Req, {Path, Info, Extra}}.
+
+-ifdef(TEST).
+fullpath_test_() ->
+       Tests = [
+               {<<"/home/cowboy">>, <<"/home/cowboy">>},
+               {<<"/home/cowboy">>, <<"/home/cowboy/">>},
+               {<<"/home/cowboy">>, <<"/home/cowboy/./">>},
+               {<<"/home/cowboy">>, <<"/home/cowboy/./././././.">>},
+               {<<"/home/cowboy">>, <<"/home/cowboy/abc/..">>},
+               {<<"/home/cowboy">>, <<"/home/cowboy/abc/../">>},
+               {<<"/home/cowboy">>, <<"/home/cowboy/abc/./../.">>},
+               {<<"/">>, <<"/home/cowboy/../../../../../..">>},
+               {<<"/etc/passwd">>, <<"/home/cowboy/../../etc/passwd">>}
+       ],
+       [{P, fun() -> R = fullpath(P) end} || {R, P} <- Tests].
+
+good_path_check_test_() ->
+       Tests = [
+               <<"/home/cowboy/file">>,
+               <<"/home/cowboy/file/">>,
+               <<"/home/cowboy/./file">>,
+               <<"/home/cowboy/././././././file">>,
+               <<"/home/cowboy/abc/../file">>,
+               <<"/home/cowboy/abc/../file">>,
+               <<"/home/cowboy/abc/./.././file">>
+       ],
+       [{P, fun() ->
+               case fullpath(P) of
+                       << "/home/cowboy/", _/binary >> -> ok
+               end
+       end} || P <- Tests].
+
+bad_path_check_test_() ->
+       Tests = [
+               <<"/home/cowboy/../../../../../../file">>,
+               <<"/home/cowboy/../../etc/passwd">>
+       ],
+       [{P, fun() ->
+               error = case fullpath(P) of
+                       << "/home/cowboy/", _/binary >> -> ok;
+                       _ -> error
+               end
+       end} || P <- Tests].
+
+good_path_win32_check_test_() ->
+       Tests = case os:type() of
+               {unix, _} ->
+                       [];
+               {win32, _} ->
+                       [
+                               <<"c:/home/cowboy/file">>,
+                               <<"c:/home/cowboy/file/">>,
+                               <<"c:/home/cowboy/./file">>,
+                               <<"c:/home/cowboy/././././././file">>,
+                               <<"c:/home/cowboy/abc/../file">>,
+                               <<"c:/home/cowboy/abc/../file">>,
+                               <<"c:/home/cowboy/abc/./.././file">>
+                       ]
+       end,
+       [{P, fun() ->
+               case fullpath(P) of
+                       << "c:/home/cowboy/", _/binary >> -> ok
+               end
+       end} || P <- Tests].
+
+bad_path_win32_check_test_() ->
+       Tests = case os:type() of
+               {unix, _} ->
+                       [];
+               {win32, _} ->
+                       [
+                               <<"c:/home/cowboy/../../secretfile.bat">>,
+                               <<"c:/home/cowboy/c:/secretfile.bat">>,
+                               <<"c:/home/cowboy/..\\..\\secretfile.bat">>,
+                               <<"c:/home/cowboy/c:\\secretfile.bat">>
+                       ]
+       end,
+       [{P, fun() ->
+               error = case fullpath(P) of
+                       << "c:/home/cowboy/", _/binary >> -> ok;
+                       _ -> error
+               end
+       end} || P <- Tests].
+-endif.
+
+%% Reject requests that tried to access a file outside
+%% the target directory.
+
+-spec malformed_request(Req, State)
+       -> {boolean(), Req, State}.
+malformed_request(Req, State) ->
+       {State =:= error, Req, State}.
+
+%% Directories, files that can't be accessed at all and
+%% files with no read flag are forbidden.
+
+-spec forbidden(Req, State)
+       -> {boolean(), Req, State}
+       when State::state().
+forbidden(Req, State={_, {ok, #file_info{type=directory}}, _}) ->
+       {true, Req, State};
+forbidden(Req, State={_, {error, eacces}, _}) ->
+       {true, Req, State};
+forbidden(Req, State={_, {ok, #file_info{access=Access}}, _})
+               when Access =:= write; Access =:= none ->
+       {true, Req, State};
+forbidden(Req, State) ->
+       {false, Req, State}.
+
+%% Detect the mimetype of the file.
+
+-spec content_types_provided(Req, State)
+       -> {[{binary(), get_file}], Req, State}
+       when State::state().
+content_types_provided(Req, State={Path, _, Extra}) ->
+       case lists:keyfind(mimetypes, 1, Extra) of
+               false ->
+                       {[{cow_mimetypes:web(Path), get_file}], Req, State};
+               {mimetypes, Module, Function} ->
+                       {[{Module:Function(Path), get_file}], Req, State};
+               {mimetypes, Type} ->
+                       {[{Type, get_file}], Req, State}
+       end.
+
+%% Assume the resource doesn't exist if it's not a regular file.
+
+-spec resource_exists(Req, State)
+       -> {boolean(), Req, State}
+       when State::state().
+resource_exists(Req, State={_, {ok, #file_info{type=regular}}, _}) ->
+       {true, Req, State};
+resource_exists(Req, State) ->
+       {false, Req, State}.
+
+%% Generate an etag for the file.
+
+-spec generate_etag(Req, State)
+       -> {{strong | weak, binary()}, Req, State}
+       when State::state().
+generate_etag(Req, State={Path, {ok, #file_info{size=Size, mtime=Mtime}},
+               Extra}) ->
+       case lists:keyfind(etag, 1, Extra) of
+               false ->
+                       {generate_default_etag(Size, Mtime), Req, State};
+               {etag, Module, Function} ->
+                       {Module:Function(Path, Size, Mtime), Req, State};
+               {etag, false} ->
+                       {undefined, Req, State}
+       end.
+
+generate_default_etag(Size, Mtime) ->
+       {strong, integer_to_binary(erlang:phash2({Size, Mtime}, 16#ffffffff))}.
+
+%% Return the time of last modification of the file.
+
+-spec last_modified(Req, State)
+       -> {calendar:datetime(), Req, State}
+       when State::state().
+last_modified(Req, State={_, {ok, #file_info{mtime=Modified}}, _}) ->
+       {Modified, Req, State}.
+
+%% Stream the file.
+%% @todo Export cowboy_req:resp_body_fun()?
+
+-spec get_file(Req, State)
+       -> {{stream, non_neg_integer(), fun()}, Req, State}
+       when State::state().
+get_file(Req, State={Path, {ok, #file_info{size=Size}}, _}) ->
+       Sendfile = fun (Socket, Transport) ->
+               case Transport:sendfile(Socket, Path) of
+                       {ok, _} -> ok;
+                       {error, closed} -> ok;
+                       {error, etimedout} -> ok
+               end
+       end,
+       {{stream, Size, Sendfile}, Req, State}.
diff --git a/deps/cowboy/src/cowboy_sub_protocol.erl b/deps/cowboy/src/cowboy_sub_protocol.erl
new file mode 100644 (file)
index 0000000..713c3cd
--- /dev/null
@@ -0,0 +1,23 @@
+%% Copyright (c) 2013, James Fish <james@fishcakez.com>
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cowboy_sub_protocol).
+
+-callback upgrade(Req, Env, module(), any())
+       -> {ok, Req, Env}
+       | {suspend, module(), atom(), [any()]}
+       | {halt, Req}
+       | {error, cowboy:http_status(), Req}
+       when Req::cowboy_req:req(), Env::cowboy_middleware:env().
similarity index 75%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_sup.erl
rename to deps/cowboy/src/cowboy_sup.erl
index 34591bc03581a99639ea4eb1cef9075b9c73f955..cf48595e31a0c4b46c363c01c3055dba5221b912 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
-%% @private
 -module(cowboy_sup).
 -behaviour(supervisor).
 
--export([start_link/0]). %% API.
--export([init/1]). %% supervisor.
-
--define(SUPERVISOR, ?MODULE).
-
-%% API.
+-export([start_link/0]).
+-export([init/1]).
 
 -spec start_link() -> {ok, pid()}.
 start_link() ->
-       supervisor:start_link({local, ?SUPERVISOR}, ?MODULE, []).
-
-%% supervisor.
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
 
--spec init([]) -> {ok, {{one_for_one, 10, 10}, [{_, _, _, _, _, _}, ...]}}.
+-spec init([])
+       -> {ok, {{supervisor:strategy(), 10, 10}, [supervisor:child_spec()]}}.
 init([]) ->
        Procs = [{cowboy_clock, {cowboy_clock, start_link, []},
                permanent, 5000, worker, [cowboy_clock]}],
diff --git a/deps/cowboy/src/cowboy_websocket.erl b/deps/cowboy/src/cowboy_websocket.erl
new file mode 100644 (file)
index 0000000..c0f94c4
--- /dev/null
@@ -0,0 +1,770 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% Cowboy supports versions 7 through 17 of the Websocket drafts.
+%% It also supports RFC6455, the proposed standard for Websocket.
+-module(cowboy_websocket).
+-behaviour(cowboy_sub_protocol).
+
+-export([upgrade/4]).
+-export([handler_loop/4]).
+
+-type close_code() :: 1000..4999.
+-export_type([close_code/0]).
+
+-type frame() :: close | ping | pong
+       | {text | binary | close | ping | pong, iodata()}
+       | {close, close_code(), iodata()}.
+-export_type([frame/0]).
+
+-type opcode() :: 0 | 1 | 2 | 8 | 9 | 10.
+-type mask_key() :: 0..16#ffffffff.
+-type frag_state() :: undefined
+       | {nofin, opcode(), binary()} | {fin, opcode(), binary()}.
+-type rsv() :: << _:3 >>.
+-type terminate_reason() :: {normal | error | remote, atom()}
+       | {remote, close_code(), binary()}.
+
+-record(state, {
+       env :: cowboy_middleware:env(),
+       socket = undefined :: inet:socket(),
+       transport = undefined :: module(),
+       handler :: module(),
+       key = undefined :: undefined | binary(),
+       timeout = infinity :: timeout(),
+       timeout_ref = undefined :: undefined | reference(),
+       messages = undefined :: undefined | {atom(), atom(), atom()},
+       hibernate = false :: boolean(),
+       frag_state = undefined :: frag_state(),
+       utf8_state = <<>> :: binary(),
+       deflate_frame = false :: boolean(),
+       inflate_state :: undefined | port(),
+       deflate_state :: undefined | port()
+}).
+
+-spec upgrade(Req, Env, module(), any())
+       -> {ok, Req, Env}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req(), Env::cowboy_middleware:env().
+upgrade(Req, Env, Handler, HandlerOpts) ->
+       {_, Ref} = lists:keyfind(listener, 1, Env),
+       ranch:remove_connection(Ref),
+       [Socket, Transport] = cowboy_req:get([socket, transport], Req),
+       State = #state{env=Env, socket=Socket, transport=Transport,
+               handler=Handler},
+       try websocket_upgrade(State, Req) of
+               {ok, State2, Req2} ->
+                       handler_init(State2, Req2, HandlerOpts)
+       catch _:_ ->
+               receive
+                       {cowboy_req, resp_sent} -> ok
+               after 0 ->
+                       _ = cowboy_req:reply(400, Req),
+                       exit(normal)
+               end
+       end.
+
+-spec websocket_upgrade(#state{}, Req)
+       -> {ok, #state{}, Req} when Req::cowboy_req:req().
+websocket_upgrade(State, Req) ->
+       {ok, ConnTokens, Req2}
+               = cowboy_req:parse_header(<<"connection">>, Req),
+       true = lists:member(<<"upgrade">>, ConnTokens),
+       %% @todo Should probably send a 426 if the Upgrade header is missing.
+       {ok, [<<"websocket">>], Req3}
+               = cowboy_req:parse_header(<<"upgrade">>, Req2),
+       {Version, Req4} = cowboy_req:header(<<"sec-websocket-version">>, Req3),
+       IntVersion = list_to_integer(binary_to_list(Version)),
+       true = (IntVersion =:= 7) orelse (IntVersion =:= 8)
+               orelse (IntVersion =:= 13),
+       {Key, Req5} = cowboy_req:header(<<"sec-websocket-key">>, Req4),
+       false = Key =:= undefined,
+       websocket_extensions(State#state{key=Key},
+               cowboy_req:set_meta(websocket_version, IntVersion, Req5)).
+
+-spec websocket_extensions(#state{}, Req)
+       -> {ok, #state{}, Req} when Req::cowboy_req:req().
+websocket_extensions(State, Req) ->
+       case cowboy_req:parse_header(<<"sec-websocket-extensions">>, Req) of
+               {ok, Extensions, Req2} when Extensions =/= undefined ->
+                       [Compress] = cowboy_req:get([resp_compress], Req),
+                       case lists:keyfind(<<"x-webkit-deflate-frame">>, 1, Extensions) of
+                               {<<"x-webkit-deflate-frame">>, []} when Compress =:= true ->
+                                       Inflate = zlib:open(),
+                                       Deflate = zlib:open(),
+                                       % Since we are negotiating an unconstrained deflate-frame
+                                       % then we must be willing to accept frames using the
+                                       % maximum window size which is 2^15. The negative value
+                                       % indicates that zlib headers are not used.
+                                       ok = zlib:inflateInit(Inflate, -15),
+                                       % Initialize the deflater with a window size of 2^15 bits and disable
+                                       % the zlib headers.
+                                       ok = zlib:deflateInit(Deflate, best_compression, deflated, -15, 8, default),
+                                       {ok, State#state{
+                                               deflate_frame = true,
+                                               inflate_state = Inflate,
+                                               deflate_state = Deflate
+                                       }, cowboy_req:set_meta(websocket_compress, true, Req2)};
+                               _ ->
+                                       {ok, State, cowboy_req:set_meta(websocket_compress, false, Req2)}
+                       end;
+               _ ->
+                       {ok, State, cowboy_req:set_meta(websocket_compress, false, Req)}
+       end.
+
+-spec handler_init(#state{}, Req, any())
+       -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_init(State=#state{env=Env, transport=Transport,
+               handler=Handler}, Req, HandlerOpts) ->
+       try Handler:websocket_init(Transport:name(), Req, HandlerOpts) of
+               {ok, Req2, HandlerState} ->
+                       websocket_handshake(State, Req2, HandlerState);
+               {ok, Req2, HandlerState, hibernate} ->
+                       websocket_handshake(State#state{hibernate=true},
+                               Req2, HandlerState);
+               {ok, Req2, HandlerState, Timeout} ->
+                       websocket_handshake(State#state{timeout=Timeout},
+                               Req2, HandlerState);
+               {ok, Req2, HandlerState, Timeout, hibernate} ->
+                       websocket_handshake(State#state{timeout=Timeout,
+                               hibernate=true}, Req2, HandlerState);
+               {shutdown, Req2} ->
+                       cowboy_req:ensure_response(Req2, 400),
+                       {ok, Req2, [{result, closed}|Env]}
+       catch Class:Reason ->
+               Stacktrace = erlang:get_stacktrace(),
+               cowboy_req:maybe_reply(Stacktrace, Req),
+               erlang:Class([
+                       {reason, Reason},
+                       {mfa, {Handler, websocket_init, 3}},
+                       {stacktrace, Stacktrace},
+                       {req, cowboy_req:to_list(Req)},
+                       {opts, HandlerOpts}
+               ])
+       end.
+
+-spec websocket_handshake(#state{}, Req, any())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+websocket_handshake(State=#state{
+                       transport=Transport, key=Key, deflate_frame=DeflateFrame},
+               Req, HandlerState) ->
+       Challenge = base64:encode(crypto:hash(sha,
+               << Key/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" >>)),
+       Extensions = case DeflateFrame of
+               false -> [];
+               true -> [{<<"sec-websocket-extensions">>, <<"x-webkit-deflate-frame">>}]
+       end,
+       {ok, Req2} = cowboy_req:upgrade_reply(
+               101,
+               [{<<"upgrade">>, <<"websocket">>},
+                {<<"sec-websocket-accept">>, Challenge}|
+                Extensions],
+               Req),
+       %% Flush the resp_sent message before moving on.
+       receive {cowboy_req, resp_sent} -> ok after 0 -> ok end,
+       State2 = handler_loop_timeout(State),
+       handler_before_loop(State2#state{key=undefined,
+               messages=Transport:messages()}, Req2, HandlerState, <<>>).
+
+-spec handler_before_loop(#state{}, Req, any(), binary())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_before_loop(State=#state{
+                       socket=Socket, transport=Transport, hibernate=true},
+               Req, HandlerState, SoFar) ->
+       Transport:setopts(Socket, [{active, once}]),
+       {suspend, ?MODULE, handler_loop,
+               [State#state{hibernate=false}, Req, HandlerState, SoFar]};
+handler_before_loop(State=#state{socket=Socket, transport=Transport},
+               Req, HandlerState, SoFar) ->
+       Transport:setopts(Socket, [{active, once}]),
+       handler_loop(State, Req, HandlerState, SoFar).
+
+-spec handler_loop_timeout(#state{}) -> #state{}.
+handler_loop_timeout(State=#state{timeout=infinity}) ->
+       State#state{timeout_ref=undefined};
+handler_loop_timeout(State=#state{timeout=Timeout, timeout_ref=PrevRef}) ->
+       _ = case PrevRef of undefined -> ignore; PrevRef ->
+               erlang:cancel_timer(PrevRef) end,
+       TRef = erlang:start_timer(Timeout, self(), ?MODULE),
+       State#state{timeout_ref=TRef}.
+
+-spec handler_loop(#state{}, Req, any(), binary())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+handler_loop(State=#state{socket=Socket, messages={OK, Closed, Error},
+               timeout_ref=TRef}, Req, HandlerState, SoFar) ->
+       receive
+               {OK, Socket, Data} ->
+                       State2 = handler_loop_timeout(State),
+                       websocket_data(State2, Req, HandlerState,
+                               << SoFar/binary, Data/binary >>);
+               {Closed, Socket} ->
+                       handler_terminate(State, Req, HandlerState, {error, closed});
+               {Error, Socket, Reason} ->
+                       handler_terminate(State, Req, HandlerState, {error, Reason});
+               {timeout, TRef, ?MODULE} ->
+                       websocket_close(State, Req, HandlerState, {normal, timeout});
+               {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) ->
+                       handler_loop(State, Req, HandlerState, SoFar);
+               Message ->
+                       handler_call(State, Req, HandlerState,
+                               SoFar, websocket_info, Message, fun handler_before_loop/4)
+       end.
+
+%% All frames passing through this function are considered valid,
+%% with the only exception of text and close frames with a payload
+%% which may still contain errors.
+-spec websocket_data(#state{}, Req, any(), binary())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+%% RSV bits MUST be 0 unless an extension is negotiated
+%% that defines meanings for non-zero values.
+websocket_data(State, Req, HandlerState, << _:1, Rsv:3, _/bits >>)
+               when Rsv =/= 0, State#state.deflate_frame =:= false ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% Invalid opcode. Note that these opcodes may be used by extensions.
+websocket_data(State, Req, HandlerState, << _:4, Opcode:4, _/bits >>)
+               when Opcode > 2, Opcode =/= 8, Opcode =/= 9, Opcode =/= 10 ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% Control frames MUST NOT be fragmented.
+websocket_data(State, Req, HandlerState, << 0:1, _:3, Opcode:4, _/bits >>)
+               when Opcode >= 8 ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% A frame MUST NOT use the zero opcode unless fragmentation was initiated.
+websocket_data(State=#state{frag_state=undefined}, Req, HandlerState,
+               << _:4, 0:4, _/bits >>) ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% Non-control opcode when expecting control message or next fragment.
+websocket_data(State=#state{frag_state={nofin, _, _}}, Req, HandlerState,
+               << _:4, Opcode:4, _/bits >>)
+               when Opcode =/= 0, Opcode < 8 ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% Close control frame length MUST be 0 or >= 2.
+websocket_data(State, Req, HandlerState, << _:4, 8:4, _:1, 1:7, _/bits >>) ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% Close control frame with incomplete close code. Need more data.
+websocket_data(State, Req, HandlerState,
+               Data = << _:4, 8:4, 1:1, Len:7, _/bits >>)
+               when Len > 1, byte_size(Data) < 8 ->
+       handler_before_loop(State, Req, HandlerState, Data);
+%% 7 bits payload length.
+websocket_data(State, Req, HandlerState, << Fin:1, Rsv:3/bits, Opcode:4, 1:1,
+               Len:7, MaskKey:32, Rest/bits >>)
+               when Len < 126 ->
+       websocket_data(State, Req, HandlerState,
+               Opcode, Len, MaskKey, Rest, Rsv, Fin);
+%% 16 bits payload length.
+websocket_data(State, Req, HandlerState, << Fin:1, Rsv:3/bits, Opcode:4, 1:1,
+               126:7, Len:16, MaskKey:32, Rest/bits >>)
+               when Len > 125, Opcode < 8 ->
+       websocket_data(State, Req, HandlerState,
+               Opcode, Len, MaskKey, Rest, Rsv, Fin);
+%% 63 bits payload length.
+websocket_data(State, Req, HandlerState, << Fin:1, Rsv:3/bits, Opcode:4, 1:1,
+               127:7, 0:1, Len:63, MaskKey:32, Rest/bits >>)
+               when Len > 16#ffff, Opcode < 8 ->
+       websocket_data(State, Req, HandlerState,
+               Opcode, Len, MaskKey, Rest, Rsv, Fin);
+%% When payload length is over 63 bits, the most significant bit MUST be 0.
+websocket_data(State, Req, HandlerState, << _:8, 1:1, 127:7, 1:1, _:7, _/binary >>) ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% All frames sent from the client to the server are masked.
+websocket_data(State, Req, HandlerState, << _:8, 0:1, _/bits >>) ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% For the next two clauses, it can be one of the following:
+%%
+%%  *  The minimal number of bytes MUST be used to encode the length
+%%  *  All control frames MUST have a payload length of 125 bytes or less
+websocket_data(State, Req, HandlerState, << _:9, 126:7, _:48, _/bits >>) ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+websocket_data(State, Req, HandlerState, << _:9, 127:7, _:96, _/bits >>) ->
+       websocket_close(State, Req, HandlerState, {error, badframe});
+%% Need more data.
+websocket_data(State, Req, HandlerState, Data) ->
+       handler_before_loop(State, Req, HandlerState, Data).
+
+%% Initialize or update fragmentation state.
+-spec websocket_data(#state{}, Req, any(),
+       opcode(), non_neg_integer(), mask_key(), binary(), rsv(), 0 | 1)
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+%% The opcode is only included in the first frame fragment.
+websocket_data(State=#state{frag_state=undefined}, Req, HandlerState,
+               Opcode, Len, MaskKey, Data, Rsv, 0) ->
+       websocket_payload(State#state{frag_state={nofin, Opcode, <<>>}},
+               Req, HandlerState, 0, Len, MaskKey, <<>>, 0, Data, Rsv);
+%% Subsequent frame fragments.
+websocket_data(State=#state{frag_state={nofin, _, _}}, Req, HandlerState,
+               0, Len, MaskKey, Data, Rsv, 0) ->
+       websocket_payload(State, Req, HandlerState,
+               0, Len, MaskKey, <<>>, 0, Data, Rsv);
+%% Final frame fragment.
+websocket_data(State=#state{frag_state={nofin, Opcode, SoFar}},
+               Req, HandlerState, 0, Len, MaskKey, Data, Rsv, 1) ->
+       websocket_payload(State#state{frag_state={fin, Opcode, SoFar}},
+               Req, HandlerState, 0, Len, MaskKey, <<>>, 0, Data, Rsv);
+%% Unfragmented frame.
+websocket_data(State, Req, HandlerState, Opcode, Len, MaskKey, Data, Rsv, 1) ->
+       websocket_payload(State, Req, HandlerState,
+               Opcode, Len, MaskKey, <<>>, 0, Data, Rsv).
+
+-spec websocket_payload(#state{}, Req, any(),
+       opcode(), non_neg_integer(), mask_key(), binary(), non_neg_integer(),
+       binary(), rsv())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+%% Unmask, inflate and validate the payload of a single frame, reading more
+%% socket data (websocket_payload_loop/9) while the frame is split across
+%% packets. UnmaskedLen tracks how many payload bytes were already unmasked
+%% so the mask key can be rotated to the correct byte (rotate_mask_key/2).
+%% Close control frames with a payload MUST contain a valid close code.
+websocket_payload(State, Req, HandlerState,
+               Opcode=8, Len, MaskKey, <<>>, 0,
+               << MaskedCode:2/binary, Rest/bits >>, Rsv) ->
+       Unmasked = << Code:16 >> = websocket_unmask(MaskedCode, MaskKey, <<>>),
+       if      Code < 1000; Code =:= 1004; Code =:= 1005; Code =:= 1006;
+                               (Code > 1011) and (Code < 3000); Code > 4999 ->
+                       websocket_close(State, Req, HandlerState, {error, badframe});
+               true ->
+                       websocket_payload(State, Req, HandlerState,
+                               Opcode, Len - 2, MaskKey, Unmasked, byte_size(MaskedCode),
+                               Rest, Rsv)
+       end;
+%% Text frames and close control frames MUST have a payload that is valid UTF-8.
+websocket_payload(State=#state{utf8_state=Incomplete},
+               Req, HandlerState, Opcode, Len, MaskKey, Unmasked, UnmaskedLen,
+               Data, Rsv)
+               when (byte_size(Data) < Len) andalso ((Opcode =:= 1) orelse
+                       ((Opcode =:= 8) andalso (Unmasked =/= <<>>))) ->
+       Unmasked2 = websocket_unmask(Data,
+               rotate_mask_key(MaskKey, UnmaskedLen), <<>>),
+       {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, false, State),
+       case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of
+               false ->
+                       websocket_close(State2, Req, HandlerState, {error, badencoding});
+               Utf8State ->
+                       websocket_payload_loop(State2#state{utf8_state=Utf8State},
+                               Req, HandlerState, Opcode, Len - byte_size(Data), MaskKey,
+                               << Unmasked/binary, Unmasked3/binary >>,
+                               UnmaskedLen + byte_size(Data), Rsv)
+       end;
+websocket_payload(State=#state{utf8_state=Incomplete},
+               Req, HandlerState, Opcode, Len, MaskKey, Unmasked, UnmaskedLen,
+               Data, Rsv)
+               when Opcode =:= 1; (Opcode =:= 8) and (Unmasked =/= <<>>) ->
+       << End:Len/binary, Rest/bits >> = Data,
+       Unmasked2 = websocket_unmask(End,
+               rotate_mask_key(MaskKey, UnmaskedLen), <<>>),
+       {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, true, State),
+       case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of
+               <<>> ->
+                       websocket_dispatch(State2#state{utf8_state= <<>>},
+                               Req, HandlerState, Rest, Opcode,
+                               << Unmasked/binary, Unmasked3/binary >>);
+               _ ->
+                       websocket_close(State2, Req, HandlerState, {error, badencoding})
+       end;
+%% Fragmented text frames may cut payload in the middle of UTF-8 codepoints.
+websocket_payload(State=#state{frag_state={_, 1, _}, utf8_state=Incomplete},
+               Req, HandlerState, Opcode=0, Len, MaskKey, Unmasked, UnmaskedLen,
+               Data, Rsv)
+               when byte_size(Data) < Len ->
+       Unmasked2 = websocket_unmask(Data,
+               rotate_mask_key(MaskKey, UnmaskedLen), <<>>),
+       {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, false, State),
+       case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of
+               false ->
+                       websocket_close(State2, Req, HandlerState, {error, badencoding});
+               Utf8State ->
+                       websocket_payload_loop(State2#state{utf8_state=Utf8State},
+                               Req, HandlerState, Opcode, Len - byte_size(Data), MaskKey,
+                               << Unmasked/binary, Unmasked3/binary >>,
+                               UnmaskedLen + byte_size(Data), Rsv)
+       end;
+websocket_payload(State=#state{frag_state={Fin, 1, _}, utf8_state=Incomplete},
+               Req, HandlerState, Opcode=0, Len, MaskKey, Unmasked, UnmaskedLen,
+               Data, Rsv) ->
+       << End:Len/binary, Rest/bits >> = Data,
+       Unmasked2 = websocket_unmask(End,
+               rotate_mask_key(MaskKey, UnmaskedLen), <<>>),
+       {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, Fin =:= fin, State),
+       case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of
+               <<>> ->
+                       websocket_dispatch(State2#state{utf8_state= <<>>},
+                               Req, HandlerState, Rest, Opcode,
+                               << Unmasked/binary, Unmasked3/binary >>);
+               Utf8State when is_binary(Utf8State), Fin =:= nofin ->
+                       websocket_dispatch(State2#state{utf8_state=Utf8State},
+                               Req, HandlerState, Rest, Opcode,
+                               << Unmasked/binary, Unmasked3/binary >>);
+               _ ->
+                       %% Fix: close with State2 (not State) so the updated
+                       %% inflate state is kept, consistent with the other
+                       %% badencoding branches above.
+                       websocket_close(State2, Req, HandlerState, {error, badencoding})
+       end;
+%% Other frames have a binary payload.
+websocket_payload(State, Req, HandlerState,
+               Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Data, Rsv)
+               when byte_size(Data) < Len ->
+       Unmasked2 = websocket_unmask(Data,
+               rotate_mask_key(MaskKey, UnmaskedLen), <<>>),
+       {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, false, State),
+       websocket_payload_loop(State2, Req, HandlerState,
+               Opcode, Len - byte_size(Data), MaskKey,
+               << Unmasked/binary, Unmasked3/binary >>, UnmaskedLen + byte_size(Data),
+               Rsv);
+websocket_payload(State, Req, HandlerState,
+               Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Data, Rsv) ->
+       << End:Len/binary, Rest/bits >> = Data,
+       Unmasked2 = websocket_unmask(End,
+               rotate_mask_key(MaskKey, UnmaskedLen), <<>>),
+       {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, true, State),
+       websocket_dispatch(State2, Req, HandlerState, Rest, Opcode,
+               << Unmasked/binary, Unmasked3/binary >>).
+
+-spec websocket_inflate_frame(binary(), rsv(), boolean(), #state{}) ->
+               {binary(), #state{}}.
+%% Inflate a frame payload when the deflate-frame extension is in use and
+%% the frame's RSV1 bit is set; otherwise pass the data through unchanged.
+websocket_inflate_frame(Data, << Rsv1:1, _:2 >>, _,
+               #state{deflate_frame = DeflateFrame} = State)
+               when DeflateFrame =:= false orelse Rsv1 =:= 0 ->
+       {Data, State};
+websocket_inflate_frame(Data, << 1:1, _:2 >>, false, State) ->
+       Result = zlib:inflate(State#state.inflate_state, Data),
+       {iolist_to_binary(Result), State};
+%% Final part of the frame: re-append the 0x00 0x00 0xFF 0xFF tail that the
+%% compressor stripped after its sync flush (see websocket_deflate_frame/3).
+websocket_inflate_frame(Data, << 1:1, _:2 >>, true, State) ->
+       Result = zlib:inflate(State#state.inflate_state,
+               << Data/binary, 0:8, 0:8, 255:8, 255:8 >>),
+       {iolist_to_binary(Result), State}.
+
+-spec websocket_unmask(B, mask_key(), B) -> B when B::binary().
+%% Unmask client payload data by XOR-ing it against the 32-bit mask key,
+%% four bytes at a time; the final 1-3 bytes use the leading bytes of the key.
+websocket_unmask(<<>>, _, Unmasked) ->
+       Unmasked;
+websocket_unmask(<< O:32, Rest/bits >>, MaskKey, Acc) ->
+       T = O bxor MaskKey,
+       websocket_unmask(Rest, MaskKey, << Acc/binary, T:32 >>);
+%% Fewer than 4 bytes remain: truncate the mask key to the same width.
+websocket_unmask(<< O:24 >>, MaskKey, Acc) ->
+       << MaskKey2:24, _:8 >> = << MaskKey:32 >>,
+       T = O bxor MaskKey2,
+       << Acc/binary, T:24 >>;
+websocket_unmask(<< O:16 >>, MaskKey, Acc) ->
+       << MaskKey2:16, _:16 >> = << MaskKey:32 >>,
+       T = O bxor MaskKey2,
+       << Acc/binary, T:16 >>;
+websocket_unmask(<< O:8 >>, MaskKey, Acc) ->
+       << MaskKey2:8, _:24 >> = << MaskKey:32 >>,
+       T = O bxor MaskKey2,
+       << Acc/binary, T:8 >>.
+
+%% Because we unmask on the fly we need to continue from the right mask byte.
+%% Rotates the 32-bit mask key left by (UnmaskedLen rem 4) bytes. The bsl can
+%% leave extra bits above bit 31; these are harmless because every consumer
+%% (websocket_unmask/3) truncates the key through fixed-width binary
+%% constructions such as << MaskKey:32 >> and T:32.
+-spec rotate_mask_key(mask_key(), non_neg_integer()) -> mask_key().
+rotate_mask_key(MaskKey, UnmaskedLen) ->
+       Left = UnmaskedLen rem 4,
+       Right = 4 - Left,
+       (MaskKey bsl (Left * 8)) + (MaskKey bsr (Right * 8)).
+
+%% Returns <<>> if the argument is valid UTF-8, false if not,
+%% or the incomplete part of the argument if we need more data.
+%% Used to validate text payloads incrementally across frame fragments.
+-spec is_utf8(binary()) -> false | binary().
+is_utf8(Valid = <<>>) ->
+       Valid;
+is_utf8(<< _/utf8, Rest/binary >>) ->
+       is_utf8(Rest);
+%% 2 bytes. Lead bytes C0 and C1 can only start overlong (hence invalid)
+%% encodings; fail early.
+is_utf8(<< 2#1100000:7, _/bits >>) ->
+       false;
+is_utf8(Incomplete = << 2#110:3, _:5 >>) ->
+       Incomplete;
+%% 3 bytes.
+is_utf8(Incomplete = << 2#1110:4, _:4 >>) ->
+       Incomplete;
+is_utf8(Incomplete = << 2#1110:4, _:4, 2#10:2, _:6 >>) ->
+       Incomplete;
+%% 4 bytes. Lead byte F4 may encode invalid values greater than 0x10FFFF.
+is_utf8(<< 2#11110100:8, 2#10:2, High:6, _/bits >>) when High >= 2#10000 ->
+       false;
+is_utf8(Incomplete = << 2#11110:5, _:3 >>) ->
+       Incomplete;
+is_utf8(Incomplete = << 2#11110:5, _:3, 2#10:2, _:6 >>) ->
+       Incomplete;
+is_utf8(Incomplete = << 2#11110:5, _:3, 2#10:2, _:6, 2#10:2, _:6 >>) ->
+       Incomplete;
+%% Invalid.
+is_utf8(_) ->
+       false.
+
+-spec websocket_payload_loop(#state{}, Req, any(),
+               opcode(), non_neg_integer(), mask_key(), binary(),
+               non_neg_integer(), rsv())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+%% Wait, in {active, once} mode, for more socket data to complete the
+%% current frame's payload; also services the inactivity timeout and
+%% out-of-band messages for the handler.
+websocket_payload_loop(State=#state{socket=Socket, transport=Transport,
+               messages={OK, Closed, Error}, timeout_ref=TRef},
+               Req, HandlerState, Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Rsv) ->
+       Transport:setopts(Socket, [{active, once}]),
+       receive
+               {OK, Socket, Data} ->
+                       State2 = handler_loop_timeout(State),
+                       websocket_payload(State2, Req, HandlerState,
+                               Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Data, Rsv);
+               {Closed, Socket} ->
+                       handler_terminate(State, Req, HandlerState, {error, closed});
+               {Error, Socket, Reason} ->
+                       handler_terminate(State, Req, HandlerState, {error, Reason});
+               {timeout, TRef, ?MODULE} ->
+                       websocket_close(State, Req, HandlerState, {normal, timeout});
+               %% Ignore fires from stale (already replaced) timer references.
+               {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) ->
+                       websocket_payload_loop(State, Req, HandlerState,
+                               Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Rsv);
+               %% Any other Erlang message goes to the handler's websocket_info/3.
+               Message ->
+                       handler_call(State, Req, HandlerState,
+                               <<>>, websocket_info, Message,
+                               fun (State2, Req2, HandlerState2, _) ->
+                                       websocket_payload_loop(State2, Req2, HandlerState2,
+                                               Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Rsv)
+                               end)
+       end.
+
+-spec websocket_dispatch(#state{}, Req, any(), binary(), opcode(), binary())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+%% Deliver a complete frame: buffer continuation fragments, hand data
+%% frames to the handler, and answer/forward control frames.
+%% Continuation frame.
+websocket_dispatch(State=#state{frag_state={nofin, Opcode, SoFar}},
+               Req, HandlerState, RemainingData, 0, Payload) ->
+       websocket_data(State#state{frag_state={nofin, Opcode,
+               << SoFar/binary, Payload/binary >>}}, Req, HandlerState, RemainingData);
+%% Last continuation frame: dispatch the reassembled payload under the
+%% opcode that was saved from the first fragment.
+websocket_dispatch(State=#state{frag_state={fin, Opcode, SoFar}},
+               Req, HandlerState, RemainingData, 0, Payload) ->
+       websocket_dispatch(State#state{frag_state=undefined}, Req, HandlerState,
+               RemainingData, Opcode, << SoFar/binary, Payload/binary >>);
+%% Text frame.
+websocket_dispatch(State, Req, HandlerState, RemainingData, 1, Payload) ->
+       handler_call(State, Req, HandlerState, RemainingData,
+               websocket_handle, {text, Payload}, fun websocket_data/4);
+%% Binary frame.
+websocket_dispatch(State, Req, HandlerState, RemainingData, 2, Payload) ->
+       handler_call(State, Req, HandlerState, RemainingData,
+               websocket_handle, {binary, Payload}, fun websocket_data/4);
+%% Close control frame.
+websocket_dispatch(State, Req, HandlerState, _RemainingData, 8, <<>>) ->
+       websocket_close(State, Req, HandlerState, {remote, closed});
+websocket_dispatch(State, Req, HandlerState, _RemainingData, 8,
+               << Code:16, Payload/bits >>) ->
+       websocket_close(State, Req, HandlerState, {remote, Code, Payload});
+%% Ping control frame. Send a pong back and forward the ping to the handler.
+websocket_dispatch(State=#state{socket=Socket, transport=Transport},
+               Req, HandlerState, RemainingData, 9, Payload) ->
+       Len = payload_length_to_binary(byte_size(Payload)),
+       Transport:send(Socket, << 1:1, 0:3, 10:4, 0:1, Len/bits, Payload/binary >>),
+       handler_call(State, Req, HandlerState, RemainingData,
+               websocket_handle, {ping, Payload}, fun websocket_data/4);
+%% Pong control frame.
+websocket_dispatch(State, Req, HandlerState, RemainingData, 10, Payload) ->
+       handler_call(State, Req, HandlerState, RemainingData,
+               websocket_handle, {pong, Payload}, fun websocket_data/4).
+
+-spec handler_call(#state{}, Req, any(), binary(), atom(), any(), fun())
+       -> {ok, Req, cowboy_middleware:env()}
+       | {suspend, module(), atom(), [any()]}
+       when Req::cowboy_req:req().
+%% Invoke Handler:Callback/3 and act on its result: continue via NextState,
+%% send any requested reply frame(s) first, or shut the connection down.
+%% A crash in the callback closes the socket with {error, handler} and is
+%% then re-raised with debugging context attached.
+handler_call(State=#state{handler=Handler}, Req, HandlerState,
+               RemainingData, Callback, Message, NextState) ->
+       try Handler:Callback(Message, Req, HandlerState) of
+               {ok, Req2, HandlerState2} ->
+                       NextState(State, Req2, HandlerState2, RemainingData);
+               {ok, Req2, HandlerState2, hibernate} ->
+                       NextState(State#state{hibernate=true},
+                               Req2, HandlerState2, RemainingData);
+               {reply, Payload, Req2, HandlerState2}
+                               when is_list(Payload) ->
+                       case websocket_send_many(Payload, State) of
+                               {ok, State2} ->
+                                       NextState(State2, Req2, HandlerState2, RemainingData);
+                               {shutdown, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2,
+                                               {normal, shutdown});
+                               {{error, _} = Error, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2, Error)
+                       end;
+               {reply, Payload, Req2, HandlerState2, hibernate}
+                               when is_list(Payload) ->
+                       case websocket_send_many(Payload, State) of
+                               {ok, State2} ->
+                                       NextState(State2#state{hibernate=true},
+                                               Req2, HandlerState2, RemainingData);
+                               {shutdown, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2,
+                                               {normal, shutdown});
+                               {{error, _} = Error, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2, Error)
+                       end;
+               {reply, Payload, Req2, HandlerState2} ->
+                       case websocket_send(Payload, State) of
+                               {ok, State2} ->
+                                       NextState(State2, Req2, HandlerState2, RemainingData);
+                               {shutdown, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2,
+                                               {normal, shutdown});
+                               {{error, _} = Error, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2, Error)
+                       end;
+               {reply, Payload, Req2, HandlerState2, hibernate} ->
+                       case websocket_send(Payload, State) of
+                               {ok, State2} ->
+                                       NextState(State2#state{hibernate=true},
+                                               Req2, HandlerState2, RemainingData);
+                               {shutdown, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2,
+                                               {normal, shutdown});
+                               {{error, _} = Error, State2} ->
+                                       handler_terminate(State2, Req2, HandlerState2, Error)
+                       end;
+               {shutdown, Req2, HandlerState2} ->
+                       websocket_close(State, Req2, HandlerState2, {normal, shutdown})
+       catch Class:Reason ->
+               _ = websocket_close(State, Req, HandlerState, {error, handler}),
+               %% Re-raise in the same class (error/exit/throw) with context.
+               erlang:Class([
+                       {reason, Reason},
+                       {mfa, {Handler, Callback, 3}},
+                       {stacktrace, erlang:get_stacktrace()},
+                       {msg, Message},
+                       {req, cowboy_req:to_list(Req)},
+                       {state, HandlerState}
+               ])
+       end.
+
+%% Map a frame type atom to its numeric opcode.
+websocket_opcode(text) -> 1;
+websocket_opcode(binary) -> 2;
+websocket_opcode(close) -> 8;
+websocket_opcode(ping) -> 9;
+websocket_opcode(pong) -> 10.
+
+-spec websocket_deflate_frame(opcode(), binary(), #state{}) ->
+       {binary(), rsv(), #state{}}.
+%% Deflate an outgoing payload when the deflate-frame extension is in use.
+%% Control frames (opcode >= 8) are never compressed.
+websocket_deflate_frame(Opcode, Payload,
+               State=#state{deflate_frame = DeflateFrame})
+               when DeflateFrame =:= false orelse Opcode >= 8 ->
+       {Payload, << 0:3 >>, State};
+websocket_deflate_frame(_, Payload, State=#state{deflate_state = Deflate}) ->
+       Deflated = iolist_to_binary(zlib:deflate(Deflate, Payload, sync)),
+       %% Strip the 0x00 0x00 0xFF 0xFF tail left by the sync flush; the
+       %% receiving side adds it back before inflating the final part
+       %% (see websocket_inflate_frame/4).
+       DeflatedBodyLength = erlang:size(Deflated) - 4,
+       Deflated1 = case Deflated of
+               << Body:DeflatedBodyLength/binary, 0:8, 0:8, 255:8, 255:8 >> -> Body;
+               _ -> Deflated
+       end,
+       %% A set RSV1 bit marks the frame as compressed.
+       {Deflated1, << 1:1, 0:2 >>, State}.
+
+-spec websocket_send(frame(), #state{})
+-> {ok, #state{}} | {shutdown, #state{}} | {{error, atom()}, #state{}}.
+%% Build and send a single frame. Sending a close frame returns shutdown
+%% so the caller terminates the connection afterwards.
+websocket_send(Type, State=#state{socket=Socket, transport=Transport})
+               when Type =:= close ->
+       Opcode = websocket_opcode(Type),
+       case Transport:send(Socket, << 1:1, 0:3, Opcode:4, 0:8 >>) of
+               ok -> {shutdown, State};
+               Error -> {Error, State}
+       end;
+%% Payload-less ping/pong.
+websocket_send(Type, State=#state{socket=Socket, transport=Transport})
+               when Type =:= ping; Type =:= pong ->
+       Opcode = websocket_opcode(Type),
+       {Transport:send(Socket, << 1:1, 0:3, Opcode:4, 0:8 >>), State};
+%% Close with a payload defaults to status code 1000.
+websocket_send({close, Payload}, State) ->
+       websocket_send({close, 1000, Payload}, State);
+websocket_send({Type = close, StatusCode, Payload}, State=#state{
+               socket=Socket, transport=Transport}) ->
+       Opcode = websocket_opcode(Type),
+       Len = 2 + iolist_size(Payload),
+       %% Control packets must not be > 125 in length.
+       true = Len =< 125,
+       BinLen = payload_length_to_binary(Len),
+       Transport:send(Socket,
+               [<< 1:1, 0:3, Opcode:4, 0:1, BinLen/bits, StatusCode:16 >>, Payload]),
+       {shutdown, State};
+%% Data (or payload-carrying ping/pong) frame; may be deflated.
+websocket_send({Type, Payload0}, State=#state{socket=Socket, transport=Transport}) ->
+       Opcode = websocket_opcode(Type),
+       {Payload, Rsv, State2} = websocket_deflate_frame(Opcode, iolist_to_binary(Payload0), State),
+       Len = iolist_size(Payload),
+       %% Control packets must not be > 125 in length.
+       true = if Type =:= ping; Type =:= pong ->
+                       Len =< 125;
+               true ->
+                       true
+       end,
+       BinLen = payload_length_to_binary(Len),
+       {Transport:send(Socket,
+               [<< 1:1, Rsv/bits, Opcode:4, 0:1, BinLen/bits >>, Payload]), State2}.
+
+-spec websocket_send_many([frame()], #state{})
+       -> {ok, #state{}} | {shutdown, #state{}} | {{error, atom()}, #state{}}.
+%% Send frames in order, stopping at the first shutdown or error result.
+websocket_send_many([], State) ->
+       {ok, State};
+websocket_send_many([Frame|Tail], State) ->
+       case websocket_send(Frame, State) of
+               {ok, State2} -> websocket_send_many(Tail, State2);
+               {shutdown, State2} -> {shutdown, State2};
+               {Error, State2} -> {Error, State2}
+       end.
+
+-spec websocket_close(#state{}, Req, any(), terminate_reason())
+       -> {ok, Req, cowboy_middleware:env()}
+       when Req::cowboy_req:req().
+%% Send a close frame whose status code reflects the termination reason
+%% (1000 normal, 1002 bad frame, 1007 bad encoding, 1011 handler crash;
+%% a remote close with a code echoes the peer's code), then terminate.
+websocket_close(State=#state{socket=Socket, transport=Transport},
+               Req, HandlerState, Reason) ->
+       case Reason of
+               {normal, _} ->
+                       Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1000:16 >>);
+               {error, badframe} ->
+                       Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1002:16 >>);
+               {error, badencoding} ->
+                       Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1007:16 >>);
+               {error, handler} ->
+                       Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1011:16 >>);
+               {remote, closed} ->
+                       Transport:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>);
+               {remote, Code, _} ->
+                       Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, Code:16 >>)
+       end,
+       handler_terminate(State, Req, HandlerState, Reason).
+
+-spec handler_terminate(#state{}, Req, any(), terminate_reason())
+       -> {ok, Req, cowboy_middleware:env()}
+       when Req::cowboy_req:req().
+%% Call the handler's websocket_terminate/3 callback, re-raising any crash
+%% with debugging context, then return the final middleware result.
+handler_terminate(#state{env=Env, handler=Handler},
+               Req, HandlerState, TerminateReason) ->
+       try
+               Handler:websocket_terminate(TerminateReason, Req, HandlerState)
+       catch Class:Reason ->
+               erlang:Class([
+                       {reason, Reason},
+                       {mfa, {Handler, websocket_terminate, 3}},
+                       {stacktrace, erlang:get_stacktrace()},
+                       {req, cowboy_req:to_list(Req)},
+                       {state, HandlerState},
+                       {terminate_reason, TerminateReason}
+               ])
+       end,
+       {ok, Req, [{result, closed}|Env]}.
+
+-spec payload_length_to_binary(0..16#7fffffffffffffff)
+       -> << _:7 >> | << _:23 >> | << _:71 >>.
+%% Encode a payload length in the smallest wire form: 7 bits for lengths
+%% up to 125, 126 + 16 bits up to 16#ffff, otherwise 127 + 64 bits.
+payload_length_to_binary(N) ->
+       case N of
+               N when N =< 125 -> << N:7 >>;
+               N when N =< 16#ffff -> << 126:7, N:16 >>;
+               N when N =< 16#7fffffffffffffff -> << 127:7, N:64 >>
+       end.
diff --git a/deps/cowboy/src/cowboy_websocket_handler.erl b/deps/cowboy/src/cowboy_websocket_handler.erl
new file mode 100644 (file)
index 0000000..177e5f6
--- /dev/null
@@ -0,0 +1,50 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% Behaviour (callback specifications) that cowboy websocket handler
+%% modules must implement.
+-module(cowboy_websocket_handler).
+
+-type opts() :: any().
+-type state() :: any().
+%% Reasons passed to websocket_terminate/3.
+-type terminate_reason() :: {normal, shutdown}
+       | {normal, timeout}
+       | {error, closed}
+       | {remote, closed}
+       | {remote, cowboy_websocket:close_code(), binary()}
+       | {error, badencoding}
+       | {error, badframe}
+       | {error, atom()}.
+
+-callback websocket_init(atom(), Req, opts())
+       -> {ok, Req, state()}
+       | {ok, Req, state(), hibernate}
+       | {ok, Req, state(), timeout()}
+       | {ok, Req, state(), timeout(), hibernate}
+       | {shutdown, Req}
+       when Req::cowboy_req:req().
+-callback websocket_handle({text | binary | ping | pong, binary()}, Req, State)
+       -> {ok, Req, State}
+       | {ok, Req, State, hibernate}
+       | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State}
+       | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State, hibernate}
+       | {shutdown, Req, State}
+       when Req::cowboy_req:req(), State::state().
+-callback websocket_info(any(), Req, State)
+       -> {ok, Req, State}
+       | {ok, Req, State, hibernate}
+       | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State}
+       | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State, hibernate}
+       | {shutdown, Req, State}
+       when Req::cowboy_req:req(), State::state().
+-callback websocket_terminate(terminate_reason(), cowboy_req:req(), state())
+       -> ok.
diff --git a/deps/cowlib/AUTHORS b/deps/cowlib/AUTHORS
new file mode 100644 (file)
index 0000000..824ec87
--- /dev/null
@@ -0,0 +1,4 @@
+Cowlib is available thanks to the work of:
+
+Loïc Hoguin
+Mikkel Jensen
diff --git a/deps/cowlib/CHANGELOG.md b/deps/cowlib/CHANGELOG.md
new file mode 100644 (file)
index 0000000..88146e9
--- /dev/null
@@ -0,0 +1,12 @@
+CHANGELOG
+=========
+
+1.0.1
+-----
+
+ *  Multipart: no line break after close delimiter
+
+1.0.0
+-----
+
+ *  Initial release.
diff --git a/deps/cowlib/LICENSE b/deps/cowlib/LICENSE
new file mode 100644 (file)
index 0000000..e43ab78
--- /dev/null
@@ -0,0 +1,13 @@
+Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/cowlib/Makefile b/deps/cowlib/Makefile
new file mode 100644 (file)
index 0000000..8b1d1b4
--- /dev/null
@@ -0,0 +1,43 @@
+# See LICENSE for licensing information.
+
+PROJECT = cowlib
+PLT_APPS = crypto
+
+include erlang.mk
+
+.PHONY: gen perfs
+
+# Mimetypes module generator.
+
+GEN_URL = http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types
+GEN_SRC = src/cow_mimetypes.erl.src
+GEN_OUT = src/cow_mimetypes.erl
+
+# Regenerate src/cow_mimetypes.erl: keep the template up to the
+# "%% GENERATED" marker, splice in all_ext/1 clauses built from the
+# Apache mime.types list, then append the template's tail.
+gen:
+       $(gen_verbose) cat $(GEN_SRC) \
+               | head -n `grep -n "%% GENERATED" $(GEN_SRC) | cut -d : -f 1` \
+               > $(GEN_OUT)
+       $(gen_verbose) wget -qO - $(GEN_URL) \
+               | grep -v ^# \
+               | awk '{for (i=2; i<=NF; i++) if ($$i != "") { \
+                       split($$1, a, "/"); \
+                       print "all_ext(<<\"" $$i "\">>) -> {<<\"" \
+                               a[1] "\">>, <<\"" a[2] "\">>, []};"}}' \
+               | sort \
+               | uniq -w 25 \
+               >> $(GEN_OUT)
+       $(gen_verbose) cat $(GEN_SRC) \
+               | tail -n +`grep -n "%% GENERATED" $(GEN_SRC) | cut -d : -f 1` \
+               >> $(GEN_OUT)
+
+# Performance testing.
+
+# Clone and build the horse benchmarking tool on demand.
+deps/horse:
+       git clone -n -- https://github.com/extend/horse $(DEPS_DIR)/horse
+       cd $(DEPS_DIR)/horse ; git checkout -q master
+       $(MAKE) -C $(DEPS_DIR)/horse
+
+# Build with horse's parse transform enabled and run the benchmarks.
+perfs: ERLC_OPTS += -DPERF=1 +'{parse_transform, horse_autoexport}' -DEXTRA=1
+perfs: clean deps deps/horse app
+       $(gen_verbose) erl -noshell -pa ebin deps/horse/ebin \
+               -eval 'horse:app_perf($(PROJECT)), init:stop().'
diff --git a/deps/cowlib/README.md b/deps/cowlib/README.md
new file mode 100644 (file)
index 0000000..9523836
--- /dev/null
@@ -0,0 +1,20 @@
+Cowlib
+======
+
+Cowlib is a support library for manipulating Web protocols.
+
+Goals
+-----
+
+Cowlib provides libraries for parsing and building messages
+for various Web protocols, including SPDY, HTTP and Websocket.
+
+It is optimized for completeness rather than speed. No value
+is ignored; they are all returned.
+
+Support
+-------
+
+ *  Official IRC Channel: #ninenines on irc.freenode.net
+ *  [Mailing Lists](http://lists.ninenines.eu)
+ *  [Commercial Support](http://ninenines.eu/support)
diff --git a/deps/cowlib/all.sh b/deps/cowlib/all.sh
new file mode 100755 (executable)
index 0000000..fa9dd16
--- /dev/null
@@ -0,0 +1,17 @@
+#!/bin/sh
+# Run the test suites against every kerl-installed Erlang release listed
+# below, labelling each common_test run with the release name, then open
+# the combined results page.
+
+KERL_INSTALL_PATH=~/erlang
+KERL_RELEASES="r15b r15b01 r15b02 r15b03 r16b r16b01 r16b02 r16b03-1 17.0 17.1.2"
+
+make build-ct-suites
+
+# Intentional word-splitting: KERL_RELEASES is a whitespace-separated list.
+for rel in $KERL_RELEASES
+do
+       echo
+       echo "    TESTING $rel"
+       echo
+       # Quote the sourced path so an install prefix containing spaces or
+       # glob characters still works (ShellCheck SC1090/SC2086).
+       . "$KERL_INSTALL_PATH/$rel/activate"
+       CT_OPTS="-label $rel" make tests
+done
+
+xdg-open logs/all_runs.html
diff --git a/deps/cowlib/build.config b/deps/cowlib/build.config
new file mode 100644 (file)
index 0000000..87fd50d
--- /dev/null
@@ -0,0 +1,20 @@
+# Core modules.
+#
+# Do *not* comment or remove them
+# unless you know what you are doing!
+core/core
+core/deps
+core/erlc
+
+# Plugins.
+#
+# Comment to disable, uncomment to enable.
+plugins/bootstrap
+#plugins/c_src
+plugins/ct
+plugins/dialyzer
+#plugins/edoc
+plugins/elvis
+#plugins/erlydtl
+#plugins/relx
+plugins/shell
diff --git a/deps/cowlib/erlang.mk b/deps/cowlib/erlang.mk
new file mode 100644 (file)
index 0000000..8930dfc
--- /dev/null
@@ -0,0 +1 @@
+include ../../erlang.mk
diff --git a/deps/cowlib/include/cow_inline.hrl b/deps/cowlib/include/cow_inline.hrl
new file mode 100644 (file)
index 0000000..36a3558
--- /dev/null
@@ -0,0 +1,388 @@
+%% Copyright (c) 2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-ifndef(COW_INLINE_HRL).
+-define(COW_INLINE_HRL, 1).
+
+%% INLINE_LOWERCASE(Function, Rest, Acc, ...)
+%%
+%% To be included at the end of a case block.
+%% Defined for up to 10 extra arguments.
+
+-define(INLINE_LOWERCASE(Function, Rest, Acc),
+       $A -> Function(Rest, << Acc/binary, $a >>);
+       $B -> Function(Rest, << Acc/binary, $b >>);
+       $C -> Function(Rest, << Acc/binary, $c >>);
+       $D -> Function(Rest, << Acc/binary, $d >>);
+       $E -> Function(Rest, << Acc/binary, $e >>);
+       $F -> Function(Rest, << Acc/binary, $f >>);
+       $G -> Function(Rest, << Acc/binary, $g >>);
+       $H -> Function(Rest, << Acc/binary, $h >>);
+       $I -> Function(Rest, << Acc/binary, $i >>);
+       $J -> Function(Rest, << Acc/binary, $j >>);
+       $K -> Function(Rest, << Acc/binary, $k >>);
+       $L -> Function(Rest, << Acc/binary, $l >>);
+       $M -> Function(Rest, << Acc/binary, $m >>);
+       $N -> Function(Rest, << Acc/binary, $n >>);
+       $O -> Function(Rest, << Acc/binary, $o >>);
+       $P -> Function(Rest, << Acc/binary, $p >>);
+       $Q -> Function(Rest, << Acc/binary, $q >>);
+       $R -> Function(Rest, << Acc/binary, $r >>);
+       $S -> Function(Rest, << Acc/binary, $s >>);
+       $T -> Function(Rest, << Acc/binary, $t >>);
+       $U -> Function(Rest, << Acc/binary, $u >>);
+       $V -> Function(Rest, << Acc/binary, $v >>);
+       $W -> Function(Rest, << Acc/binary, $w >>);
+       $X -> Function(Rest, << Acc/binary, $x >>);
+       $Y -> Function(Rest, << Acc/binary, $y >>);
+       $Z -> Function(Rest, << Acc/binary, $z >>);
+       C -> Function(Rest, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, Acc),
+       $A -> Function(Rest, A0, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, Acc),
+       $A -> Function(Rest, A0, A1, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, Acc),
+       $A -> Function(Rest, A0, A1, A2, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, Acc),
+       $A -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, A3, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, Acc),
+       $A -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, Acc),
+       $A -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, Acc),
+       $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, Acc),
+       $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, Acc),
+       $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, C >>)
+).
+
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, Acc),
+       $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $a >>);
+       $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $b >>);
+       $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $c >>);
+       $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $d >>);
+       $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $e >>);
+       $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $f >>);
+       $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $g >>);
+       $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $h >>);
+       $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $i >>);
+       $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $j >>);
+       $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $k >>);
+       $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $l >>);
+       $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $m >>);
+       $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $n >>);
+       $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $o >>);
+       $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $p >>);
+       $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $q >>);
+       $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $r >>);
+       $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $s >>);
+       $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $t >>);
+       $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $u >>);
+       $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $v >>);
+       $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $w >>);
+       $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $x >>);
+       $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $y >>);
+       $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $z >>);
+       C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, C >>)
+).
+
+%% INLINE_LOWERCASE_BC(Bin)
+%%
+%% Lowercase the entire binary string in a binary comprehension.
+
+-define(INLINE_LOWERCASE_BC(Bin),
+       << << case C of
+               $A -> $a;
+               $B -> $b;
+               $C -> $c;
+               $D -> $d;
+               $E -> $e;
+               $F -> $f;
+               $G -> $g;
+               $H -> $h;
+               $I -> $i;
+               $J -> $j;
+               $K -> $k;
+               $L -> $l;
+               $M -> $m;
+               $N -> $n;
+               $O -> $o;
+               $P -> $p;
+               $Q -> $q;
+               $R -> $r;
+               $S -> $s;
+               $T -> $t;
+               $U -> $u;
+               $V -> $v;
+               $W -> $w;
+               $X -> $x;
+               $Y -> $y;
+               $Z -> $z;
+               C -> C
+       end >> || << C >> <= Bin >>).
+
+-endif.
diff --git a/deps/cowlib/src/cow_cookie.erl b/deps/cowlib/src/cow_cookie.erl
new file mode 100644 (file)
index 0000000..6db89be
--- /dev/null
@@ -0,0 +1,267 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_cookie).
+
+-export([parse_cookie/1]).
+-export([setcookie/3]).
+
+-type cookie_option() :: {max_age, non_neg_integer()}
+       | {domain, binary()} | {path, binary()}
+       | {secure, boolean()} | {http_only, boolean()}.
+-type cookie_opts() :: [cookie_option()].
+-export_type([cookie_opts/0]).
+
+%% @doc Parse a cookie header string and return a list of key/values.
+
+-spec parse_cookie(binary()) -> [{binary(), binary()}] | {error, badarg}.
+parse_cookie(Cookie) ->
+       parse_cookie(Cookie, []).
+
+parse_cookie(<<>>, Acc) ->
+       lists:reverse(Acc);
+parse_cookie(<< $\s, Rest/binary >>, Acc) ->
+       parse_cookie(Rest, Acc);
+parse_cookie(<< $\t, Rest/binary >>, Acc) ->
+       parse_cookie(Rest, Acc);
+parse_cookie(<< $,, Rest/binary >>, Acc) ->
+       parse_cookie(Rest, Acc);
+parse_cookie(<< $;, Rest/binary >>, Acc) ->
+       parse_cookie(Rest, Acc);
+parse_cookie(<< $$, Rest/binary >>, Acc) ->
+       skip_cookie(Rest, Acc);
+parse_cookie(Cookie, Acc) ->
+       parse_cookie_name(Cookie, Acc, <<>>).
+
+skip_cookie(<<>>, Acc) ->
+       lists:reverse(Acc);
+skip_cookie(<< $,, Rest/binary >>, Acc) ->
+       parse_cookie(Rest, Acc);
+skip_cookie(<< $;, Rest/binary >>, Acc) ->
+       parse_cookie(Rest, Acc);
+skip_cookie(<< _, Rest/binary >>, Acc) ->
+       skip_cookie(Rest, Acc).
+
+parse_cookie_name(<<>>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $=, _/binary >>, _, <<>>) ->
+       {error, badarg};
+parse_cookie_name(<< $=, Rest/binary >>, Acc, Name) ->
+       parse_cookie_value(Rest, Acc, Name, <<>>);
+parse_cookie_name(<< $,, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $;, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $\s, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $\t, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $\r, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $\n, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $\013, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< $\014, _/binary >>, _, _) ->
+       {error, badarg};
+parse_cookie_name(<< C, Rest/binary >>, Acc, Name) ->
+       parse_cookie_name(Rest, Acc, << Name/binary, C >>).
+
+parse_cookie_value(<<>>, Acc, Name, Value) ->
+       lists:reverse([{Name, parse_cookie_trim(Value)}|Acc]);
+parse_cookie_value(<< $;, Rest/binary >>, Acc, Name, Value) ->
+       parse_cookie(Rest, [{Name, parse_cookie_trim(Value)}|Acc]);
+parse_cookie_value(<< $\t, _/binary >>, _, _, _) ->
+       {error, badarg};
+parse_cookie_value(<< $\r, _/binary >>, _, _, _) ->
+       {error, badarg};
+parse_cookie_value(<< $\n, _/binary >>, _, _, _) ->
+       {error, badarg};
+parse_cookie_value(<< $\013, _/binary >>, _, _, _) ->
+       {error, badarg};
+parse_cookie_value(<< $\014, _/binary >>, _, _, _) ->
+       {error, badarg};
+parse_cookie_value(<< C, Rest/binary >>, Acc, Name, Value) ->
+       parse_cookie_value(Rest, Acc, Name, << Value/binary, C >>).
+
+parse_cookie_trim(Value = <<>>) ->
+       Value;
+parse_cookie_trim(Value) ->
+       case binary:last(Value) of
+               $\s ->
+                       Size = byte_size(Value) - 1,
+                       << Value2:Size/binary, _ >> = Value,
+                       parse_cookie_trim(Value2);
+               _ ->
+                       Value
+       end.
+
+-ifdef(TEST).
+parse_cookie_test_() ->
+       %% {Value, Result}.
+       Tests = [
+               {<<"name=value; name2=value2">>, [
+                       {<<"name">>, <<"value">>},
+                       {<<"name2">>, <<"value2">>}
+               ]},
+               {<<"$Version=1; Customer=WILE_E_COYOTE; $Path=/acme">>, [
+                       {<<"Customer">>, <<"WILE_E_COYOTE">>}
+               ]},
+               {<<"$Version=1; Customer=WILE_E_COYOTE; $Path=/acme; "
+                       "Part_Number=Rocket_Launcher_0001; $Path=/acme; "
+                       "Shipping=FedEx; $Path=/acme">>, [
+                       {<<"Customer">>, <<"WILE_E_COYOTE">>},
+                       {<<"Part_Number">>, <<"Rocket_Launcher_0001">>},
+                       {<<"Shipping">>, <<"FedEx">>}
+               ]},
+               %% Space in value.
+               {<<"foo=Thu Jul 11 2013 15:38:43 GMT+0400 (MSK)">>,
+                       [{<<"foo">>, <<"Thu Jul 11 2013 15:38:43 GMT+0400 (MSK)">>}]},
+               %% Comma in value. Google Analytics sets that kind of cookies.
+               {<<"refk=sOUZDzq2w2; sk=B602064E0139D842D620C7569640DBB4C81C45080651"
+                       "9CC124EF794863E10E80; __utma=64249653.825741573.1380181332.1400"
+                       "015657.1400019557.703; __utmb=64249653.1.10.1400019557; __utmc="
+                       "64249653; __utmz=64249653.1400019557.703.13.utmcsr=bluesky.chic"
+                       "agotribune.com|utmccn=(referral)|utmcmd=referral|utmcct=/origin"
+                       "als/chi-12-indispensable-digital-tools-bsi,0,0.storygallery">>, [
+                               {<<"refk">>, <<"sOUZDzq2w2">>},
+                               {<<"sk">>, <<"B602064E0139D842D620C7569640DBB4C81C45080651"
+                                       "9CC124EF794863E10E80">>},
+                               {<<"__utma">>, <<"64249653.825741573.1380181332.1400"
+                                       "015657.1400019557.703">>},
+                               {<<"__utmb">>, <<"64249653.1.10.1400019557">>},
+                               {<<"__utmc">>, <<"64249653">>},
+                               {<<"__utmz">>, <<"64249653.1400019557.703.13.utmcsr=bluesky.chic"
+                                       "agotribune.com|utmccn=(referral)|utmcmd=referral|utmcct=/origin"
+                                       "als/chi-12-indispensable-digital-tools-bsi,0,0.storygallery">>}
+               ]},
+               %% Potential edge cases (initially from Mochiweb).
+               {<<"foo=\\x">>, [{<<"foo">>, <<"\\x">>}]},
+               {<<"=">>, {error, badarg}},
+               {<<"  foo ; bar  ">>, {error, badarg}},
+               {<<"foo=;bar=">>, [{<<"foo">>, <<>>}, {<<"bar">>, <<>>}]},
+               {<<"foo=\\\";;bar ">>, {error, badarg}},
+               {<<"foo=\\\";;bar=good ">>,
+                       [{<<"foo">>, <<"\\\"">>}, {<<"bar">>, <<"good">>}]},
+               {<<"foo=\"\\\";bar">>, {error, badarg}},
+               {<<>>, []},
+               {<<"foo=bar , baz=wibble ">>, [{<<"foo">>, <<"bar , baz=wibble">>}]}
+       ],
+       [{V, fun() -> R = parse_cookie(V) end} || {V, R} <- Tests].
+-endif.
+
+%% @doc Convert a cookie name, value and options to its iodata form.
+%% @end
+%%
+%% Initially from Mochiweb:
+%%   * Copyright 2007 Mochi Media, Inc.
+%% Initial binary implementation:
+%%   * Copyright 2011 Thomas Burdick <thomas.burdick@gmail.com>
+
+-spec setcookie(iodata(), iodata(), cookie_opts()) -> iodata().
+%% Render a Set-Cookie header value from Name, Value and options.
+%% Validation is assertion-style: a forbidden character in Name or Value
+%% causes a badmatch crash rather than an error return.
+setcookie(Name, Value, Opts) ->
+	%% Name may not contain '=', ',', ';', whitespace or VT/FF controls.
+	nomatch = binary:match(iolist_to_binary(Name), [<<$=>>, <<$,>>, <<$;>>,
+			<<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]),
+	%% Value has the same restrictions except that '=' is allowed.
+	nomatch = binary:match(iolist_to_binary(Value), [<<$,>>, <<$;>>,
+			<<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]),
+	MaxAgeBin = case lists:keyfind(max_age, 1, Opts) of
+		false -> <<>>;
+		{_, 0} ->
+			%% MSIE requires an Expires date in the past to delete a cookie.
+			<<"; Expires=Thu, 01-Jan-1970 00:00:01 GMT; Max-Age=0">>;
+		{_, MaxAge} when is_integer(MaxAge), MaxAge > 0 ->
+			%% Expires is derived from current UTC time plus Max-Age so
+			%% clients that ignore Max-Age still expire the cookie.
+			UTC = calendar:universal_time(),
+			Secs = calendar:datetime_to_gregorian_seconds(UTC),
+			Expires = calendar:gregorian_seconds_to_datetime(Secs + MaxAge),
+			[<<"; Expires=">>, cow_date:rfc2109(Expires),
+				<<"; Max-Age=">>, integer_to_list(MaxAge)]
+	end,
+	DomainBin = case lists:keyfind(domain, 1, Opts) of
+		false -> <<>>;
+		{_, Domain} -> [<<"; Domain=">>, Domain]
+	end,
+	PathBin = case lists:keyfind(path, 1, Opts) of
+		false -> <<>>;
+		{_, Path} -> [<<"; Path=">>, Path]
+	end,
+	%% NOTE(review): only 'false' and '{_, true}' are matched below, so an
+	%% explicit {secure, false} or {http_only, false} option crashes with
+	%% case_clause — confirm this is the intended contract.
+	SecureBin = case lists:keyfind(secure, 1, Opts) of
+		false -> <<>>;
+		{_, true} -> <<"; Secure">>
+	end,
+	HttpOnlyBin = case lists:keyfind(http_only, 1, Opts) of
+		false -> <<>>;
+		{_, true} -> <<"; HttpOnly">>
+	end,
+	[Name, <<"=">>, Value, <<"; Version=1">>,
+		MaxAgeBin, DomainBin, PathBin, SecureBin, HttpOnlyBin].
+
+-ifdef(TEST).
+%% {Name, Value, Opts, ExpectedHeader} quadruples; unknown options such as
+%% 'badoption' must be silently ignored.
+setcookie_test_() ->
+	%% {Name, Value, Opts, Result}
+	Tests = [
+		{<<"Customer">>, <<"WILE_E_COYOTE">>,
+			[{http_only, true}, {domain, <<"acme.com">>}],
+			<<"Customer=WILE_E_COYOTE; Version=1; "
+				"Domain=acme.com; HttpOnly">>},
+		{<<"Customer">>, <<"WILE_E_COYOTE">>,
+			[{path, <<"/acme">>}],
+			<<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>},
+		{<<"Customer">>, <<"WILE_E_COYOTE">>,
+			[{path, <<"/acme">>}, {badoption, <<"negatory">>}],
+			<<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>}
+	],
+	[{R, fun() -> R = iolist_to_binary(setcookie(N, V, O)) end}
+		|| {N, V, O, R} <- Tests].
+
+%% Splitting on ";" lets each attribute be pattern-matched separately,
+%% including the computed "Expires=" prefix whose exact date varies.
+setcookie_max_age_test() ->
+	F = fun(N, V, O) ->
+		binary:split(iolist_to_binary(
+			setcookie(N, V, O)), <<";">>, [global])
+	end,
+	[<<"Customer=WILE_E_COYOTE">>,
+		<<" Version=1">>,
+		<<" Expires=", _/binary>>,
+		<<" Max-Age=111">>,
+		<<" Secure">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>,
+			[{max_age, 111}, {secure, true}]),
+	%% Negative max_age matches no clause of the max_age case.
+	case catch F(<<"Customer">>, <<"WILE_E_COYOTE">>, [{max_age, -111}]) of
+		{'EXIT', {{case_clause, {max_age, -111}}, _}} -> ok
+	end,
+	[<<"Customer=WILE_E_COYOTE">>,
+		<<" Version=1">>,
+		<<" Expires=", _/binary>>,
+		<<" Max-Age=86417">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>,
+		 [{max_age, 86417}]),
+	ok.
+
+%% Each pair contains a forbidden character and must make setcookie/3 crash.
+setcookie_failures_test_() ->
+	F = fun(N, V) ->
+		try setcookie(N, V, []) of
+			_ ->
+				false
+		catch _:_ ->
+			true
+		end
+	end,
+	Tests = [
+		{<<"Na=me">>, <<"Value">>},
+		{<<"Name;">>, <<"Value">>},
+		{<<"\r\name">>, <<"Value">>},
+		{<<"Name">>, <<"Value;">>},
+		{<<"Name">>, <<"\value">>}
+	],
+	[{iolist_to_binary(io_lib:format("{~p, ~p} failure", [N, V])),
+		fun() -> true = F(N, V) end}
+		|| {N, V} <- Tests].
+-endif.
diff --git a/deps/cowlib/src/cow_date.erl b/deps/cowlib/src/cow_date.erl
new file mode 100644 (file)
index 0000000..f794c82
--- /dev/null
@@ -0,0 +1,206 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_date).
+
+-export([rfc2109/1]).
+
+%% @doc Return the date formatted according to RFC2109.
+
+-spec rfc2109(calendar:datetime()) -> binary().
+%% Format a datetime as an RFC 2109 cookie date, e.g.
+%% <<"Sat, 14-May-2011 14:25:33 GMT">> — note the '-' separators between
+%% day, month and year, unlike the RFC 1123 date format.
+rfc2109({Date = {Y, Mo, D}, {H, Mi, S}}) ->
+	Wday = calendar:day_of_the_week(Date),
+	<<	(weekday(Wday))/binary, ", ",
+		(pad_int(D))/binary, "-",
+		(month(Mo))/binary, "-",
+		(year(Y))/binary, " ",
+		(pad_int(H))/binary, ":",
+		(pad_int(Mi))/binary, ":",
+		(pad_int(S))/binary, " GMT" >>.
+
+-ifdef(TEST).
+rfc2109_test_() ->
+	Tests = [
+		{<<"Sat, 14-May-2011 14:25:33 GMT">>, {{2011, 5, 14}, {14, 25, 33}}},
+		{<<"Sun, 01-Jan-2012 00:00:00 GMT">>, {{2012, 1,  1}, { 0,  0,  0}}}
+	],
+	[{R, fun() -> R = rfc2109(D) end} || {R, D} <- Tests].
+-endif.
+
+-ifdef(PERF).
+%% Microbenchmarks for rfc2109/1 (via the horse runner, which picks up all
+%% "horse_"-prefixed functions). The names previously read "rfc2019";
+%% renamed to match the RFC number the function actually implements.
+horse_rfc2109_20130101_000000() ->
+	horse:repeat(100000,
+		rfc2109({{2013, 1, 1}, {0, 0, 0}})
+	).
+
+horse_rfc2109_20131231_235959() ->
+	horse:repeat(100000,
+		rfc2109({{2013, 12, 31}, {23, 59, 59}})
+	).
+
+horse_rfc2109_12340506_070809() ->
+	horse:repeat(100000,
+		rfc2109({{1234, 5, 6}, {7, 8, 9}})
+	).
+-endif.
+
+%% Internal.
+
+-spec pad_int(0..59) -> <<_:16>>.
+%% Two-digit zero-padded rendering of 0..59. Implemented as an exhaustive
+%% clause table so each result is a shared literal binary (see the PERF
+%% benchmarks above); out-of-range input crashes with function_clause.
+pad_int( 0) -> <<"00">>;
+pad_int( 1) -> <<"01">>;
+pad_int( 2) -> <<"02">>;
+pad_int( 3) -> <<"03">>;
+pad_int( 4) -> <<"04">>;
+pad_int( 5) -> <<"05">>;
+pad_int( 6) -> <<"06">>;
+pad_int( 7) -> <<"07">>;
+pad_int( 8) -> <<"08">>;
+pad_int( 9) -> <<"09">>;
+pad_int(10) -> <<"10">>;
+pad_int(11) -> <<"11">>;
+pad_int(12) -> <<"12">>;
+pad_int(13) -> <<"13">>;
+pad_int(14) -> <<"14">>;
+pad_int(15) -> <<"15">>;
+pad_int(16) -> <<"16">>;
+pad_int(17) -> <<"17">>;
+pad_int(18) -> <<"18">>;
+pad_int(19) -> <<"19">>;
+pad_int(20) -> <<"20">>;
+pad_int(21) -> <<"21">>;
+pad_int(22) -> <<"22">>;
+pad_int(23) -> <<"23">>;
+pad_int(24) -> <<"24">>;
+pad_int(25) -> <<"25">>;
+pad_int(26) -> <<"26">>;
+pad_int(27) -> <<"27">>;
+pad_int(28) -> <<"28">>;
+pad_int(29) -> <<"29">>;
+pad_int(30) -> <<"30">>;
+pad_int(31) -> <<"31">>;
+pad_int(32) -> <<"32">>;
+pad_int(33) -> <<"33">>;
+pad_int(34) -> <<"34">>;
+pad_int(35) -> <<"35">>;
+pad_int(36) -> <<"36">>;
+pad_int(37) -> <<"37">>;
+pad_int(38) -> <<"38">>;
+pad_int(39) -> <<"39">>;
+pad_int(40) -> <<"40">>;
+pad_int(41) -> <<"41">>;
+pad_int(42) -> <<"42">>;
+pad_int(43) -> <<"43">>;
+pad_int(44) -> <<"44">>;
+pad_int(45) -> <<"45">>;
+pad_int(46) -> <<"46">>;
+pad_int(47) -> <<"47">>;
+pad_int(48) -> <<"48">>;
+pad_int(49) -> <<"49">>;
+pad_int(50) -> <<"50">>;
+pad_int(51) -> <<"51">>;
+pad_int(52) -> <<"52">>;
+pad_int(53) -> <<"53">>;
+pad_int(54) -> <<"54">>;
+pad_int(55) -> <<"55">>;
+pad_int(56) -> <<"56">>;
+pad_int(57) -> <<"57">>;
+pad_int(58) -> <<"58">>;
+pad_int(59) -> <<"59">>.
+
+-spec weekday(1..7) -> <<_:24>>.
+%% Weekday abbreviation, indexed as calendar:day_of_the_week/1 (1 = Monday).
+weekday(1) -> <<"Mon">>;
+weekday(2) -> <<"Tue">>;
+weekday(3) -> <<"Wed">>;
+weekday(4) -> <<"Thu">>;
+weekday(5) -> <<"Fri">>;
+weekday(6) -> <<"Sat">>;
+weekday(7) -> <<"Sun">>.
+
+-spec month(1..12) -> <<_:24>>.
+%% Month abbreviation, 1 = January.
+month( 1) -> <<"Jan">>;
+month( 2) -> <<"Feb">>;
+month( 3) -> <<"Mar">>;
+month( 4) -> <<"Apr">>;
+month( 5) -> <<"May">>;
+month( 6) -> <<"Jun">>;
+month( 7) -> <<"Jul">>;
+month( 8) -> <<"Aug">>;
+month( 9) -> <<"Sep">>;
+month(10) -> <<"Oct">>;
+month(11) -> <<"Nov">>;
+month(12) -> <<"Dec">>.
+
+-spec year(pos_integer()) -> <<_:32>>.
+%% Years 1970..2029 are returned as shared literal binaries; any other
+%% year falls through to the (slower) runtime conversion in the last clause.
+year(1970) -> <<"1970">>;
+year(1971) -> <<"1971">>;
+year(1972) -> <<"1972">>;
+year(1973) -> <<"1973">>;
+year(1974) -> <<"1974">>;
+year(1975) -> <<"1975">>;
+year(1976) -> <<"1976">>;
+year(1977) -> <<"1977">>;
+year(1978) -> <<"1978">>;
+year(1979) -> <<"1979">>;
+year(1980) -> <<"1980">>;
+year(1981) -> <<"1981">>;
+year(1982) -> <<"1982">>;
+year(1983) -> <<"1983">>;
+year(1984) -> <<"1984">>;
+year(1985) -> <<"1985">>;
+year(1986) -> <<"1986">>;
+year(1987) -> <<"1987">>;
+year(1988) -> <<"1988">>;
+year(1989) -> <<"1989">>;
+year(1990) -> <<"1990">>;
+year(1991) -> <<"1991">>;
+year(1992) -> <<"1992">>;
+year(1993) -> <<"1993">>;
+year(1994) -> <<"1994">>;
+year(1995) -> <<"1995">>;
+year(1996) -> <<"1996">>;
+year(1997) -> <<"1997">>;
+year(1998) -> <<"1998">>;
+year(1999) -> <<"1999">>;
+year(2000) -> <<"2000">>;
+year(2001) -> <<"2001">>;
+year(2002) -> <<"2002">>;
+year(2003) -> <<"2003">>;
+year(2004) -> <<"2004">>;
+year(2005) -> <<"2005">>;
+year(2006) -> <<"2006">>;
+year(2007) -> <<"2007">>;
+year(2008) -> <<"2008">>;
+year(2009) -> <<"2009">>;
+year(2010) -> <<"2010">>;
+year(2011) -> <<"2011">>;
+year(2012) -> <<"2012">>;
+year(2013) -> <<"2013">>;
+year(2014) -> <<"2014">>;
+year(2015) -> <<"2015">>;
+year(2016) -> <<"2016">>;
+year(2017) -> <<"2017">>;
+year(2018) -> <<"2018">>;
+year(2019) -> <<"2019">>;
+year(2020) -> <<"2020">>;
+year(2021) -> <<"2021">>;
+year(2022) -> <<"2022">>;
+year(2023) -> <<"2023">>;
+year(2024) -> <<"2024">>;
+year(2025) -> <<"2025">>;
+year(2026) -> <<"2026">>;
+year(2027) -> <<"2027">>;
+year(2028) -> <<"2028">>;
+year(2029) -> <<"2029">>;
+year(Year) -> list_to_binary(integer_to_list(Year)).
diff --git a/deps/cowlib/src/cow_http.erl b/deps/cowlib/src/cow_http.erl
new file mode 100644 (file)
index 0000000..f7e3cdd
--- /dev/null
@@ -0,0 +1,301 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http).
+
+%% @todo parse_request_line
+-export([parse_status_line/1]).
+-export([parse_headers/1]).
+
+-export([parse_fullhost/1]).
+-export([parse_fullpath/1]).
+-export([parse_version/1]).
+
+-export([request/4]).
+-export([version/1]).
+
+-type version() :: 'HTTP/1.0' | 'HTTP/1.1'.
+-type status() :: 100..999.
+-type headers() :: [{binary(), iodata()}].
+
+-include("cow_inline.hrl").
+
+%% @doc Parse the status line.
+
+-spec parse_status_line(binary()) -> {version(), status(), binary(), binary()}.
+%% Parse "HTTP/1.x NNN Reason\r\n", returning the version, numeric status,
+%% reason phrase and the remaining bytes. The first three clauses are fast
+%% paths for the most common complete status lines.
+parse_status_line(<< "HTTP/1.1 200 OK\r\n", Rest/bits >>) ->
+	{'HTTP/1.1', 200, <<"OK">>, Rest};
+parse_status_line(<< "HTTP/1.1 404 Not Found\r\n", Rest/bits >>) ->
+	{'HTTP/1.1', 404, <<"Not Found">>, Rest};
+parse_status_line(<< "HTTP/1.1 500 Internal Server Error\r\n", Rest/bits >>) ->
+	{'HTTP/1.1', 500, <<"Internal Server Error">>, Rest};
+parse_status_line(<< "HTTP/1.1 ", Status/bits >>) ->
+	parse_status_line(Status, 'HTTP/1.1');
+parse_status_line(<< "HTTP/1.0 ", Status/bits >>) ->
+	parse_status_line(Status, 'HTTP/1.0').
+
+%% The status is exactly three ASCII digits; the reason phrase runs up to
+%% the first CR, which must be immediately followed by LF.
+parse_status_line(<< H, T, U, " ", Rest/bits >>, Version)
+		when $0 =< H, H =< $9, $0 =< T, T =< $9, $0 =< U, U =< $9 ->
+	Status = (H - $0) * 100 + (T - $0) * 10 + (U - $0),
+	{Pos, _} = binary:match(Rest, <<"\r">>),
+	<< StatusStr:Pos/binary, "\r\n", Rest2/bits >> = Rest,
+	{Version, Status, StatusStr, Rest2}.
+
+-ifdef(TEST).
+%% Well-formed status lines must parse exactly; malformed ones must crash.
+parse_status_line_test_() ->
+	Tests = [
+		{<<"HTTP/1.1 200 OK\r\nRest">>,
+			{'HTTP/1.1', 200, <<"OK">>, <<"Rest">>}},
+		{<<"HTTP/1.0 404 Not Found\r\nRest">>,
+			{'HTTP/1.0', 404, <<"Not Found">>, <<"Rest">>}},
+		{<<"HTTP/1.1 500 Something very funny here\r\nRest">>,
+			{'HTTP/1.1', 500, <<"Something very funny here">>, <<"Rest">>}},
+		{<<"HTTP/1.1 200 \r\nRest">>,
+			{'HTTP/1.1', 200, <<>>, <<"Rest">>}}
+	],
+	[{V, fun() -> R = parse_status_line(V) end}
+		|| {V, R} <- Tests].
+
+parse_status_line_error_test_() ->
+	Tests = [
+		<<>>,
+		<<"HTTP/1.1">>,
+		<<"HTTP/1.1 200\r\n">>,
+		<<"HTTP/1.1 200 OK">>,
+		<<"HTTP/1.1 200 OK\r">>,
+		<<"HTTP/1.1 200 OK\n">>,
+		<<"HTTP/0.9 200 OK\r\n">>,
+		<<"HTTP/1.1 42 Answer\r\n">>,
+		<<"HTTP/1.1 999999999 More than OK\r\n">>,
+		<<"content-type: text/plain\r\n">>,
+		<<0:80, "\r\n">>
+	],
+	[{V, fun() -> {'EXIT', _} = (catch parse_status_line(V)) end}
+		|| V <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_status_line_200() ->
+	horse:repeat(200000,
+		parse_status_line(<<"HTTP/1.1 200 OK\r\n">>)
+	).
+
+horse_parse_status_line_404() ->
+	horse:repeat(200000,
+		parse_status_line(<<"HTTP/1.1 404 Not Found\r\n">>)
+	).
+
+horse_parse_status_line_500() ->
+	horse:repeat(200000,
+		parse_status_line(<<"HTTP/1.1 500 Internal Server Error\r\n">>)
+	).
+
+horse_parse_status_line_other() ->
+	horse:repeat(200000,
+		parse_status_line(<<"HTTP/1.1 416 Requested range not satisfiable\r\n">>)
+	).
+-endif.
+
+%% @doc Parse the list of headers.
+
+-spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
+%% Parse headers up to and including the empty CRLF line. Header names are
+%% lowercased on the fly via ?INLINE_LOWERCASE; values keep their case.
+parse_headers(Data) ->
+	parse_header(Data, []).
+
+%% A bare CRLF terminates the header block; Rest is whatever follows.
+parse_header(<< $\r, $\n, Rest/bits >>, Acc) ->
+	{lists:reverse(Acc), Rest};
+parse_header(Data, Acc) ->
+	parse_hd_name(Data, Acc, <<>>).
+
+parse_hd_name(<< C, Rest/bits >>, Acc, SoFar) ->
+	case C of
+		$: -> parse_hd_before_value(Rest, Acc, SoFar);
+		$\s -> parse_hd_name_ws(Rest, Acc, SoFar);
+		$\t -> parse_hd_name_ws(Rest, Acc, SoFar);
+		?INLINE_LOWERCASE(parse_hd_name, Rest, Acc, SoFar)
+	end.
+
+%% After whitespace inside a name, only more whitespace or the ':' is
+%% accepted; any other byte crashes (case_clause) as malformed.
+parse_hd_name_ws(<< C, Rest/bits >>, Acc, Name) ->
+	case C of
+		$: -> parse_hd_before_value(Rest, Acc, Name);
+		$\s -> parse_hd_name_ws(Rest, Acc, Name);
+		$\t -> parse_hd_name_ws(Rest, Acc, Name)
+	end.
+
+%% Skip optional whitespace between the ':' and the value.
+parse_hd_before_value(<< $\s, Rest/bits >>, Acc, Name) ->
+	parse_hd_before_value(Rest, Acc, Name);
+parse_hd_before_value(<< $\t, Rest/bits >>, Acc, Name) ->
+	parse_hd_before_value(Rest, Acc, Name);
+parse_hd_before_value(Data, Acc, Name) ->
+	parse_hd_value(Data, Acc, Name, <<>>).
+
+%% CRLF followed by SP or HT is a folded (multiline) header: the value
+%% continues and the single whitespace character C is kept in the value.
+parse_hd_value(<< $\r, Rest/bits >>, Acc, Name, SoFar) ->
+	case Rest of
+		<< $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
+			parse_hd_value(Rest2, Acc, Name, << SoFar/binary, C >>);
+		<< $\n, Rest2/bits >> ->
+			parse_header(Rest2, [{Name, SoFar}|Acc])
+	end;
+parse_hd_value(<< C, Rest/bits >>, Acc, Name, SoFar) ->
+	parse_hd_value(Rest, Acc, Name, << SoFar/binary, C >>).
+
+-ifdef(TEST).
+%% Names must come back lowercased and folded values joined with a space.
+parse_headers_test_() ->
+	Tests = [
+		{<<"\r\nRest">>,
+			{[], <<"Rest">>}},
+		{<<"Server: Erlang/R17\r\n"
+			"Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
+			"Multiline-Header: why hello!\r\n"
+				" I didn't see you all the way over there!\r\n"
+			"Content-Length: 12\r\n"
+			"Content-Type: text/plain\r\n"
+			"\r\nRest">>,
+			{[{<<"server">>, <<"Erlang/R17">>},
+				{<<"date">>, <<"Sun, 23 Feb 2014 09:30:39 GMT">>},
+				{<<"multiline-header">>,
+					<<"why hello! I didn't see you all the way over there!">>},
+				{<<"content-length">>, <<"12">>},
+				{<<"content-type">>, <<"text/plain">>}],
+				<<"Rest">>}}
+	],
+	[{V, fun() -> R = parse_headers(V) end}
+		|| {V, R} <- Tests].
+
+parse_headers_error_test_() ->
+	Tests = [
+		<<>>,
+		<<"\r">>,
+		<<"Malformed\r\n\r\n">>,
+		<<"content-type: text/plain\r\nMalformed\r\n\r\n">>,
+		<<"HTTP/1.1 200 OK\r\n\r\n">>,
+		<<0:80, "\r\n\r\n">>,
+		<<"content-type: text/plain\r\ncontent-length: 12\r\n">>
+	],
+	[{V, fun() -> {'EXIT', _} = (catch parse_headers(V)) end}
+		|| V <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_headers() ->
+	horse:repeat(50000,
+		parse_headers(<<"Server: Erlang/R17\r\n"
+			"Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
+			"Multiline-Header: why hello!\r\n"
+				" I didn't see you all the way over there!\r\n"
+			"Content-Length: 12\r\n"
+			"Content-Type: text/plain\r\n"
+			"\r\nRest">>)
+	).
+-endif.
+
+%% @doc Extract host and port from a binary.
+%%
+%% Because the hostname is case insensitive it is converted
+%% to lowercase.
+
+-spec parse_fullhost(binary()) -> {binary(), undefined | non_neg_integer()}.
+%% Split "host[:port]" into {Host, Port}; Port is 'undefined' when absent.
+%% The middle accumulator argument tracks whether we are inside an IPv6
+%% '[...]' literal, where ':' belongs to the address, not the port.
+parse_fullhost(Fullhost) ->
+	parse_fullhost(Fullhost, false, <<>>).
+
+parse_fullhost(<< $[, Rest/bits >>, false, <<>>) ->
+	parse_fullhost(Rest, true, << $[ >>);
+parse_fullhost(<<>>, false, Acc) ->
+	{Acc, undefined};
+%% @todo Optimize.
+parse_fullhost(<< $:, Rest/bits >>, false, Acc) ->
+	{Acc, list_to_integer(binary_to_list(Rest))};
+parse_fullhost(<< $], Rest/bits >>, true, Acc) ->
+	parse_fullhost(Rest, false, << Acc/binary, $] >>);
+%% Hostnames are case-insensitive: bytes are lowercased on the fly.
+parse_fullhost(<< C, Rest/bits >>, E, Acc) ->
+	case C of
+		?INLINE_LOWERCASE(parse_fullhost, Rest, E, Acc)
+	end.
+
+-ifdef(TEST).
+parse_fullhost_test() ->
+	{<<"example.org">>, 8080} = parse_fullhost(<<"example.org:8080">>),
+	{<<"example.org">>, undefined} = parse_fullhost(<<"example.org">>),
+	{<<"192.0.2.1">>, 8080} = parse_fullhost(<<"192.0.2.1:8080">>),
+	{<<"192.0.2.1">>, undefined} = parse_fullhost(<<"192.0.2.1">>),
+	{<<"[2001:db8::1]">>, 8080} = parse_fullhost(<<"[2001:db8::1]:8080">>),
+	{<<"[2001:db8::1]">>, undefined} = parse_fullhost(<<"[2001:db8::1]">>),
+	{<<"[::ffff:192.0.2.1]">>, 8080}
+		= parse_fullhost(<<"[::ffff:192.0.2.1]:8080">>),
+	{<<"[::ffff:192.0.2.1]">>, undefined}
+		= parse_fullhost(<<"[::ffff:192.0.2.1]">>),
+	ok.
+-endif.
+
+%% @doc Extract path and query string from a binary.
+
+-spec parse_fullpath(binary()) -> {binary(), binary()}.
+%% Split a request target into {Path, Qs} at the first $? character.
+%% When there is no query string, Qs is the empty binary.
+parse_fullpath(Fullpath) ->
+	case binary:split(Fullpath, <<"?">>) of
+		[Path] -> {Path, <<>>};
+		[Path, Qs] -> {Path, Qs}
+	end.
+
+-ifdef(TEST).
+%% Covers the asterisk-form target, bare '/', empty and non-empty query.
+parse_fullpath_test() ->
+	{<<"*">>, <<>>} = parse_fullpath(<<"*">>),
+	{<<"/">>, <<>>} = parse_fullpath(<<"/">>),
+	{<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource">>),
+	{<<"/">>, <<>>} = parse_fullpath(<<"/?">>),
+	{<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy">>),
+	{<<"/path/to/resource">>, <<"q=cowboy">>}
+		= parse_fullpath(<<"/path/to/resource?q=cowboy">>),
+	ok.
+-endif.
+
+%% @doc Convert an HTTP version to atom.
+
+-spec parse_version(binary()) -> version().
+%% Map a version token to its atom form. Only HTTP/1.0 and HTTP/1.1 are
+%% supported; any other value crashes with function_clause.
+parse_version(<<"HTTP/1.1">>) -> 'HTTP/1.1';
+parse_version(<<"HTTP/1.0">>) -> 'HTTP/1.0'.
+
+-ifdef(TEST).
+parse_version_test() ->
+	'HTTP/1.1' = parse_version(<<"HTTP/1.1">>),
+	'HTTP/1.0' = parse_version(<<"HTTP/1.0">>),
+	{'EXIT', _} = (catch parse_version(<<"HTTP/1.2">>)),
+	ok.
+-endif.
+
+%% @doc Return formatted request-line and headers.
+%% @todo Add tests when the corresponding reverse functions are added.
+
+-spec request(binary(), iodata(), version(), headers()) -> iodata().
+%% Build the request line, then one "Name: Value" line per header, then
+%% the blank line terminating the header block, all as iodata.
+request(Method, Path, Version, Headers) ->
+	HeaderLines = [[Name, <<": ">>, Value, <<"\r\n">>]
+		|| {Name, Value} <- Headers],
+	[Method, <<" ">>, Path, <<" ">>, version(Version), <<"\r\n">>,
+		HeaderLines, <<"\r\n">>].
+
+%% @doc Return the version as a binary.
+
+-spec version(version()) -> binary().
+%% Inverse of parse_version/1 for the two supported version atoms; any
+%% other atom crashes with function_clause.
+version('HTTP/1.1') -> <<"HTTP/1.1">>;
+version('HTTP/1.0') -> <<"HTTP/1.0">>.
+
+-ifdef(TEST).
+version_test() ->
+	<<"HTTP/1.1">> = version('HTTP/1.1'),
+	<<"HTTP/1.0">> = version('HTTP/1.0'),
+	{'EXIT', _} = (catch version('HTTP/1.2')),
+	ok.
+-endif.
diff --git a/deps/cowlib/src/cow_http_hd.erl b/deps/cowlib/src/cow_http_hd.erl
new file mode 100644 (file)
index 0000000..35cf2f4
--- /dev/null
@@ -0,0 +1,194 @@
+%% Copyright (c) 2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http_hd).
+
+-export([parse_connection/1]).
+-export([parse_content_length/1]).
+-export([parse_transfer_encoding/1]).
+
+-include("cow_inline.hrl").
+
+%% @doc Parse the Connection header.
+
+-spec parse_connection(binary()) -> [binary()].
+%% Fast paths for the two most common single-token values; the general
+%% case splits on commas/whitespace and lowercases each token. An empty
+%% token list is rejected by nonempty/1 (function_clause).
+parse_connection(<<"close">>) ->
+	[<<"close">>];
+parse_connection(<<"keep-alive">>) ->
+	[<<"keep-alive">>];
+parse_connection(Connection) ->
+	nonempty(token_ci_list(Connection, [])).
+
+-ifdef(TEST).
+parse_connection_test_() ->
+	Tests = [
+		{<<"close">>, [<<"close">>]},
+		{<<"ClOsE">>, [<<"close">>]},
+		{<<"Keep-Alive">>, [<<"keep-alive">>]},
+		{<<"keep-alive, Upgrade">>, [<<"keep-alive">>, <<"upgrade">>]}
+	],
+	[{V, fun() -> R = parse_connection(V) end} || {V, R} <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_connection_close() ->
+	horse:repeat(200000,
+		parse_connection(<<"close">>)
+	).
+
+horse_parse_connection_keepalive() ->
+	horse:repeat(200000,
+		parse_connection(<<"keep-alive">>)
+	).
+
+horse_parse_connection_keepalive_upgrade() ->
+	horse:repeat(200000,
+		parse_connection(<<"keep-alive, upgrade">>)
+	).
+-endif.
+
+%% @doc Parse the Content-Length header.
+%%
+%% The value has at least one digit, and may be followed by whitespace.
+
+-spec parse_content_length(binary()) -> non_neg_integer().
+%% The first byte must be a digit (leading zeros are accepted); number/2
+%% then consumes the remaining digits plus optional trailing SP/HT only.
+parse_content_length(<< $0 >>) -> 0;
+parse_content_length(<< $0, R/bits >>) -> number(R, 0);
+parse_content_length(<< $1, R/bits >>) -> number(R, 1);
+parse_content_length(<< $2, R/bits >>) -> number(R, 2);
+parse_content_length(<< $3, R/bits >>) -> number(R, 3);
+parse_content_length(<< $4, R/bits >>) -> number(R, 4);
+parse_content_length(<< $5, R/bits >>) -> number(R, 5);
+parse_content_length(<< $6, R/bits >>) -> number(R, 6);
+parse_content_length(<< $7, R/bits >>) -> number(R, 7);
+parse_content_length(<< $8, R/bits >>) -> number(R, 8);
+parse_content_length(<< $9, R/bits >>) -> number(R, 9).
+
+-ifdef(TEST).
+parse_content_length_test_() ->
+	Tests = [
+		{<<"0">>, 0},
+		{<<"42    ">>, 42},
+		{<<"69\t">>, 69},
+		{<<"1337">>, 1337},
+		{<<"1234567890">>, 1234567890},
+		{<<"1234567890     ">>, 1234567890}
+	],
+	[{V, fun() -> R = parse_content_length(V) end} || {V, R} <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_content_length_zero() ->
+	horse:repeat(100000,
+		parse_content_length(<<"0">>)
+	).
+
+horse_parse_content_length_giga() ->
+	horse:repeat(100000,
+		parse_content_length(<<"1234567890">>)
+	).
+-endif.
+
+%% @doc Parse the Transfer-Encoding header.
+%%
+%% @todo Extension parameters.
+
+-spec parse_transfer_encoding(binary()) -> [binary()].
+%% Fast path for the ubiquitous "chunked"; otherwise a comma-separated,
+%% case-insensitive token list which must be non-empty.
+parse_transfer_encoding(<<"chunked">>) ->
+	[<<"chunked">>];
+parse_transfer_encoding(TransferEncoding) ->
+	nonempty(token_ci_list(TransferEncoding, [])).
+
+-ifdef(TEST).
+parse_transfer_encoding_test_() ->
+	Tests = [
+		{<<"a , , , ">>, [<<"a">>]},
+		{<<" , , , a">>, [<<"a">>]},
+		{<<"a , , b">>, [<<"a">>, <<"b">>]},
+		{<<"chunked">>, [<<"chunked">>]},
+		{<<"chunked, something">>, [<<"chunked">>, <<"something">>]}
+	],
+	[{V, fun() -> R = parse_transfer_encoding(V) end} || {V, R} <- Tests].
+
+parse_transfer_encoding_error_test_() ->
+	Tests = [
+		<<>>,
+		<<" ">>,
+		<<" , ">>,
+		<<",,,">>,
+		<<"a b">>
+	],
+	[{V, fun() -> {'EXIT', _} = (catch parse_transfer_encoding(V)) end}
+		|| V <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_transfer_encoding_chunked() ->
+	horse:repeat(200000,
+		parse_transfer_encoding(<<"chunked">>)
+	).
+
+horse_parse_transfer_encoding_custom() ->
+	horse:repeat(200000,
+		parse_transfer_encoding(<<"chunked, something">>)
+	).
+-endif.
+
+%% Internal.
+
+%% Only return if the list is not empty.
+%% (Crashes with function_clause on [], rejecting empty header lists.)
+nonempty(L) when L =/= [] -> L.
+
+%% Parse a number optionally followed by whitespace.
+%% Accumulates decimal digits into Acc; once SP/HT is seen, ws_end/1
+%% verifies only whitespace remains. Any other byte crashes.
+number(<< $0, R/bits >>, Acc) -> number(R, Acc * 10);
+number(<< $1, R/bits >>, Acc) -> number(R, Acc * 10 + 1);
+number(<< $2, R/bits >>, Acc) -> number(R, Acc * 10 + 2);
+number(<< $3, R/bits >>, Acc) -> number(R, Acc * 10 + 3);
+number(<< $4, R/bits >>, Acc) -> number(R, Acc * 10 + 4);
+number(<< $5, R/bits >>, Acc) -> number(R, Acc * 10 + 5);
+number(<< $6, R/bits >>, Acc) -> number(R, Acc * 10 + 6);
+number(<< $7, R/bits >>, Acc) -> number(R, Acc * 10 + 7);
+number(<< $8, R/bits >>, Acc) -> number(R, Acc * 10 + 8);
+number(<< $9, R/bits >>, Acc) -> number(R, Acc * 10 + 9);
+number(<< $\s, R/bits >>, Acc) -> ws_end(R), Acc;
+number(<< $\t, R/bits >>, Acc) -> ws_end(R), Acc;
+number(<<>>, Acc) -> Acc.
+
+ws_end(<< $\s, R/bits >>) -> ws_end(R);
+ws_end(<< $\t, R/bits >>) -> ws_end(R);
+ws_end(<<>>) -> ok.
+
+%% Parse a list of case insensitive tokens.
+%% token_ci_list/2 skips leading separators; /3 accumulates the current
+%% token T, lowercased via ?INLINE_LOWERCASE; token_ci_list_sep/3 allows
+%% only whitespace between a token and the next comma.
+token_ci_list(<<>>, Acc) -> lists:reverse(Acc);
+token_ci_list(<< $\s, R/bits >>, Acc) -> token_ci_list(R, Acc);
+token_ci_list(<< $\t, R/bits >>, Acc) -> token_ci_list(R, Acc);
+token_ci_list(<< $,, R/bits >>, Acc) -> token_ci_list(R, Acc);
+token_ci_list(<< C, R/bits >>, Acc) ->
+	case C of
+		?INLINE_LOWERCASE(token_ci_list, R, Acc, <<>>)
+	end.
+
+token_ci_list(<<>>, Acc, T) -> lists:reverse([T|Acc]);
+token_ci_list(<< $\s, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
+token_ci_list(<< $\t, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
+token_ci_list(<< $,, R/bits >>, Acc, T) -> token_ci_list(R, [T|Acc]);
+token_ci_list(<< C, R/bits >>, Acc, T) ->
+	case C of
+		?INLINE_LOWERCASE(token_ci_list, R, Acc, T)
+	end.
+
+token_ci_list_sep(<<>>, Acc, T) -> lists:reverse([T|Acc]);
+token_ci_list_sep(<< $\s, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
+token_ci_list_sep(<< $\t, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
+token_ci_list_sep(<< $,, R/bits >>, Acc, T) -> token_ci_list(R, [T|Acc]).
diff --git a/deps/cowlib/src/cow_http_te.erl b/deps/cowlib/src/cow_http_te.erl
new file mode 100644 (file)
index 0000000..5ab71f4
--- /dev/null
@@ -0,0 +1,327 @@
+%% Copyright (c) 2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http_te).
+
+%% Identity.
+-export([stream_identity/2]).
+-export([identity/1]).
+
+%% Chunked.
+-export([stream_chunked/2]).
+-export([chunk/1]).
+-export([last_chunk/0]).
+
+%% The state type is the same for both identity and chunked.
+%% stream_identity/2 reads it as {Streamed, Total}; stream_chunked/3
+%% reads it as {Rem, Streamed} (see those functions below).
+-type state() :: {non_neg_integer(), non_neg_integer()}.
+
+%% Union of every shape the stream_* decoders may return.
+-type decode_ret() :: more
+	| {more, Data::binary(), state()}
+	| {more, Data::binary(), RemLen::non_neg_integer(), state()}
+	| {more, Data::binary(), Rest::binary(), state()}
+	| {done, TotalLen::non_neg_integer(), Rest::binary()}
+	| {done, Data::binary(), TotalLen::non_neg_integer(), Rest::binary()}.
+-export_type([decode_ret/0]).
+
+-ifdef(EXTRA).
+%% Test helper: feed decoder F one byte at a time to exercise every
+%% partial-input path. Crashes (case_clause) if F returns an unexpected
+%% shape or finishes with trailing data left over.
+dripfeed(<< C, Rest/bits >>, Acc, State, F) ->
+	case F(<< Acc/binary, C >>, State) of
+		more ->
+			%% Decoder needs more input: keep accumulating bytes.
+			dripfeed(Rest, << Acc/binary, C >>, State, F);
+		{more, _, State2} ->
+			dripfeed(Rest, <<>>, State2, F);
+		{more, _, Length, State2} when is_integer(Length) ->
+			dripfeed(Rest, <<>>, State2, F);
+		{more, _, Acc2, State2} ->
+			%% Decoder handed back unparsed input: replay it with next byte.
+			dripfeed(Rest, Acc2, State2, F);
+		{done, _, <<>>} ->
+			ok;
+		{done, _, _, <<>>} ->
+			ok
+	end.
+-endif.
+
+%% Identity.
+
+%% @doc Decode an identity stream.
+
+-spec stream_identity(Data, State)
+	-> {more, Data, Len, State} | {done, Data, Len, Data}
+	when Data::binary(), State::state(), Len::non_neg_integer().
+%% Pass identity-encoded bytes through, tracking {Streamed, Total}.
+stream_identity(Data, {Streamed, Total}) ->
+	case Streamed + byte_size(Data) of
+		Streamed2 when Streamed2 < Total ->
+			{more, Data, Total - Streamed2, {Streamed2, Total}};
+		_ ->
+			WantedSize = Total - Streamed,
+			<< Body:WantedSize/binary, Rest/bits >> = Data,
+			{done, Body, Total, Rest}
+	end.
+
+-spec identity(Data) -> Data when Data::iodata().
+%% No-op passthrough: identity-encoded bodies need no transformation.
+identity(Data) -> Data.
+
+-ifdef(TEST).
+%% Whole bodies decoded in a single call: empty, tiny, and 10kB.
+stream_identity_test() ->
+	{done, <<>>, 0, <<>>}
+		= stream_identity(identity(<<>>), {0, 0}),
+	{done, <<"\r\n">>, 2, <<>>}
+		= stream_identity(identity(<<"\r\n">>), {0, 2}),
+	{done, << 0:80000 >>, 10000, <<>>}
+		= stream_identity(identity(<< 0:80000 >>), {0, 10000}),
+	ok.
+
+%% A 2999-byte body decoded in three calls; state threads the progress.
+stream_identity_parts_test() ->
+	{more, << 0:8000 >>, 1999, S1}
+		= stream_identity(<< 0:8000 >>, {0, 2999}),
+	{more, << 0:8000 >>, 999, S2}
+		= stream_identity(<< 0:8000 >>, S1),
+	{done, << 0:7992 >>, 2999, <<>>}
+		= stream_identity(<< 0:7992 >>, S2),
+	ok.
+-endif.
+
+-ifdef(PERF).
+%% Using the same data as the chunked one for comparison.
+
+%% Benchmark: the 43-byte chunked payload treated as an opaque identity body.
+horse_stream_identity() ->
+	horse:repeat(10000,
+		stream_identity(<<
+			"4\r\n"
+			"Wiki\r\n"
+			"5\r\n"
+			"pedia\r\n"
+			"e\r\n"
+			" in\r\n\r\nchunks.\r\n"
+			"0\r\n"
+			"\r\n">>, {0, 43})
+	).
+
+%% Same payload, fed one byte at a time through dripfeed/4.
+horse_stream_identity_dripfeed() ->
+	horse:repeat(10000,
+		dripfeed(<<
+			"4\r\n"
+			"Wiki\r\n"
+			"5\r\n"
+			"pedia\r\n"
+			"e\r\n"
+			" in\r\n\r\nchunks.\r\n"
+			"0\r\n"
+			"\r\n">>, <<>>, {0, 43}, fun stream_identity/2)
+	).
+-endif.
+
+%% Chunked.
+
+%% @doc Decode a chunked stream.
+
+-spec stream_chunked(Data, State)
+	-> more | {more, Data, State} | {more, Data, Len, State}
+	| {more, Data, Data, State}
+	| {done, Len, Data} | {done, Data, Len, Data}
+	when Data::binary(), State::state(), Len::non_neg_integer().
+%% State is {Rem, Streamed}: Rem is the byte count still expected for
+%% the current chunk *including* its trailing CRLF (0 = positioned at a
+%% chunk-size line; 2/1 = only the CRLF/LF remains), Streamed is the
+%% number of decoded body bytes so far.
+stream_chunked(Data, State) ->
+	stream_chunked(Data, State, <<>>).
+
+%% New chunk.
+stream_chunked(Data = << C, _/bits >>, {0, Streamed}, Acc) when C =/= $\r ->
+	case chunked_len(Data, Streamed, Acc, 0) of
+		{next, Rest, State, Acc2} ->
+			stream_chunked(Rest, State, Acc2);
+		{more, State, Acc2} ->
+			%% Size line incomplete: hand back all of Data as unparsed rest.
+			{more, Acc2, Data, State};
+		Ret ->
+			Ret
+	end;
+%% Trailing \r\n before next chunk.
+stream_chunked(<< "\r\n", Rest/bits >>, {2, Streamed}, Acc) ->
+	stream_chunked(Rest, {0, Streamed}, Acc);
+%% Trailing \r before next chunk.
+stream_chunked(<< "\r" >>, {2, Streamed}, Acc) ->
+	{more, Acc, {1, Streamed}};
+%% Trailing \n before next chunk.
+stream_chunked(<< "\n", Rest/bits >>, {1, Streamed}, Acc) ->
+	stream_chunked(Rest, {0, Streamed}, Acc);
+%% More data needed.
+stream_chunked(<<>>, State = {Rem, _}, Acc) ->
+	{more, Acc, Rem, State};
+%% Chunk data.
+%% Rem includes the chunk's trailing CRLF, hence RemSize = Rem - 2.
+stream_chunked(Data, {Rem, Streamed}, Acc) when Rem > 2 ->
+	DataSize = byte_size(Data),
+	RemSize = Rem - 2,
+	case Data of
+		<< Chunk:RemSize/binary, "\r\n", Rest/bits >> ->
+			stream_chunked(Rest, {0, Streamed + RemSize}, << Acc/binary, Chunk/binary >>);
+		<< Chunk:RemSize/binary, "\r" >> ->
+			{more, << Acc/binary, Chunk/binary >>, {1, Streamed + RemSize}};
+		%% Everything in Data is part of the chunk.
+		_ ->
+			Rem2 = Rem - DataSize,
+			{more, << Acc/binary, Data/binary >>, Rem2, {Rem2, Streamed + DataSize}}
+	end.
+
+%% Parse a chunk-size line: fold hexadecimal digits into Len, then
+%% expect CRLF. Len =:= 0 identifies the final (empty) chunk.
+chunked_len(<< $0, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16);
+chunked_len(<< $1, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 1);
+chunked_len(<< $2, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 2);
+chunked_len(<< $3, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 3);
+chunked_len(<< $4, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 4);
+chunked_len(<< $5, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 5);
+chunked_len(<< $6, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 6);
+chunked_len(<< $7, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 7);
+chunked_len(<< $8, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 8);
+chunked_len(<< $9, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 9);
+chunked_len(<< $A, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
+chunked_len(<< $B, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
+chunked_len(<< $C, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
+chunked_len(<< $D, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
+chunked_len(<< $E, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
+chunked_len(<< $F, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
+chunked_len(<< $a, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
+chunked_len(<< $b, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
+chunked_len(<< $c, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
+chunked_len(<< $d, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
+chunked_len(<< $e, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
+chunked_len(<< $f, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
+%% Final chunk.
+chunked_len(<< "\r\n\r\n", R/bits >>, S, <<>>, 0) -> {done, S, R};
+chunked_len(<< "\r\n\r\n", R/bits >>, S, A, 0) -> {done, A, S, R};
+%% NOTE(review): this clause makes a final chunk not immediately followed
+%% by "\r\n\r\n" (e.g. with trailers or extensions) keep returning more
+%% indefinitely — confirm trailers are intended to be unsupported here.
+chunked_len(_, _, _, 0) -> more;
+%% Normal chunk. Add 2 to Len for the trailing \r\n.
+chunked_len(<< "\r\n", R/bits >>, S, A, Len) -> {next, R, {Len + 2, S}, A};
+chunked_len(<<"\r">>, _, <<>>, _) -> more;
+chunked_len(<<"\r">>, S, A, _) -> {more, {0, S}, A};
+chunked_len(<<>>, _, <<>>, _) -> more;
+chunked_len(<<>>, S, A, _) -> {more, {0, S}, A}.
+
+%% @doc Encode a chunk.
+
+-spec chunk(D) -> D when D::iodata().
+chunk(Data) ->
+	Size = integer_to_list(iolist_size(Data), 16),
+	[Size, <<"\r\n">>, Data, <<"\r\n">>].
+
+%% @doc Encode the last chunk of a chunked stream.
+
+-spec last_chunk() -> << _:40 >>.
+%% Terminating zero-size chunk followed by the empty trailer section.
+last_chunk() -> <<"0\r\n\r\n">>.
+
+-ifdef(TEST).
+%% Round-trip: encode with chunk/1 + last_chunk/0, decode with stream_chunked/2.
+stream_chunked_identity_test() ->
+	{done, <<"Wikipedia in\r\n\r\nchunks.">>, 23, <<>>}
+		= stream_chunked(iolist_to_binary([
+			chunk("Wiki"),
+			chunk("pedia"),
+			chunk(" in\r\n\r\nchunks."),
+			last_chunk()
+		]), {0, 0}),
+	ok.
+
+%% Entire chunked bodies decoded in a single call.
+stream_chunked_one_pass_test() ->
+	{done, 0, <<>>} = stream_chunked(<<"0\r\n\r\n">>, {0, 0}),
+	{done, <<"Wikipedia in\r\n\r\nchunks.">>, 23, <<>>}
+		= stream_chunked(<<
+			"4\r\n"
+			"Wiki\r\n"
+			"5\r\n"
+			"pedia\r\n"
+			"e\r\n"
+			" in\r\n\r\nchunks.\r\n"
+			"0\r\n"
+			"\r\n">>, {0, 0}),
+	ok.
+
+%% Decoding split across calls at awkward boundaries; exercises the
+%% partial size-line, split CRLF and mid-chunk return shapes.
+stream_chunked_n_passes_test() ->
+	S0 = {0, 0},
+	more = stream_chunked(<<"4\r">>, S0),
+	{more, <<>>, 6, S1} = stream_chunked(<<"4\r\n">>, S0),
+	{more, <<"Wiki">>, 0, S2} = stream_chunked(<<"Wiki\r\n">>, S1),
+	{more, <<"pedia">>, <<"e\r">>, S3} = stream_chunked(<<"5\r\npedia\r\ne\r">>, S2),
+	{more, <<" in\r\n\r\nchunks.">>, 2, S4} = stream_chunked(<<"e\r\n in\r\n\r\nchunks.">>, S3),
+	{done, 23, <<>>} = stream_chunked(<<"\r\n0\r\n\r\n">>, S4),
+	%% A few extra for coverage purposes.
+	more = stream_chunked(<<"\n3">>, {1, 0}),
+	{more, <<"abc">>, 2, {2, 3}} = stream_chunked(<<"\n3\r\nabc">>, {1, 0}),
+	{more, <<"abc">>, {1, 3}} = stream_chunked(<<"3\r\nabc\r">>, {0, 0}),
+	{more, <<"abc">>, <<"123">>, {0, 3}} = stream_chunked(<<"3\r\nabc\r\n123">>, {0, 0}),
+	ok.
+
+%% One byte at a time through every decoder path.
+stream_chunked_dripfeed_test() ->
+	dripfeed(<<
+		"4\r\n"
+		"Wiki\r\n"
+		"5\r\n"
+		"pedia\r\n"
+		"e\r\n"
+		" in\r\n\r\nchunks.\r\n"
+		"0\r\n"
+		"\r\n">>, <<>>, {0, 0}, fun stream_chunked/2).
+
+%% Split Body into ChunkSize-byte segments, each formatted as a chunked
+%% transfer-coding chunk, ending with the final "0\r\n\r\n". The last
+%% segment may be shorter than ChunkSize.
+do_body_to_chunks(_, <<>>, Acc) ->
+	lists:reverse([<<"0\r\n\r\n">>|Acc]);
+do_body_to_chunks(ChunkSize, Body, Acc) ->
+	BodySize = byte_size(Body),
+	ChunkSize2 = case BodySize < ChunkSize of
+		true -> BodySize;
+		false -> ChunkSize
+	end,
+	<< Chunk:ChunkSize2/binary, Rest/binary >> = Body,
+	ChunkSizeBin = list_to_binary(integer_to_list(ChunkSize2, 16)),
+	do_body_to_chunks(ChunkSize, Rest,
+		[<< ChunkSizeBin/binary, "\r\n", Chunk/binary, "\r\n" >>|Acc]).
+
+%% Dripfeed a larger generated body split into 50-byte chunks.
+stream_chunked_dripfeed2_test() ->
+	Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])),
+	Body2 = iolist_to_binary(do_body_to_chunks(50, Body, [])),
+	dripfeed(Body2, <<>>, {0, 0}, fun stream_chunked/2).
+
+%% Invalid input/state combinations must crash rather than decode garbage.
+stream_chunked_error_test_() ->
+	Tests = [
+		{<<>>, undefined},
+		{<<"\n\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, {2, 0}}
+	],
+	[{lists:flatten(io_lib:format("value ~p state ~p", [V, S])),
+		fun() -> {'EXIT', _} = (catch stream_chunked(V, S)) end}
+			|| {V, S} <- Tests].
+-endif.
+
+-ifdef(PERF).
+%% Benchmark: full chunked decode in one call.
+horse_stream_chunked() ->
+	horse:repeat(10000,
+		stream_chunked(<<
+			"4\r\n"
+			"Wiki\r\n"
+			"5\r\n"
+			"pedia\r\n"
+			"e\r\n"
+			" in\r\n\r\nchunks.\r\n"
+			"0\r\n"
+			"\r\n">>, {0, 0})
+	).
+
+%% Benchmark: chunked decode fed one byte at a time.
+%% Fix: the initial state was {0, 43}, copied from the identity
+%% benchmark where 43 is the expected Total. For stream_chunked/2 the
+%% second element of the state is the Streamed counter and must start
+%% at 0, as in stream_chunked_dripfeed_test above.
+horse_stream_chunked_dripfeed() ->
+	horse:repeat(10000,
+		dripfeed(<<
+			"4\r\n"
+			"Wiki\r\n"
+			"5\r\n"
+			"pedia\r\n"
+			"e\r\n"
+			" in\r\n\r\nchunks.\r\n"
+			"0\r\n"
+			"\r\n">>, <<>>, {0, 0}, fun stream_chunked/2)
+	).
+-endif.
diff --git a/deps/cowlib/src/cow_mimetypes.erl b/deps/cowlib/src/cow_mimetypes.erl
new file mode 100644 (file)
index 0000000..69284e1
--- /dev/null
@@ -0,0 +1,1041 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_mimetypes).
+
+-export([all/1]).
+-export([web/1]).
+
+%% @doc Return the mimetype for any file by looking at its extension.
+
+-spec all(binary()) -> {binary(), binary(), []}.
+%% Map a file's extension to its mimetype; octet-stream when it has none.
+all(Path) ->
+	case filename:extension(Path) of
+		<< $., Extension/binary >> -> all_ext(Extension);
+		<<>> -> {<<"application">>, <<"octet-stream">>, []}
+	end.
+
+%% @doc Return the mimetype for a Web related file by looking at its extension.
+
+-spec web(binary()) -> {binary(), binary(), []}.
+%% Same as all/1 but restricted to the Web-related extension table.
+web(Path) ->
+	case filename:extension(Path) of
+		<< $., Extension/binary >> -> web_ext(Extension);
+		<<>> -> {<<"application">>, <<"octet-stream">>, []}
+	end.
+
+%% Internal.
+
+%% GENERATED
+all_ext(<<"123">>) -> {<<"application">>, <<"vnd.lotus-1-2-3">>, []};
+all_ext(<<"3dml">>) -> {<<"text">>, <<"vnd.in3d.3dml">>, []};
+all_ext(<<"3ds">>) -> {<<"image">>, <<"x-3ds">>, []};
+all_ext(<<"3g2">>) -> {<<"video">>, <<"3gpp2">>, []};
+all_ext(<<"3gp">>) -> {<<"video">>, <<"3gpp">>, []};
+all_ext(<<"7z">>) -> {<<"application">>, <<"x-7z-compressed">>, []};
+all_ext(<<"aab">>) -> {<<"application">>, <<"x-authorware-bin">>, []};
+all_ext(<<"aac">>) -> {<<"audio">>, <<"x-aac">>, []};
+all_ext(<<"aam">>) -> {<<"application">>, <<"x-authorware-map">>, []};
+all_ext(<<"aas">>) -> {<<"application">>, <<"x-authorware-seg">>, []};
+all_ext(<<"abw">>) -> {<<"application">>, <<"x-abiword">>, []};
+all_ext(<<"ac">>) -> {<<"application">>, <<"pkix-attr-cert">>, []};
+all_ext(<<"acc">>) -> {<<"application">>, <<"vnd.americandynamics.acc">>, []};
+all_ext(<<"ace">>) -> {<<"application">>, <<"x-ace-compressed">>, []};
+all_ext(<<"acu">>) -> {<<"application">>, <<"vnd.acucobol">>, []};
+all_ext(<<"acutc">>) -> {<<"application">>, <<"vnd.acucorp">>, []};
+all_ext(<<"adp">>) -> {<<"audio">>, <<"adpcm">>, []};
+all_ext(<<"aep">>) -> {<<"application">>, <<"vnd.audiograph">>, []};
+all_ext(<<"afm">>) -> {<<"application">>, <<"x-font-type1">>, []};
+all_ext(<<"afp">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []};
+all_ext(<<"ahead">>) -> {<<"application">>, <<"vnd.ahead.space">>, []};
+all_ext(<<"ai">>) -> {<<"application">>, <<"postscript">>, []};
+all_ext(<<"aif">>) -> {<<"audio">>, <<"x-aiff">>, []};
+all_ext(<<"aifc">>) -> {<<"audio">>, <<"x-aiff">>, []};
+all_ext(<<"aiff">>) -> {<<"audio">>, <<"x-aiff">>, []};
+all_ext(<<"air">>) -> {<<"application">>, <<"vnd.adobe.air-application-installer-package+zip">>, []};
+all_ext(<<"ait">>) -> {<<"application">>, <<"vnd.dvb.ait">>, []};
+all_ext(<<"ami">>) -> {<<"application">>, <<"vnd.amiga.ami">>, []};
+all_ext(<<"apk">>) -> {<<"application">>, <<"vnd.android.package-archive">>, []};
+all_ext(<<"appcache">>) -> {<<"text">>, <<"cache-manifest">>, []};
+all_ext(<<"application">>) -> {<<"application">>, <<"x-ms-application">>, []};
+all_ext(<<"apr">>) -> {<<"application">>, <<"vnd.lotus-approach">>, []};
+all_ext(<<"arc">>) -> {<<"application">>, <<"x-freearc">>, []};
+all_ext(<<"asc">>) -> {<<"application">>, <<"pgp-signature">>, []};
+all_ext(<<"asf">>) -> {<<"video">>, <<"x-ms-asf">>, []};
+all_ext(<<"asm">>) -> {<<"text">>, <<"x-asm">>, []};
+all_ext(<<"aso">>) -> {<<"application">>, <<"vnd.accpac.simply.aso">>, []};
+all_ext(<<"asx">>) -> {<<"video">>, <<"x-ms-asf">>, []};
+all_ext(<<"atc">>) -> {<<"application">>, <<"vnd.acucorp">>, []};
+all_ext(<<"atom">>) -> {<<"application">>, <<"atom+xml">>, []};
+all_ext(<<"atomcat">>) -> {<<"application">>, <<"atomcat+xml">>, []};
+all_ext(<<"atomsvc">>) -> {<<"application">>, <<"atomsvc+xml">>, []};
+all_ext(<<"atx">>) -> {<<"application">>, <<"vnd.antix.game-component">>, []};
+all_ext(<<"au">>) -> {<<"audio">>, <<"basic">>, []};
+all_ext(<<"avi">>) -> {<<"video">>, <<"x-msvideo">>, []};
+all_ext(<<"aw">>) -> {<<"application">>, <<"applixware">>, []};
+all_ext(<<"azf">>) -> {<<"application">>, <<"vnd.airzip.filesecure.azf">>, []};
+all_ext(<<"azs">>) -> {<<"application">>, <<"vnd.airzip.filesecure.azs">>, []};
+all_ext(<<"azw">>) -> {<<"application">>, <<"vnd.amazon.ebook">>, []};
+all_ext(<<"bat">>) -> {<<"application">>, <<"x-msdownload">>, []};
+all_ext(<<"bcpio">>) -> {<<"application">>, <<"x-bcpio">>, []};
+all_ext(<<"bdf">>) -> {<<"application">>, <<"x-font-bdf">>, []};
+all_ext(<<"bdm">>) -> {<<"application">>, <<"vnd.syncml.dm+wbxml">>, []};
+all_ext(<<"bed">>) -> {<<"application">>, <<"vnd.realvnc.bed">>, []};
+all_ext(<<"bh2">>) -> {<<"application">>, <<"vnd.fujitsu.oasysprs">>, []};
+all_ext(<<"bin">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"blb">>) -> {<<"application">>, <<"x-blorb">>, []};
+all_ext(<<"blorb">>) -> {<<"application">>, <<"x-blorb">>, []};
+all_ext(<<"bmi">>) -> {<<"application">>, <<"vnd.bmi">>, []};
+all_ext(<<"bmp">>) -> {<<"image">>, <<"bmp">>, []};
+all_ext(<<"book">>) -> {<<"application">>, <<"vnd.framemaker">>, []};
+all_ext(<<"box">>) -> {<<"application">>, <<"vnd.previewsystems.box">>, []};
+all_ext(<<"boz">>) -> {<<"application">>, <<"x-bzip2">>, []};
+all_ext(<<"bpk">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"btif">>) -> {<<"image">>, <<"prs.btif">>, []};
+all_ext(<<"bz2">>) -> {<<"application">>, <<"x-bzip2">>, []};
+all_ext(<<"bz">>) -> {<<"application">>, <<"x-bzip">>, []};
+all_ext(<<"c11amc">>) -> {<<"application">>, <<"vnd.cluetrust.cartomobile-config">>, []};
+all_ext(<<"c11amz">>) -> {<<"application">>, <<"vnd.cluetrust.cartomobile-config-pkg">>, []};
+all_ext(<<"c4d">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []};
+all_ext(<<"c4f">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []};
+all_ext(<<"c4g">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []};
+all_ext(<<"c4p">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []};
+all_ext(<<"c4u">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []};
+all_ext(<<"cab">>) -> {<<"application">>, <<"vnd.ms-cab-compressed">>, []};
+all_ext(<<"caf">>) -> {<<"audio">>, <<"x-caf">>, []};
+all_ext(<<"cap">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []};
+all_ext(<<"car">>) -> {<<"application">>, <<"vnd.curl.car">>, []};
+all_ext(<<"cat">>) -> {<<"application">>, <<"vnd.ms-pki.seccat">>, []};
+all_ext(<<"cb7">>) -> {<<"application">>, <<"x-cbr">>, []};
+all_ext(<<"cba">>) -> {<<"application">>, <<"x-cbr">>, []};
+all_ext(<<"cbr">>) -> {<<"application">>, <<"x-cbr">>, []};
+all_ext(<<"cbt">>) -> {<<"application">>, <<"x-cbr">>, []};
+all_ext(<<"cbz">>) -> {<<"application">>, <<"x-cbr">>, []};
+all_ext(<<"cct">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"cc">>) -> {<<"text">>, <<"x-c">>, []};
+all_ext(<<"ccxml">>) -> {<<"application">>, <<"ccxml+xml">>, []};
+all_ext(<<"cdbcmsg">>) -> {<<"application">>, <<"vnd.contact.cmsg">>, []};
+all_ext(<<"cdf">>) -> {<<"application">>, <<"x-netcdf">>, []};
+all_ext(<<"cdkey">>) -> {<<"application">>, <<"vnd.mediastation.cdkey">>, []};
+all_ext(<<"cdmia">>) -> {<<"application">>, <<"cdmi-capability">>, []};
+all_ext(<<"cdmic">>) -> {<<"application">>, <<"cdmi-container">>, []};
+all_ext(<<"cdmid">>) -> {<<"application">>, <<"cdmi-domain">>, []};
+all_ext(<<"cdmio">>) -> {<<"application">>, <<"cdmi-object">>, []};
+all_ext(<<"cdmiq">>) -> {<<"application">>, <<"cdmi-queue">>, []};
+all_ext(<<"cdx">>) -> {<<"chemical">>, <<"x-cdx">>, []};
+all_ext(<<"cdxml">>) -> {<<"application">>, <<"vnd.chemdraw+xml">>, []};
+all_ext(<<"cdy">>) -> {<<"application">>, <<"vnd.cinderella">>, []};
+all_ext(<<"cer">>) -> {<<"application">>, <<"pkix-cert">>, []};
+all_ext(<<"cfs">>) -> {<<"application">>, <<"x-cfs-compressed">>, []};
+all_ext(<<"cgm">>) -> {<<"image">>, <<"cgm">>, []};
+all_ext(<<"chat">>) -> {<<"application">>, <<"x-chat">>, []};
+all_ext(<<"chm">>) -> {<<"application">>, <<"vnd.ms-htmlhelp">>, []};
+all_ext(<<"chrt">>) -> {<<"application">>, <<"vnd.kde.kchart">>, []};
+all_ext(<<"cif">>) -> {<<"chemical">>, <<"x-cif">>, []};
+all_ext(<<"cii">>) -> {<<"application">>, <<"vnd.anser-web-certificate-issue-initiation">>, []};
+all_ext(<<"cil">>) -> {<<"application">>, <<"vnd.ms-artgalry">>, []};
+all_ext(<<"cla">>) -> {<<"application">>, <<"vnd.claymore">>, []};
+all_ext(<<"class">>) -> {<<"application">>, <<"java-vm">>, []};
+all_ext(<<"clkk">>) -> {<<"application">>, <<"vnd.crick.clicker.keyboard">>, []};
+all_ext(<<"clkp">>) -> {<<"application">>, <<"vnd.crick.clicker.palette">>, []};
+all_ext(<<"clkt">>) -> {<<"application">>, <<"vnd.crick.clicker.template">>, []};
+all_ext(<<"clkw">>) -> {<<"application">>, <<"vnd.crick.clicker.wordbank">>, []};
+all_ext(<<"clkx">>) -> {<<"application">>, <<"vnd.crick.clicker">>, []};
+all_ext(<<"clp">>) -> {<<"application">>, <<"x-msclip">>, []};
+all_ext(<<"cmc">>) -> {<<"application">>, <<"vnd.cosmocaller">>, []};
+all_ext(<<"cmdf">>) -> {<<"chemical">>, <<"x-cmdf">>, []};
+all_ext(<<"cml">>) -> {<<"chemical">>, <<"x-cml">>, []};
+all_ext(<<"cmp">>) -> {<<"application">>, <<"vnd.yellowriver-custom-menu">>, []};
+all_ext(<<"cmx">>) -> {<<"image">>, <<"x-cmx">>, []};
+all_ext(<<"cod">>) -> {<<"application">>, <<"vnd.rim.cod">>, []};
+all_ext(<<"com">>) -> {<<"application">>, <<"x-msdownload">>, []};
+all_ext(<<"conf">>) -> {<<"text">>, <<"plain">>, []};
+all_ext(<<"cpio">>) -> {<<"application">>, <<"x-cpio">>, []};
+all_ext(<<"cpp">>) -> {<<"text">>, <<"x-c">>, []};
+all_ext(<<"cpt">>) -> {<<"application">>, <<"mac-compactpro">>, []};
+all_ext(<<"crd">>) -> {<<"application">>, <<"x-mscardfile">>, []};
+all_ext(<<"crl">>) -> {<<"application">>, <<"pkix-crl">>, []};
+all_ext(<<"crt">>) -> {<<"application">>, <<"x-x509-ca-cert">>, []};
+all_ext(<<"cryptonote">>) -> {<<"application">>, <<"vnd.rig.cryptonote">>, []};
+all_ext(<<"csh">>) -> {<<"application">>, <<"x-csh">>, []};
+all_ext(<<"csml">>) -> {<<"chemical">>, <<"x-csml">>, []};
+all_ext(<<"csp">>) -> {<<"application">>, <<"vnd.commonspace">>, []};
+all_ext(<<"css">>) -> {<<"text">>, <<"css">>, []};
+all_ext(<<"cst">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"csv">>) -> {<<"text">>, <<"csv">>, []};
+all_ext(<<"c">>) -> {<<"text">>, <<"x-c">>, []};
+all_ext(<<"cu">>) -> {<<"application">>, <<"cu-seeme">>, []};
+all_ext(<<"curl">>) -> {<<"text">>, <<"vnd.curl">>, []};
+all_ext(<<"cww">>) -> {<<"application">>, <<"prs.cww">>, []};
+all_ext(<<"cxt">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"cxx">>) -> {<<"text">>, <<"x-c">>, []};
+all_ext(<<"dae">>) -> {<<"model">>, <<"vnd.collada+xml">>, []};
+all_ext(<<"daf">>) -> {<<"application">>, <<"vnd.mobius.daf">>, []};
+all_ext(<<"dart">>) -> {<<"application">>, <<"vnd.dart">>, []};
+all_ext(<<"dataless">>) -> {<<"application">>, <<"vnd.fdsn.seed">>, []};
+all_ext(<<"davmount">>) -> {<<"application">>, <<"davmount+xml">>, []};
+all_ext(<<"dbk">>) -> {<<"application">>, <<"docbook+xml">>, []};
+all_ext(<<"dcr">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"dcurl">>) -> {<<"text">>, <<"vnd.curl.dcurl">>, []};
+all_ext(<<"dd2">>) -> {<<"application">>, <<"vnd.oma.dd2+xml">>, []};
+all_ext(<<"ddd">>) -> {<<"application">>, <<"vnd.fujixerox.ddd">>, []};
+all_ext(<<"deb">>) -> {<<"application">>, <<"x-debian-package">>, []};
+all_ext(<<"def">>) -> {<<"text">>, <<"plain">>, []};
+all_ext(<<"deploy">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"der">>) -> {<<"application">>, <<"x-x509-ca-cert">>, []};
+all_ext(<<"dfac">>) -> {<<"application">>, <<"vnd.dreamfactory">>, []};
+all_ext(<<"dgc">>) -> {<<"application">>, <<"x-dgc-compressed">>, []};
+all_ext(<<"dic">>) -> {<<"text">>, <<"x-c">>, []};
+all_ext(<<"dir">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"dis">>) -> {<<"application">>, <<"vnd.mobius.dis">>, []};
+all_ext(<<"dist">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"distz">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"djv">>) -> {<<"image">>, <<"vnd.djvu">>, []};
+all_ext(<<"djvu">>) -> {<<"image">>, <<"vnd.djvu">>, []};
+all_ext(<<"dll">>) -> {<<"application">>, <<"x-msdownload">>, []};
+all_ext(<<"dmg">>) -> {<<"application">>, <<"x-apple-diskimage">>, []};
+all_ext(<<"dmp">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []};
+all_ext(<<"dms">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"dna">>) -> {<<"application">>, <<"vnd.dna">>, []};
+all_ext(<<"doc">>) -> {<<"application">>, <<"msword">>, []};
+all_ext(<<"docm">>) -> {<<"application">>, <<"vnd.ms-word.document.macroenabled.12">>, []};
+all_ext(<<"docx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.wordprocessingml.document">>, []};
+all_ext(<<"dot">>) -> {<<"application">>, <<"msword">>, []};
+all_ext(<<"dotm">>) -> {<<"application">>, <<"vnd.ms-word.template.macroenabled.12">>, []};
+all_ext(<<"dotx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.wordprocessingml.template">>, []};
+all_ext(<<"dp">>) -> {<<"application">>, <<"vnd.osgi.dp">>, []};
+all_ext(<<"dpg">>) -> {<<"application">>, <<"vnd.dpgraph">>, []};
+all_ext(<<"dra">>) -> {<<"audio">>, <<"vnd.dra">>, []};
+all_ext(<<"dsc">>) -> {<<"text">>, <<"prs.lines.tag">>, []};
+all_ext(<<"dssc">>) -> {<<"application">>, <<"dssc+der">>, []};
+all_ext(<<"dtb">>) -> {<<"application">>, <<"x-dtbook+xml">>, []};
+all_ext(<<"dtd">>) -> {<<"application">>, <<"xml-dtd">>, []};
+all_ext(<<"dts">>) -> {<<"audio">>, <<"vnd.dts">>, []};
+all_ext(<<"dtshd">>) -> {<<"audio">>, <<"vnd.dts.hd">>, []};
+all_ext(<<"dump">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"dvb">>) -> {<<"video">>, <<"vnd.dvb.file">>, []};
+all_ext(<<"dvi">>) -> {<<"application">>, <<"x-dvi">>, []};
+all_ext(<<"dwf">>) -> {<<"model">>, <<"vnd.dwf">>, []};
+all_ext(<<"dwg">>) -> {<<"image">>, <<"vnd.dwg">>, []};
+all_ext(<<"dxf">>) -> {<<"image">>, <<"vnd.dxf">>, []};
+all_ext(<<"dxp">>) -> {<<"application">>, <<"vnd.spotfire.dxp">>, []};
+all_ext(<<"dxr">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"ecelp4800">>) -> {<<"audio">>, <<"vnd.nuera.ecelp4800">>, []};
+all_ext(<<"ecelp7470">>) -> {<<"audio">>, <<"vnd.nuera.ecelp7470">>, []};
+all_ext(<<"ecelp9600">>) -> {<<"audio">>, <<"vnd.nuera.ecelp9600">>, []};
+all_ext(<<"ecma">>) -> {<<"application">>, <<"ecmascript">>, []};
+all_ext(<<"edm">>) -> {<<"application">>, <<"vnd.novadigm.edm">>, []};
+all_ext(<<"edx">>) -> {<<"application">>, <<"vnd.novadigm.edx">>, []};
+all_ext(<<"efif">>) -> {<<"application">>, <<"vnd.picsel">>, []};
+all_ext(<<"ei6">>) -> {<<"application">>, <<"vnd.pg.osasli">>, []};
+all_ext(<<"elc">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"emf">>) -> {<<"application">>, <<"x-msmetafile">>, []};
+all_ext(<<"eml">>) -> {<<"message">>, <<"rfc822">>, []};
+all_ext(<<"emma">>) -> {<<"application">>, <<"emma+xml">>, []};
+all_ext(<<"emz">>) -> {<<"application">>, <<"x-msmetafile">>, []};
+all_ext(<<"eol">>) -> {<<"audio">>, <<"vnd.digital-winds">>, []};
+all_ext(<<"eot">>) -> {<<"application">>, <<"vnd.ms-fontobject">>, []};
+all_ext(<<"eps">>) -> {<<"application">>, <<"postscript">>, []};
+all_ext(<<"epub">>) -> {<<"application">>, <<"epub+zip">>, []};
+all_ext(<<"es3">>) -> {<<"application">>, <<"vnd.eszigno3+xml">>, []};
+all_ext(<<"esa">>) -> {<<"application">>, <<"vnd.osgi.subsystem">>, []};
+all_ext(<<"esf">>) -> {<<"application">>, <<"vnd.epson.esf">>, []};
+all_ext(<<"et3">>) -> {<<"application">>, <<"vnd.eszigno3+xml">>, []};
+all_ext(<<"etx">>) -> {<<"text">>, <<"x-setext">>, []};
+all_ext(<<"eva">>) -> {<<"application">>, <<"x-eva">>, []};
+all_ext(<<"evy">>) -> {<<"application">>, <<"x-envoy">>, []};
+all_ext(<<"exe">>) -> {<<"application">>, <<"x-msdownload">>, []};
+all_ext(<<"exi">>) -> {<<"application">>, <<"exi">>, []};
+all_ext(<<"ext">>) -> {<<"application">>, <<"vnd.novadigm.ext">>, []};
+all_ext(<<"ez2">>) -> {<<"application">>, <<"vnd.ezpix-album">>, []};
+all_ext(<<"ez3">>) -> {<<"application">>, <<"vnd.ezpix-package">>, []};
+all_ext(<<"ez">>) -> {<<"application">>, <<"andrew-inset">>, []};
+all_ext(<<"f4v">>) -> {<<"video">>, <<"x-f4v">>, []};
+all_ext(<<"f77">>) -> {<<"text">>, <<"x-fortran">>, []};
+all_ext(<<"f90">>) -> {<<"text">>, <<"x-fortran">>, []};
+all_ext(<<"fbs">>) -> {<<"image">>, <<"vnd.fastbidsheet">>, []};
+all_ext(<<"fcdt">>) -> {<<"application">>, <<"vnd.adobe.formscentral.fcdt">>, []};
+all_ext(<<"fcs">>) -> {<<"application">>, <<"vnd.isac.fcs">>, []};
+all_ext(<<"fdf">>) -> {<<"application">>, <<"vnd.fdf">>, []};
+all_ext(<<"fe_launch">>) -> {<<"application">>, <<"vnd.denovo.fcselayout-link">>, []};
+all_ext(<<"fg5">>) -> {<<"application">>, <<"vnd.fujitsu.oasysgp">>, []};
+all_ext(<<"fgd">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"fh4">>) -> {<<"image">>, <<"x-freehand">>, []};
+all_ext(<<"fh5">>) -> {<<"image">>, <<"x-freehand">>, []};
+all_ext(<<"fh7">>) -> {<<"image">>, <<"x-freehand">>, []};
+all_ext(<<"fhc">>) -> {<<"image">>, <<"x-freehand">>, []};
+all_ext(<<"fh">>) -> {<<"image">>, <<"x-freehand">>, []};
+all_ext(<<"fig">>) -> {<<"application">>, <<"x-xfig">>, []};
+all_ext(<<"flac">>) -> {<<"audio">>, <<"x-flac">>, []};
+all_ext(<<"fli">>) -> {<<"video">>, <<"x-fli">>, []};
+all_ext(<<"flo">>) -> {<<"application">>, <<"vnd.micrografx.flo">>, []};
+all_ext(<<"flv">>) -> {<<"video">>, <<"x-flv">>, []};
+all_ext(<<"flw">>) -> {<<"application">>, <<"vnd.kde.kivio">>, []};
+all_ext(<<"flx">>) -> {<<"text">>, <<"vnd.fmi.flexstor">>, []};
+all_ext(<<"fly">>) -> {<<"text">>, <<"vnd.fly">>, []};
+all_ext(<<"fm">>) -> {<<"application">>, <<"vnd.framemaker">>, []};
+all_ext(<<"fnc">>) -> {<<"application">>, <<"vnd.frogans.fnc">>, []};
+all_ext(<<"for">>) -> {<<"text">>, <<"x-fortran">>, []};
+all_ext(<<"fpx">>) -> {<<"image">>, <<"vnd.fpx">>, []};
+all_ext(<<"frame">>) -> {<<"application">>, <<"vnd.framemaker">>, []};
+all_ext(<<"fsc">>) -> {<<"application">>, <<"vnd.fsc.weblaunch">>, []};
+all_ext(<<"fst">>) -> {<<"image">>, <<"vnd.fst">>, []};
+all_ext(<<"ftc">>) -> {<<"application">>, <<"vnd.fluxtime.clip">>, []};
+all_ext(<<"f">>) -> {<<"text">>, <<"x-fortran">>, []};
+all_ext(<<"fti">>) -> {<<"application">>, <<"vnd.anser-web-funds-transfer-initiation">>, []};
+all_ext(<<"fvt">>) -> {<<"video">>, <<"vnd.fvt">>, []};
+all_ext(<<"fxp">>) -> {<<"application">>, <<"vnd.adobe.fxp">>, []};
+all_ext(<<"fxpl">>) -> {<<"application">>, <<"vnd.adobe.fxp">>, []};
+all_ext(<<"fzs">>) -> {<<"application">>, <<"vnd.fuzzysheet">>, []};
+all_ext(<<"g2w">>) -> {<<"application">>, <<"vnd.geoplan">>, []};
+all_ext(<<"g3">>) -> {<<"image">>, <<"g3fax">>, []};
+all_ext(<<"g3w">>) -> {<<"application">>, <<"vnd.geospace">>, []};
+all_ext(<<"gac">>) -> {<<"application">>, <<"vnd.groove-account">>, []};
+all_ext(<<"gam">>) -> {<<"application">>, <<"x-tads">>, []};
+all_ext(<<"gbr">>) -> {<<"application">>, <<"rpki-ghostbusters">>, []};
+all_ext(<<"gca">>) -> {<<"application">>, <<"x-gca-compressed">>, []};
+all_ext(<<"gdl">>) -> {<<"model">>, <<"vnd.gdl">>, []};
+all_ext(<<"geo">>) -> {<<"application">>, <<"vnd.dynageo">>, []};
+all_ext(<<"gex">>) -> {<<"application">>, <<"vnd.geometry-explorer">>, []};
+all_ext(<<"ggb">>) -> {<<"application">>, <<"vnd.geogebra.file">>, []};
+all_ext(<<"ggt">>) -> {<<"application">>, <<"vnd.geogebra.tool">>, []};
+all_ext(<<"ghf">>) -> {<<"application">>, <<"vnd.groove-help">>, []};
+all_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []};
+all_ext(<<"gim">>) -> {<<"application">>, <<"vnd.groove-identity-message">>, []};
+all_ext(<<"gml">>) -> {<<"application">>, <<"gml+xml">>, []};
+all_ext(<<"gmx">>) -> {<<"application">>, <<"vnd.gmx">>, []};
+all_ext(<<"gnumeric">>) -> {<<"application">>, <<"x-gnumeric">>, []};
+all_ext(<<"gph">>) -> {<<"application">>, <<"vnd.flographit">>, []};
+all_ext(<<"gpx">>) -> {<<"application">>, <<"gpx+xml">>, []};
+all_ext(<<"gqf">>) -> {<<"application">>, <<"vnd.grafeq">>, []};
+all_ext(<<"gqs">>) -> {<<"application">>, <<"vnd.grafeq">>, []};
+all_ext(<<"gram">>) -> {<<"application">>, <<"srgs">>, []};
+all_ext(<<"gramps">>) -> {<<"application">>, <<"x-gramps-xml">>, []};
+all_ext(<<"gre">>) -> {<<"application">>, <<"vnd.geometry-explorer">>, []};
+all_ext(<<"grv">>) -> {<<"application">>, <<"vnd.groove-injector">>, []};
+all_ext(<<"grxml">>) -> {<<"application">>, <<"srgs+xml">>, []};
+all_ext(<<"gsf">>) -> {<<"application">>, <<"x-font-ghostscript">>, []};
+all_ext(<<"gtar">>) -> {<<"application">>, <<"x-gtar">>, []};
+all_ext(<<"gtm">>) -> {<<"application">>, <<"vnd.groove-tool-message">>, []};
+all_ext(<<"gtw">>) -> {<<"model">>, <<"vnd.gtw">>, []};
+all_ext(<<"gv">>) -> {<<"text">>, <<"vnd.graphviz">>, []};
+all_ext(<<"gxf">>) -> {<<"application">>, <<"gxf">>, []};
+all_ext(<<"gxt">>) -> {<<"application">>, <<"vnd.geonext">>, []};
+all_ext(<<"h261">>) -> {<<"video">>, <<"h261">>, []};
+all_ext(<<"h263">>) -> {<<"video">>, <<"h263">>, []};
+all_ext(<<"h264">>) -> {<<"video">>, <<"h264">>, []};
+all_ext(<<"hal">>) -> {<<"application">>, <<"vnd.hal+xml">>, []};
+all_ext(<<"hbci">>) -> {<<"application">>, <<"vnd.hbci">>, []};
+all_ext(<<"hdf">>) -> {<<"application">>, <<"x-hdf">>, []};
+all_ext(<<"hh">>) -> {<<"text">>, <<"x-c">>, []};
+all_ext(<<"hlp">>) -> {<<"application">>, <<"winhlp">>, []};
+all_ext(<<"hpgl">>) -> {<<"application">>, <<"vnd.hp-hpgl">>, []};
+all_ext(<<"hpid">>) -> {<<"application">>, <<"vnd.hp-hpid">>, []};
+all_ext(<<"hps">>) -> {<<"application">>, <<"vnd.hp-hps">>, []};
+all_ext(<<"hqx">>) -> {<<"application">>, <<"mac-binhex40">>, []};
+all_ext(<<"h">>) -> {<<"text">>, <<"x-c">>, []};
+all_ext(<<"htke">>) -> {<<"application">>, <<"vnd.kenameaapp">>, []};
+all_ext(<<"html">>) -> {<<"text">>, <<"html">>, []};
+all_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []};
+all_ext(<<"hvd">>) -> {<<"application">>, <<"vnd.yamaha.hv-dic">>, []};
+all_ext(<<"hvp">>) -> {<<"application">>, <<"vnd.yamaha.hv-voice">>, []};
+all_ext(<<"hvs">>) -> {<<"application">>, <<"vnd.yamaha.hv-script">>, []};
+all_ext(<<"i2g">>) -> {<<"application">>, <<"vnd.intergeo">>, []};
+all_ext(<<"icc">>) -> {<<"application">>, <<"vnd.iccprofile">>, []};
+all_ext(<<"ice">>) -> {<<"x-conference">>, <<"x-cooltalk">>, []};
+all_ext(<<"icm">>) -> {<<"application">>, <<"vnd.iccprofile">>, []};
+all_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []};
+all_ext(<<"ics">>) -> {<<"text">>, <<"calendar">>, []};
+all_ext(<<"ief">>) -> {<<"image">>, <<"ief">>, []};
+all_ext(<<"ifb">>) -> {<<"text">>, <<"calendar">>, []};
+all_ext(<<"ifm">>) -> {<<"application">>, <<"vnd.shana.informed.formdata">>, []};
+all_ext(<<"iges">>) -> {<<"model">>, <<"iges">>, []};
+all_ext(<<"igl">>) -> {<<"application">>, <<"vnd.igloader">>, []};
+all_ext(<<"igm">>) -> {<<"application">>, <<"vnd.insors.igm">>, []};
+all_ext(<<"igs">>) -> {<<"model">>, <<"iges">>, []};
+all_ext(<<"igx">>) -> {<<"application">>, <<"vnd.micrografx.igx">>, []};
+all_ext(<<"iif">>) -> {<<"application">>, <<"vnd.shana.informed.interchange">>, []};
+all_ext(<<"imp">>) -> {<<"application">>, <<"vnd.accpac.simply.imp">>, []};
+all_ext(<<"ims">>) -> {<<"application">>, <<"vnd.ms-ims">>, []};
+all_ext(<<"ink">>) -> {<<"application">>, <<"inkml+xml">>, []};
+all_ext(<<"inkml">>) -> {<<"application">>, <<"inkml+xml">>, []};
+all_ext(<<"install">>) -> {<<"application">>, <<"x-install-instructions">>, []};
+all_ext(<<"in">>) -> {<<"text">>, <<"plain">>, []};
+all_ext(<<"iota">>) -> {<<"application">>, <<"vnd.astraea-software.iota">>, []};
+all_ext(<<"ipfix">>) -> {<<"application">>, <<"ipfix">>, []};
+all_ext(<<"ipk">>) -> {<<"application">>, <<"vnd.shana.informed.package">>, []};
+all_ext(<<"irm">>) -> {<<"application">>, <<"vnd.ibm.rights-management">>, []};
+all_ext(<<"irp">>) -> {<<"application">>, <<"vnd.irepository.package+xml">>, []};
+all_ext(<<"iso">>) -> {<<"application">>, <<"x-iso9660-image">>, []};
+all_ext(<<"itp">>) -> {<<"application">>, <<"vnd.shana.informed.formtemplate">>, []};
+all_ext(<<"ivp">>) -> {<<"application">>, <<"vnd.immervision-ivp">>, []};
+all_ext(<<"ivu">>) -> {<<"application">>, <<"vnd.immervision-ivu">>, []};
+all_ext(<<"jad">>) -> {<<"text">>, <<"vnd.sun.j2me.app-descriptor">>, []};
+all_ext(<<"jam">>) -> {<<"application">>, <<"vnd.jam">>, []};
+all_ext(<<"jar">>) -> {<<"application">>, <<"java-archive">>, []};
+all_ext(<<"java">>) -> {<<"text">>, <<"x-java-source">>, []};
+all_ext(<<"jisp">>) -> {<<"application">>, <<"vnd.jisp">>, []};
+all_ext(<<"jlt">>) -> {<<"application">>, <<"vnd.hp-jlyt">>, []};
+all_ext(<<"jnlp">>) -> {<<"application">>, <<"x-java-jnlp-file">>, []};
+all_ext(<<"joda">>) -> {<<"application">>, <<"vnd.joost.joda-archive">>, []};
+all_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []};
+all_ext(<<"jpe">>) -> {<<"image">>, <<"jpeg">>, []};
+all_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []};
+all_ext(<<"jpgm">>) -> {<<"video">>, <<"jpm">>, []};
+all_ext(<<"jpgv">>) -> {<<"video">>, <<"jpeg">>, []};
+all_ext(<<"jpm">>) -> {<<"video">>, <<"jpm">>, []};
+all_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []};
+all_ext(<<"json">>) -> {<<"application">>, <<"json">>, []};
+all_ext(<<"jsonml">>) -> {<<"application">>, <<"jsonml+json">>, []};
+all_ext(<<"kar">>) -> {<<"audio">>, <<"midi">>, []};
+all_ext(<<"karbon">>) -> {<<"application">>, <<"vnd.kde.karbon">>, []};
+all_ext(<<"kfo">>) -> {<<"application">>, <<"vnd.kde.kformula">>, []};
+all_ext(<<"kia">>) -> {<<"application">>, <<"vnd.kidspiration">>, []};
+all_ext(<<"kml">>) -> {<<"application">>, <<"vnd.google-earth.kml+xml">>, []};
+all_ext(<<"kmz">>) -> {<<"application">>, <<"vnd.google-earth.kmz">>, []};
+all_ext(<<"kne">>) -> {<<"application">>, <<"vnd.kinar">>, []};
+all_ext(<<"knp">>) -> {<<"application">>, <<"vnd.kinar">>, []};
+all_ext(<<"kon">>) -> {<<"application">>, <<"vnd.kde.kontour">>, []};
+all_ext(<<"kpr">>) -> {<<"application">>, <<"vnd.kde.kpresenter">>, []};
+all_ext(<<"kpt">>) -> {<<"application">>, <<"vnd.kde.kpresenter">>, []};
+all_ext(<<"kpxx">>) -> {<<"application">>, <<"vnd.ds-keypoint">>, []};
+all_ext(<<"ksp">>) -> {<<"application">>, <<"vnd.kde.kspread">>, []};
+all_ext(<<"ktr">>) -> {<<"application">>, <<"vnd.kahootz">>, []};
+all_ext(<<"ktx">>) -> {<<"image">>, <<"ktx">>, []};
+all_ext(<<"ktz">>) -> {<<"application">>, <<"vnd.kahootz">>, []};
+all_ext(<<"kwd">>) -> {<<"application">>, <<"vnd.kde.kword">>, []};
+all_ext(<<"kwt">>) -> {<<"application">>, <<"vnd.kde.kword">>, []};
+all_ext(<<"lasxml">>) -> {<<"application">>, <<"vnd.las.las+xml">>, []};
+all_ext(<<"latex">>) -> {<<"application">>, <<"x-latex">>, []};
+all_ext(<<"lbd">>) -> {<<"application">>, <<"vnd.llamagraphics.life-balance.desktop">>, []};
+all_ext(<<"lbe">>) -> {<<"application">>, <<"vnd.llamagraphics.life-balance.exchange+xml">>, []};
+all_ext(<<"les">>) -> {<<"application">>, <<"vnd.hhe.lesson-player">>, []};
+all_ext(<<"lha">>) -> {<<"application">>, <<"x-lzh-compressed">>, []};
+all_ext(<<"link66">>) -> {<<"application">>, <<"vnd.route66.link66+xml">>, []};
+all_ext(<<"list3820">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []};
+all_ext(<<"listafp">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []};
+all_ext(<<"list">>) -> {<<"text">>, <<"plain">>, []};
+all_ext(<<"lnk">>) -> {<<"application">>, <<"x-ms-shortcut">>, []};
+all_ext(<<"log">>) -> {<<"text">>, <<"plain">>, []};
+all_ext(<<"lostxml">>) -> {<<"application">>, <<"lost+xml">>, []};
+all_ext(<<"lrf">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"lrm">>) -> {<<"application">>, <<"vnd.ms-lrm">>, []};
+all_ext(<<"ltf">>) -> {<<"application">>, <<"vnd.frogans.ltf">>, []};
+all_ext(<<"lvp">>) -> {<<"audio">>, <<"vnd.lucent.voice">>, []};
+all_ext(<<"lwp">>) -> {<<"application">>, <<"vnd.lotus-wordpro">>, []};
+all_ext(<<"lzh">>) -> {<<"application">>, <<"x-lzh-compressed">>, []};
+all_ext(<<"m13">>) -> {<<"application">>, <<"x-msmediaview">>, []};
+all_ext(<<"m14">>) -> {<<"application">>, <<"x-msmediaview">>, []};
+all_ext(<<"m1v">>) -> {<<"video">>, <<"mpeg">>, []};
+all_ext(<<"m21">>) -> {<<"application">>, <<"mp21">>, []};
+all_ext(<<"m2a">>) -> {<<"audio">>, <<"mpeg">>, []};
+all_ext(<<"m2v">>) -> {<<"video">>, <<"mpeg">>, []};
+all_ext(<<"m3a">>) -> {<<"audio">>, <<"mpeg">>, []};
+all_ext(<<"m3u8">>) -> {<<"application">>, <<"vnd.apple.mpegurl">>, []};
+all_ext(<<"m3u">>) -> {<<"audio">>, <<"x-mpegurl">>, []};
+all_ext(<<"m4u">>) -> {<<"video">>, <<"vnd.mpegurl">>, []};
+all_ext(<<"m4v">>) -> {<<"video">>, <<"x-m4v">>, []};
+all_ext(<<"ma">>) -> {<<"application">>, <<"mathematica">>, []};
+all_ext(<<"mads">>) -> {<<"application">>, <<"mads+xml">>, []};
+all_ext(<<"mag">>) -> {<<"application">>, <<"vnd.ecowin.chart">>, []};
+all_ext(<<"maker">>) -> {<<"application">>, <<"vnd.framemaker">>, []};
+all_ext(<<"man">>) -> {<<"text">>, <<"troff">>, []};
+all_ext(<<"mar">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"mathml">>) -> {<<"application">>, <<"mathml+xml">>, []};
+all_ext(<<"mb">>) -> {<<"application">>, <<"mathematica">>, []};
+all_ext(<<"mbk">>) -> {<<"application">>, <<"vnd.mobius.mbk">>, []};
+all_ext(<<"mbox">>) -> {<<"application">>, <<"mbox">>, []};
+all_ext(<<"mc1">>) -> {<<"application">>, <<"vnd.medcalcdata">>, []};
+all_ext(<<"mcd">>) -> {<<"application">>, <<"vnd.mcd">>, []};
+all_ext(<<"mcurl">>) -> {<<"text">>, <<"vnd.curl.mcurl">>, []};
+all_ext(<<"mdb">>) -> {<<"application">>, <<"x-msaccess">>, []};
+all_ext(<<"mdi">>) -> {<<"image">>, <<"vnd.ms-modi">>, []};
+all_ext(<<"mesh">>) -> {<<"model">>, <<"mesh">>, []};
+all_ext(<<"meta4">>) -> {<<"application">>, <<"metalink4+xml">>, []};
+all_ext(<<"metalink">>) -> {<<"application">>, <<"metalink+xml">>, []};
+all_ext(<<"me">>) -> {<<"text">>, <<"troff">>, []};
+all_ext(<<"mets">>) -> {<<"application">>, <<"mets+xml">>, []};
+all_ext(<<"mfm">>) -> {<<"application">>, <<"vnd.mfmp">>, []};
+all_ext(<<"mft">>) -> {<<"application">>, <<"rpki-manifest">>, []};
+all_ext(<<"mgp">>) -> {<<"application">>, <<"vnd.osgeo.mapguide.package">>, []};
+all_ext(<<"mgz">>) -> {<<"application">>, <<"vnd.proteus.magazine">>, []};
+all_ext(<<"mid">>) -> {<<"audio">>, <<"midi">>, []};
+all_ext(<<"midi">>) -> {<<"audio">>, <<"midi">>, []};
+all_ext(<<"mie">>) -> {<<"application">>, <<"x-mie">>, []};
+all_ext(<<"mif">>) -> {<<"application">>, <<"vnd.mif">>, []};
+all_ext(<<"mime">>) -> {<<"message">>, <<"rfc822">>, []};
+all_ext(<<"mj2">>) -> {<<"video">>, <<"mj2">>, []};
+all_ext(<<"mjp2">>) -> {<<"video">>, <<"mj2">>, []};
+all_ext(<<"mk3d">>) -> {<<"video">>, <<"x-matroska">>, []};
+all_ext(<<"mka">>) -> {<<"audio">>, <<"x-matroska">>, []};
+all_ext(<<"mks">>) -> {<<"video">>, <<"x-matroska">>, []};
+all_ext(<<"mkv">>) -> {<<"video">>, <<"x-matroska">>, []};
+all_ext(<<"mlp">>) -> {<<"application">>, <<"vnd.dolby.mlp">>, []};
+all_ext(<<"mmd">>) -> {<<"application">>, <<"vnd.chipnuts.karaoke-mmd">>, []};
+all_ext(<<"mmf">>) -> {<<"application">>, <<"vnd.smaf">>, []};
+all_ext(<<"mmr">>) -> {<<"image">>, <<"vnd.fujixerox.edmics-mmr">>, []};
+all_ext(<<"mng">>) -> {<<"video">>, <<"x-mng">>, []};
+all_ext(<<"mny">>) -> {<<"application">>, <<"x-msmoney">>, []};
+all_ext(<<"mobi">>) -> {<<"application">>, <<"x-mobipocket-ebook">>, []};
+all_ext(<<"mods">>) -> {<<"application">>, <<"mods+xml">>, []};
+all_ext(<<"movie">>) -> {<<"video">>, <<"x-sgi-movie">>, []};
+all_ext(<<"mov">>) -> {<<"video">>, <<"quicktime">>, []};
+all_ext(<<"mp21">>) -> {<<"application">>, <<"mp21">>, []};
+all_ext(<<"mp2a">>) -> {<<"audio">>, <<"mpeg">>, []};
+all_ext(<<"mp2">>) -> {<<"audio">>, <<"mpeg">>, []};
+all_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []};
+all_ext(<<"mp4a">>) -> {<<"audio">>, <<"mp4">>, []};
+all_ext(<<"mp4s">>) -> {<<"application">>, <<"mp4">>, []};
+all_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []};
+all_ext(<<"mp4v">>) -> {<<"video">>, <<"mp4">>, []};
+all_ext(<<"mpc">>) -> {<<"application">>, <<"vnd.mophun.certificate">>, []};
+all_ext(<<"mpeg">>) -> {<<"video">>, <<"mpeg">>, []};
+all_ext(<<"mpe">>) -> {<<"video">>, <<"mpeg">>, []};
+all_ext(<<"mpg4">>) -> {<<"video">>, <<"mp4">>, []};
+all_ext(<<"mpga">>) -> {<<"audio">>, <<"mpeg">>, []};
+all_ext(<<"mpg">>) -> {<<"video">>, <<"mpeg">>, []};
+all_ext(<<"mpkg">>) -> {<<"application">>, <<"vnd.apple.installer+xml">>, []};
+all_ext(<<"mpm">>) -> {<<"application">>, <<"vnd.blueice.multipass">>, []};
+all_ext(<<"mpn">>) -> {<<"application">>, <<"vnd.mophun.application">>, []};
+all_ext(<<"mpp">>) -> {<<"application">>, <<"vnd.ms-project">>, []};
+all_ext(<<"mpt">>) -> {<<"application">>, <<"vnd.ms-project">>, []};
+all_ext(<<"mpy">>) -> {<<"application">>, <<"vnd.ibm.minipay">>, []};
+all_ext(<<"mqy">>) -> {<<"application">>, <<"vnd.mobius.mqy">>, []};
+all_ext(<<"mrc">>) -> {<<"application">>, <<"marc">>, []};
+all_ext(<<"mrcx">>) -> {<<"application">>, <<"marcxml+xml">>, []};
+all_ext(<<"mscml">>) -> {<<"application">>, <<"mediaservercontrol+xml">>, []};
+all_ext(<<"mseed">>) -> {<<"application">>, <<"vnd.fdsn.mseed">>, []};
+all_ext(<<"mseq">>) -> {<<"application">>, <<"vnd.mseq">>, []};
+all_ext(<<"msf">>) -> {<<"application">>, <<"vnd.epson.msf">>, []};
+all_ext(<<"msh">>) -> {<<"model">>, <<"mesh">>, []};
+all_ext(<<"msi">>) -> {<<"application">>, <<"x-msdownload">>, []};
+all_ext(<<"msl">>) -> {<<"application">>, <<"vnd.mobius.msl">>, []};
+all_ext(<<"ms">>) -> {<<"text">>, <<"troff">>, []};
+all_ext(<<"msty">>) -> {<<"application">>, <<"vnd.muvee.style">>, []};
+all_ext(<<"mts">>) -> {<<"model">>, <<"vnd.mts">>, []};
+all_ext(<<"mus">>) -> {<<"application">>, <<"vnd.musician">>, []};
+all_ext(<<"musicxml">>) -> {<<"application">>, <<"vnd.recordare.musicxml+xml">>, []};
+all_ext(<<"mvb">>) -> {<<"application">>, <<"x-msmediaview">>, []};
+all_ext(<<"mwf">>) -> {<<"application">>, <<"vnd.mfer">>, []};
+all_ext(<<"mxf">>) -> {<<"application">>, <<"mxf">>, []};
+all_ext(<<"mxl">>) -> {<<"application">>, <<"vnd.recordare.musicxml">>, []};
+all_ext(<<"mxml">>) -> {<<"application">>, <<"xv+xml">>, []};
+all_ext(<<"mxs">>) -> {<<"application">>, <<"vnd.triscape.mxs">>, []};
+all_ext(<<"mxu">>) -> {<<"video">>, <<"vnd.mpegurl">>, []};
+all_ext(<<"n3">>) -> {<<"text">>, <<"n3">>, []};
+all_ext(<<"nb">>) -> {<<"application">>, <<"mathematica">>, []};
+all_ext(<<"nbp">>) -> {<<"application">>, <<"vnd.wolfram.player">>, []};
+all_ext(<<"nc">>) -> {<<"application">>, <<"x-netcdf">>, []};
+all_ext(<<"ncx">>) -> {<<"application">>, <<"x-dtbncx+xml">>, []};
+all_ext(<<"nfo">>) -> {<<"text">>, <<"x-nfo">>, []};
+all_ext(<<"n-gage">>) -> {<<"application">>, <<"vnd.nokia.n-gage.symbian.install">>, []};
+all_ext(<<"ngdat">>) -> {<<"application">>, <<"vnd.nokia.n-gage.data">>, []};
+all_ext(<<"nitf">>) -> {<<"application">>, <<"vnd.nitf">>, []};
+all_ext(<<"nlu">>) -> {<<"application">>, <<"vnd.neurolanguage.nlu">>, []};
+all_ext(<<"nml">>) -> {<<"application">>, <<"vnd.enliven">>, []};
+all_ext(<<"nnd">>) -> {<<"application">>, <<"vnd.noblenet-directory">>, []};
+all_ext(<<"nns">>) -> {<<"application">>, <<"vnd.noblenet-sealer">>, []};
+all_ext(<<"nnw">>) -> {<<"application">>, <<"vnd.noblenet-web">>, []};
+all_ext(<<"npx">>) -> {<<"image">>, <<"vnd.net-fpx">>, []};
+all_ext(<<"nsc">>) -> {<<"application">>, <<"x-conference">>, []};
+all_ext(<<"nsf">>) -> {<<"application">>, <<"vnd.lotus-notes">>, []};
+all_ext(<<"ntf">>) -> {<<"application">>, <<"vnd.nitf">>, []};
+all_ext(<<"nzb">>) -> {<<"application">>, <<"x-nzb">>, []};
+all_ext(<<"oa2">>) -> {<<"application">>, <<"vnd.fujitsu.oasys2">>, []};
+all_ext(<<"oa3">>) -> {<<"application">>, <<"vnd.fujitsu.oasys3">>, []};
+all_ext(<<"oas">>) -> {<<"application">>, <<"vnd.fujitsu.oasys">>, []};
+all_ext(<<"obd">>) -> {<<"application">>, <<"x-msbinder">>, []};
+all_ext(<<"obj">>) -> {<<"application">>, <<"x-tgif">>, []};
+all_ext(<<"oda">>) -> {<<"application">>, <<"oda">>, []};
+all_ext(<<"odb">>) -> {<<"application">>, <<"vnd.oasis.opendocument.database">>, []};
+all_ext(<<"odc">>) -> {<<"application">>, <<"vnd.oasis.opendocument.chart">>, []};
+all_ext(<<"odf">>) -> {<<"application">>, <<"vnd.oasis.opendocument.formula">>, []};
+all_ext(<<"odft">>) -> {<<"application">>, <<"vnd.oasis.opendocument.formula-template">>, []};
+all_ext(<<"odg">>) -> {<<"application">>, <<"vnd.oasis.opendocument.graphics">>, []};
+all_ext(<<"odi">>) -> {<<"application">>, <<"vnd.oasis.opendocument.image">>, []};
+all_ext(<<"odm">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-master">>, []};
+all_ext(<<"odp">>) -> {<<"application">>, <<"vnd.oasis.opendocument.presentation">>, []};
+all_ext(<<"ods">>) -> {<<"application">>, <<"vnd.oasis.opendocument.spreadsheet">>, []};
+all_ext(<<"odt">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text">>, []};
+all_ext(<<"oga">>) -> {<<"audio">>, <<"ogg">>, []};
+all_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []};
+all_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []};
+all_ext(<<"ogx">>) -> {<<"application">>, <<"ogg">>, []};
+all_ext(<<"omdoc">>) -> {<<"application">>, <<"omdoc+xml">>, []};
+all_ext(<<"onepkg">>) -> {<<"application">>, <<"onenote">>, []};
+all_ext(<<"onetmp">>) -> {<<"application">>, <<"onenote">>, []};
+all_ext(<<"onetoc2">>) -> {<<"application">>, <<"onenote">>, []};
+all_ext(<<"onetoc">>) -> {<<"application">>, <<"onenote">>, []};
+all_ext(<<"opf">>) -> {<<"application">>, <<"oebps-package+xml">>, []};
+all_ext(<<"opml">>) -> {<<"text">>, <<"x-opml">>, []};
+all_ext(<<"oprc">>) -> {<<"application">>, <<"vnd.palm">>, []};
+all_ext(<<"org">>) -> {<<"application">>, <<"vnd.lotus-organizer">>, []};
+all_ext(<<"osf">>) -> {<<"application">>, <<"vnd.yamaha.openscoreformat">>, []};
+all_ext(<<"osfpvg">>) -> {<<"application">>, <<"vnd.yamaha.openscoreformat.osfpvg+xml">>, []};
+all_ext(<<"otc">>) -> {<<"application">>, <<"vnd.oasis.opendocument.chart-template">>, []};
+all_ext(<<"otf">>) -> {<<"application">>, <<"x-font-otf">>, []};
+all_ext(<<"otg">>) -> {<<"application">>, <<"vnd.oasis.opendocument.graphics-template">>, []};
+all_ext(<<"oth">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-web">>, []};
+all_ext(<<"oti">>) -> {<<"application">>, <<"vnd.oasis.opendocument.image-template">>, []};
+all_ext(<<"otp">>) -> {<<"application">>, <<"vnd.oasis.opendocument.presentation-template">>, []};
+all_ext(<<"ots">>) -> {<<"application">>, <<"vnd.oasis.opendocument.spreadsheet-template">>, []};
+all_ext(<<"ott">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-template">>, []};
+all_ext(<<"oxps">>) -> {<<"application">>, <<"oxps">>, []};
+all_ext(<<"oxt">>) -> {<<"application">>, <<"vnd.openofficeorg.extension">>, []};
+all_ext(<<"p10">>) -> {<<"application">>, <<"pkcs10">>, []};
+all_ext(<<"p12">>) -> {<<"application">>, <<"x-pkcs12">>, []};
+all_ext(<<"p7b">>) -> {<<"application">>, <<"x-pkcs7-certificates">>, []};
+all_ext(<<"p7c">>) -> {<<"application">>, <<"pkcs7-mime">>, []};
+all_ext(<<"p7m">>) -> {<<"application">>, <<"pkcs7-mime">>, []};
+all_ext(<<"p7r">>) -> {<<"application">>, <<"x-pkcs7-certreqresp">>, []};
+all_ext(<<"p7s">>) -> {<<"application">>, <<"pkcs7-signature">>, []};
+all_ext(<<"p8">>) -> {<<"application">>, <<"pkcs8">>, []};
+all_ext(<<"pas">>) -> {<<"text">>, <<"x-pascal">>, []};
+all_ext(<<"paw">>) -> {<<"application">>, <<"vnd.pawaafile">>, []};
+all_ext(<<"pbd">>) -> {<<"application">>, <<"vnd.powerbuilder6">>, []};
+all_ext(<<"pbm">>) -> {<<"image">>, <<"x-portable-bitmap">>, []};
+all_ext(<<"pcap">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []};
+all_ext(<<"pcf">>) -> {<<"application">>, <<"x-font-pcf">>, []};
+all_ext(<<"pcl">>) -> {<<"application">>, <<"vnd.hp-pcl">>, []};
+all_ext(<<"pclxl">>) -> {<<"application">>, <<"vnd.hp-pclxl">>, []};
+all_ext(<<"pct">>) -> {<<"image">>, <<"x-pict">>, []};
+all_ext(<<"pcurl">>) -> {<<"application">>, <<"vnd.curl.pcurl">>, []};
+all_ext(<<"pcx">>) -> {<<"image">>, <<"x-pcx">>, []};
+all_ext(<<"pdb">>) -> {<<"application">>, <<"vnd.palm">>, []};
+all_ext(<<"pdf">>) -> {<<"application">>, <<"pdf">>, []};
+all_ext(<<"pfa">>) -> {<<"application">>, <<"x-font-type1">>, []};
+all_ext(<<"pfb">>) -> {<<"application">>, <<"x-font-type1">>, []};
+all_ext(<<"pfm">>) -> {<<"application">>, <<"x-font-type1">>, []};
+all_ext(<<"pfr">>) -> {<<"application">>, <<"font-tdpfr">>, []};
+all_ext(<<"pfx">>) -> {<<"application">>, <<"x-pkcs12">>, []};
+all_ext(<<"pgm">>) -> {<<"image">>, <<"x-portable-graymap">>, []};
+all_ext(<<"pgn">>) -> {<<"application">>, <<"x-chess-pgn">>, []};
+all_ext(<<"pgp">>) -> {<<"application">>, <<"pgp-encrypted">>, []};
+all_ext(<<"pic">>) -> {<<"image">>, <<"x-pict">>, []};
+all_ext(<<"pkg">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"pki">>) -> {<<"application">>, <<"pkixcmp">>, []};
+all_ext(<<"pkipath">>) -> {<<"application">>, <<"pkix-pkipath">>, []};
+all_ext(<<"plb">>) -> {<<"application">>, <<"vnd.3gpp.pic-bw-large">>, []};
+all_ext(<<"plc">>) -> {<<"application">>, <<"vnd.mobius.plc">>, []};
+all_ext(<<"plf">>) -> {<<"application">>, <<"vnd.pocketlearn">>, []};
+all_ext(<<"pls">>) -> {<<"application">>, <<"pls+xml">>, []};
+all_ext(<<"pml">>) -> {<<"application">>, <<"vnd.ctc-posml">>, []};
+all_ext(<<"png">>) -> {<<"image">>, <<"png">>, []};
+all_ext(<<"pnm">>) -> {<<"image">>, <<"x-portable-anymap">>, []};
+all_ext(<<"portpkg">>) -> {<<"application">>, <<"vnd.macports.portpkg">>, []};
+all_ext(<<"pot">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []};
+all_ext(<<"potm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.template.macroenabled.12">>, []};
+all_ext(<<"potx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.template">>, []};
+all_ext(<<"ppam">>) -> {<<"application">>, <<"vnd.ms-powerpoint.addin.macroenabled.12">>, []};
+all_ext(<<"ppd">>) -> {<<"application">>, <<"vnd.cups-ppd">>, []};
+all_ext(<<"ppm">>) -> {<<"image">>, <<"x-portable-pixmap">>, []};
+all_ext(<<"pps">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []};
+all_ext(<<"ppsm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.slideshow.macroenabled.12">>, []};
+all_ext(<<"ppsx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.slideshow">>, []};
+all_ext(<<"ppt">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []};
+all_ext(<<"pptm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.presentation.macroenabled.12">>, []};
+all_ext(<<"pptx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.presentation">>, []};
+all_ext(<<"pqa">>) -> {<<"application">>, <<"vnd.palm">>, []};
+all_ext(<<"prc">>) -> {<<"application">>, <<"x-mobipocket-ebook">>, []};
+all_ext(<<"pre">>) -> {<<"application">>, <<"vnd.lotus-freelance">>, []};
+all_ext(<<"prf">>) -> {<<"application">>, <<"pics-rules">>, []};
+all_ext(<<"ps">>) -> {<<"application">>, <<"postscript">>, []};
+all_ext(<<"psb">>) -> {<<"application">>, <<"vnd.3gpp.pic-bw-small">>, []};
+all_ext(<<"psd">>) -> {<<"image">>, <<"vnd.adobe.photoshop">>, []};
+all_ext(<<"psf">>) -> {<<"application">>, <<"x-font-linux-psf">>, []};
+all_ext(<<"pskcxml">>) -> {<<"application">>, <<"pskc+xml">>, []};
+all_ext(<<"p">>) -> {<<"text">>, <<"x-pascal">>, []};
+all_ext(<<"ptid">>) -> {<<"application">>, <<"vnd.pvi.ptid1">>, []};
+all_ext(<<"pub">>) -> {<<"application">>, <<"x-mspublisher">>, []};
+all_ext(<<"pvb">>) -> {<<"application">>, <<"vnd.3gpp.pic-bw-var">>, []};
+all_ext(<<"pwn">>) -> {<<"application">>, <<"vnd.3m.post-it-notes">>, []};
+all_ext(<<"pya">>) -> {<<"audio">>, <<"vnd.ms-playready.media.pya">>, []};
+all_ext(<<"pyv">>) -> {<<"video">>, <<"vnd.ms-playready.media.pyv">>, []};
+all_ext(<<"qam">>) -> {<<"application">>, <<"vnd.epson.quickanime">>, []};
+all_ext(<<"qbo">>) -> {<<"application">>, <<"vnd.intu.qbo">>, []};
+all_ext(<<"qfx">>) -> {<<"application">>, <<"vnd.intu.qfx">>, []};
+all_ext(<<"qps">>) -> {<<"application">>, <<"vnd.publishare-delta-tree">>, []};
+all_ext(<<"qt">>) -> {<<"video">>, <<"quicktime">>, []};
+all_ext(<<"qwd">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []};
+all_ext(<<"qwt">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []};
+all_ext(<<"qxb">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []};
+all_ext(<<"qxd">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []};
+all_ext(<<"qxl">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []};
+all_ext(<<"qxt">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []};
+all_ext(<<"ra">>) -> {<<"audio">>, <<"x-pn-realaudio">>, []};
+all_ext(<<"ram">>) -> {<<"audio">>, <<"x-pn-realaudio">>, []};
+all_ext(<<"rar">>) -> {<<"application">>, <<"x-rar-compressed">>, []};
+all_ext(<<"ras">>) -> {<<"image">>, <<"x-cmu-raster">>, []};
+all_ext(<<"rcprofile">>) -> {<<"application">>, <<"vnd.ipunplugged.rcprofile">>, []};
+all_ext(<<"rdf">>) -> {<<"application">>, <<"rdf+xml">>, []};
+all_ext(<<"rdz">>) -> {<<"application">>, <<"vnd.data-vision.rdz">>, []};
+all_ext(<<"rep">>) -> {<<"application">>, <<"vnd.businessobjects">>, []};
+all_ext(<<"res">>) -> {<<"application">>, <<"x-dtbresource+xml">>, []};
+all_ext(<<"rgb">>) -> {<<"image">>, <<"x-rgb">>, []};
+all_ext(<<"rif">>) -> {<<"application">>, <<"reginfo+xml">>, []};
+all_ext(<<"rip">>) -> {<<"audio">>, <<"vnd.rip">>, []};
+all_ext(<<"ris">>) -> {<<"application">>, <<"x-research-info-systems">>, []};
+all_ext(<<"rl">>) -> {<<"application">>, <<"resource-lists+xml">>, []};
+all_ext(<<"rlc">>) -> {<<"image">>, <<"vnd.fujixerox.edmics-rlc">>, []};
+all_ext(<<"rld">>) -> {<<"application">>, <<"resource-lists-diff+xml">>, []};
+all_ext(<<"rm">>) -> {<<"application">>, <<"vnd.rn-realmedia">>, []};
+all_ext(<<"rmi">>) -> {<<"audio">>, <<"midi">>, []};
+all_ext(<<"rmp">>) -> {<<"audio">>, <<"x-pn-realaudio-plugin">>, []};
+all_ext(<<"rms">>) -> {<<"application">>, <<"vnd.jcp.javame.midlet-rms">>, []};
+all_ext(<<"rmvb">>) -> {<<"application">>, <<"vnd.rn-realmedia-vbr">>, []};
+all_ext(<<"rnc">>) -> {<<"application">>, <<"relax-ng-compact-syntax">>, []};
+all_ext(<<"roa">>) -> {<<"application">>, <<"rpki-roa">>, []};
+all_ext(<<"roff">>) -> {<<"text">>, <<"troff">>, []};
+all_ext(<<"rp9">>) -> {<<"application">>, <<"vnd.cloanto.rp9">>, []};
+all_ext(<<"rpss">>) -> {<<"application">>, <<"vnd.nokia.radio-presets">>, []};
+all_ext(<<"rpst">>) -> {<<"application">>, <<"vnd.nokia.radio-preset">>, []};
+all_ext(<<"rq">>) -> {<<"application">>, <<"sparql-query">>, []};
+all_ext(<<"rs">>) -> {<<"application">>, <<"rls-services+xml">>, []};
+all_ext(<<"rsd">>) -> {<<"application">>, <<"rsd+xml">>, []};
+all_ext(<<"rss">>) -> {<<"application">>, <<"rss+xml">>, []};
+all_ext(<<"rtf">>) -> {<<"application">>, <<"rtf">>, []};
+all_ext(<<"rtx">>) -> {<<"text">>, <<"richtext">>, []};
+all_ext(<<"s3m">>) -> {<<"audio">>, <<"s3m">>, []};
+all_ext(<<"saf">>) -> {<<"application">>, <<"vnd.yamaha.smaf-audio">>, []};
+all_ext(<<"sbml">>) -> {<<"application">>, <<"sbml+xml">>, []};
+all_ext(<<"sc">>) -> {<<"application">>, <<"vnd.ibm.secure-container">>, []};
+all_ext(<<"scd">>) -> {<<"application">>, <<"x-msschedule">>, []};
+all_ext(<<"scm">>) -> {<<"application">>, <<"vnd.lotus-screencam">>, []};
+all_ext(<<"scq">>) -> {<<"application">>, <<"scvp-cv-request">>, []};
+all_ext(<<"scs">>) -> {<<"application">>, <<"scvp-cv-response">>, []};
+all_ext(<<"scurl">>) -> {<<"text">>, <<"vnd.curl.scurl">>, []};
+all_ext(<<"sda">>) -> {<<"application">>, <<"vnd.stardivision.draw">>, []};
+all_ext(<<"sdc">>) -> {<<"application">>, <<"vnd.stardivision.calc">>, []};
+all_ext(<<"sdd">>) -> {<<"application">>, <<"vnd.stardivision.impress">>, []};
+all_ext(<<"sdkd">>) -> {<<"application">>, <<"vnd.solent.sdkm+xml">>, []};
+all_ext(<<"sdkm">>) -> {<<"application">>, <<"vnd.solent.sdkm+xml">>, []};
+all_ext(<<"sdp">>) -> {<<"application">>, <<"sdp">>, []};
+all_ext(<<"sdw">>) -> {<<"application">>, <<"vnd.stardivision.writer">>, []};
+all_ext(<<"see">>) -> {<<"application">>, <<"vnd.seemail">>, []};
+all_ext(<<"seed">>) -> {<<"application">>, <<"vnd.fdsn.seed">>, []};
+all_ext(<<"sema">>) -> {<<"application">>, <<"vnd.sema">>, []};
+all_ext(<<"semd">>) -> {<<"application">>, <<"vnd.semd">>, []};
+all_ext(<<"semf">>) -> {<<"application">>, <<"vnd.semf">>, []};
+all_ext(<<"ser">>) -> {<<"application">>, <<"java-serialized-object">>, []};
+all_ext(<<"setpay">>) -> {<<"application">>, <<"set-payment-initiation">>, []};
+all_ext(<<"setreg">>) -> {<<"application">>, <<"set-registration-initiation">>, []};
+all_ext(<<"sfd-hdstx">>) -> {<<"application">>, <<"vnd.hydrostatix.sof-data">>, []};
+all_ext(<<"sfs">>) -> {<<"application">>, <<"vnd.spotfire.sfs">>, []};
+all_ext(<<"sfv">>) -> {<<"text">>, <<"x-sfv">>, []};
+all_ext(<<"sgi">>) -> {<<"image">>, <<"sgi">>, []};
+all_ext(<<"sgl">>) -> {<<"application">>, <<"vnd.stardivision.writer-global">>, []};
+all_ext(<<"sgml">>) -> {<<"text">>, <<"sgml">>, []};
+all_ext(<<"sgm">>) -> {<<"text">>, <<"sgml">>, []};
+all_ext(<<"sh">>) -> {<<"application">>, <<"x-sh">>, []};
+all_ext(<<"shar">>) -> {<<"application">>, <<"x-shar">>, []};
+all_ext(<<"shf">>) -> {<<"application">>, <<"shf+xml">>, []};
+all_ext(<<"sid">>) -> {<<"image">>, <<"x-mrsid-image">>, []};
+all_ext(<<"sig">>) -> {<<"application">>, <<"pgp-signature">>, []};
+all_ext(<<"sil">>) -> {<<"audio">>, <<"silk">>, []};
+all_ext(<<"silo">>) -> {<<"model">>, <<"mesh">>, []};
+all_ext(<<"sis">>) -> {<<"application">>, <<"vnd.symbian.install">>, []};
+all_ext(<<"sisx">>) -> {<<"application">>, <<"vnd.symbian.install">>, []};
+all_ext(<<"sit">>) -> {<<"application">>, <<"x-stuffit">>, []};
+all_ext(<<"sitx">>) -> {<<"application">>, <<"x-stuffitx">>, []};
+all_ext(<<"skd">>) -> {<<"application">>, <<"vnd.koan">>, []};
+all_ext(<<"skm">>) -> {<<"application">>, <<"vnd.koan">>, []};
+all_ext(<<"skp">>) -> {<<"application">>, <<"vnd.koan">>, []};
+all_ext(<<"skt">>) -> {<<"application">>, <<"vnd.koan">>, []};
+all_ext(<<"sldm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.slide.macroenabled.12">>, []};
+all_ext(<<"sldx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.slide">>, []};
+all_ext(<<"slt">>) -> {<<"application">>, <<"vnd.epson.salt">>, []};
+all_ext(<<"sm">>) -> {<<"application">>, <<"vnd.stepmania.stepchart">>, []};
+all_ext(<<"smf">>) -> {<<"application">>, <<"vnd.stardivision.math">>, []};
+all_ext(<<"smi">>) -> {<<"application">>, <<"smil+xml">>, []};
+all_ext(<<"smil">>) -> {<<"application">>, <<"smil+xml">>, []};
+all_ext(<<"smv">>) -> {<<"video">>, <<"x-smv">>, []};
+all_ext(<<"smzip">>) -> {<<"application">>, <<"vnd.stepmania.package">>, []};
+all_ext(<<"snd">>) -> {<<"audio">>, <<"basic">>, []};
+all_ext(<<"snf">>) -> {<<"application">>, <<"x-font-snf">>, []};
+all_ext(<<"so">>) -> {<<"application">>, <<"octet-stream">>, []};
+all_ext(<<"spc">>) -> {<<"application">>, <<"x-pkcs7-certificates">>, []};
+all_ext(<<"spf">>) -> {<<"application">>, <<"vnd.yamaha.smaf-phrase">>, []};
+all_ext(<<"spl">>) -> {<<"application">>, <<"x-futuresplash">>, []};
+all_ext(<<"spot">>) -> {<<"text">>, <<"vnd.in3d.spot">>, []};
+all_ext(<<"spp">>) -> {<<"application">>, <<"scvp-vp-response">>, []};
+all_ext(<<"spq">>) -> {<<"application">>, <<"scvp-vp-request">>, []};
+all_ext(<<"spx">>) -> {<<"audio">>, <<"ogg">>, []};
+all_ext(<<"sql">>) -> {<<"application">>, <<"x-sql">>, []};
+all_ext(<<"src">>) -> {<<"application">>, <<"x-wais-source">>, []};
+all_ext(<<"srt">>) -> {<<"application">>, <<"x-subrip">>, []};
+all_ext(<<"sru">>) -> {<<"application">>, <<"sru+xml">>, []};
+all_ext(<<"srx">>) -> {<<"application">>, <<"sparql-results+xml">>, []};
+all_ext(<<"ssdl">>) -> {<<"application">>, <<"ssdl+xml">>, []};
+all_ext(<<"sse">>) -> {<<"application">>, <<"vnd.kodak-descriptor">>, []};
+all_ext(<<"ssf">>) -> {<<"application">>, <<"vnd.epson.ssf">>, []};
+all_ext(<<"ssml">>) -> {<<"application">>, <<"ssml+xml">>, []};
+all_ext(<<"st">>) -> {<<"application">>, <<"vnd.sailingtracker.track">>, []};
+all_ext(<<"stc">>) -> {<<"application">>, <<"vnd.sun.xml.calc.template">>, []};
+all_ext(<<"std">>) -> {<<"application">>, <<"vnd.sun.xml.draw.template">>, []};
+all_ext(<<"s">>) -> {<<"text">>, <<"x-asm">>, []};
+all_ext(<<"stf">>) -> {<<"application">>, <<"vnd.wt.stf">>, []};
+all_ext(<<"sti">>) -> {<<"application">>, <<"vnd.sun.xml.impress.template">>, []};
+all_ext(<<"stk">>) -> {<<"application">>, <<"hyperstudio">>, []};
+all_ext(<<"stl">>) -> {<<"application">>, <<"vnd.ms-pki.stl">>, []};
+all_ext(<<"str">>) -> {<<"application">>, <<"vnd.pg.format">>, []};
+all_ext(<<"stw">>) -> {<<"application">>, <<"vnd.sun.xml.writer.template">>, []};
+all_ext(<<"sub">>) -> {<<"image">>, <<"vnd.dvb.subtitle">>, []};
+all_ext(<<"sus">>) -> {<<"application">>, <<"vnd.sus-calendar">>, []};
+all_ext(<<"susp">>) -> {<<"application">>, <<"vnd.sus-calendar">>, []};
+all_ext(<<"sv4cpio">>) -> {<<"application">>, <<"x-sv4cpio">>, []};
+all_ext(<<"sv4crc">>) -> {<<"application">>, <<"x-sv4crc">>, []};
+all_ext(<<"svc">>) -> {<<"application">>, <<"vnd.dvb.service">>, []};
+all_ext(<<"svd">>) -> {<<"application">>, <<"vnd.svd">>, []};
+all_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []};
+all_ext(<<"svgz">>) -> {<<"image">>, <<"svg+xml">>, []};
+all_ext(<<"swa">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"swf">>) -> {<<"application">>, <<"x-shockwave-flash">>, []};
+all_ext(<<"swi">>) -> {<<"application">>, <<"vnd.aristanetworks.swi">>, []};
+all_ext(<<"sxc">>) -> {<<"application">>, <<"vnd.sun.xml.calc">>, []};
+all_ext(<<"sxd">>) -> {<<"application">>, <<"vnd.sun.xml.draw">>, []};
+all_ext(<<"sxg">>) -> {<<"application">>, <<"vnd.sun.xml.writer.global">>, []};
+all_ext(<<"sxi">>) -> {<<"application">>, <<"vnd.sun.xml.impress">>, []};
+all_ext(<<"sxm">>) -> {<<"application">>, <<"vnd.sun.xml.math">>, []};
+all_ext(<<"sxw">>) -> {<<"application">>, <<"vnd.sun.xml.writer">>, []};
+all_ext(<<"t3">>) -> {<<"application">>, <<"x-t3vm-image">>, []};
+all_ext(<<"taglet">>) -> {<<"application">>, <<"vnd.mynfc">>, []};
+all_ext(<<"tao">>) -> {<<"application">>, <<"vnd.tao.intent-module-archive">>, []};
+all_ext(<<"tar">>) -> {<<"application">>, <<"x-tar">>, []};
+all_ext(<<"tcap">>) -> {<<"application">>, <<"vnd.3gpp2.tcap">>, []};
+all_ext(<<"tcl">>) -> {<<"application">>, <<"x-tcl">>, []};
+all_ext(<<"teacher">>) -> {<<"application">>, <<"vnd.smart.teacher">>, []};
+all_ext(<<"tei">>) -> {<<"application">>, <<"tei+xml">>, []};
+all_ext(<<"teicorpus">>) -> {<<"application">>, <<"tei+xml">>, []};
+all_ext(<<"tex">>) -> {<<"application">>, <<"x-tex">>, []};
+all_ext(<<"texi">>) -> {<<"application">>, <<"x-texinfo">>, []};
+all_ext(<<"texinfo">>) -> {<<"application">>, <<"x-texinfo">>, []};
+all_ext(<<"text">>) -> {<<"text">>, <<"plain">>, []};
+all_ext(<<"tfi">>) -> {<<"application">>, <<"thraud+xml">>, []};
+all_ext(<<"tfm">>) -> {<<"application">>, <<"x-tex-tfm">>, []};
+all_ext(<<"tga">>) -> {<<"image">>, <<"x-tga">>, []};
+all_ext(<<"thmx">>) -> {<<"application">>, <<"vnd.ms-officetheme">>, []};
+all_ext(<<"tiff">>) -> {<<"image">>, <<"tiff">>, []};
+all_ext(<<"tif">>) -> {<<"image">>, <<"tiff">>, []};
+all_ext(<<"tmo">>) -> {<<"application">>, <<"vnd.tmobile-livetv">>, []};
+all_ext(<<"torrent">>) -> {<<"application">>, <<"x-bittorrent">>, []};
+all_ext(<<"tpl">>) -> {<<"application">>, <<"vnd.groove-tool-template">>, []};
+all_ext(<<"tpt">>) -> {<<"application">>, <<"vnd.trid.tpt">>, []};
+all_ext(<<"tra">>) -> {<<"application">>, <<"vnd.trueapp">>, []};
+all_ext(<<"trm">>) -> {<<"application">>, <<"x-msterminal">>, []};
+all_ext(<<"tr">>) -> {<<"text">>, <<"troff">>, []};
+all_ext(<<"tsd">>) -> {<<"application">>, <<"timestamped-data">>, []};
+all_ext(<<"tsv">>) -> {<<"text">>, <<"tab-separated-values">>, []};
+all_ext(<<"ttc">>) -> {<<"application">>, <<"x-font-ttf">>, []};
+all_ext(<<"t">>) -> {<<"text">>, <<"troff">>, []};
+all_ext(<<"ttf">>) -> {<<"application">>, <<"x-font-ttf">>, []};
+all_ext(<<"ttl">>) -> {<<"text">>, <<"turtle">>, []};
+all_ext(<<"twd">>) -> {<<"application">>, <<"vnd.simtech-mindmapper">>, []};
+all_ext(<<"twds">>) -> {<<"application">>, <<"vnd.simtech-mindmapper">>, []};
+all_ext(<<"txd">>) -> {<<"application">>, <<"vnd.genomatix.tuxedo">>, []};
+all_ext(<<"txf">>) -> {<<"application">>, <<"vnd.mobius.txf">>, []};
+all_ext(<<"txt">>) -> {<<"text">>, <<"plain">>, []};
+all_ext(<<"u32">>) -> {<<"application">>, <<"x-authorware-bin">>, []};
+all_ext(<<"udeb">>) -> {<<"application">>, <<"x-debian-package">>, []};
+all_ext(<<"ufd">>) -> {<<"application">>, <<"vnd.ufdl">>, []};
+all_ext(<<"ufdl">>) -> {<<"application">>, <<"vnd.ufdl">>, []};
+all_ext(<<"ulx">>) -> {<<"application">>, <<"x-glulx">>, []};
+all_ext(<<"umj">>) -> {<<"application">>, <<"vnd.umajin">>, []};
+all_ext(<<"unityweb">>) -> {<<"application">>, <<"vnd.unity">>, []};
+all_ext(<<"uoml">>) -> {<<"application">>, <<"vnd.uoml+xml">>, []};
+all_ext(<<"uris">>) -> {<<"text">>, <<"uri-list">>, []};
+all_ext(<<"uri">>) -> {<<"text">>, <<"uri-list">>, []};
+all_ext(<<"urls">>) -> {<<"text">>, <<"uri-list">>, []};
+all_ext(<<"ustar">>) -> {<<"application">>, <<"x-ustar">>, []};
+all_ext(<<"utz">>) -> {<<"application">>, <<"vnd.uiq.theme">>, []};
+all_ext(<<"uu">>) -> {<<"text">>, <<"x-uuencode">>, []};
+all_ext(<<"uva">>) -> {<<"audio">>, <<"vnd.dece.audio">>, []};
+all_ext(<<"uvd">>) -> {<<"application">>, <<"vnd.dece.data">>, []};
+all_ext(<<"uvf">>) -> {<<"application">>, <<"vnd.dece.data">>, []};
+all_ext(<<"uvg">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []};
+all_ext(<<"uvh">>) -> {<<"video">>, <<"vnd.dece.hd">>, []};
+all_ext(<<"uvi">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []};
+all_ext(<<"uvm">>) -> {<<"video">>, <<"vnd.dece.mobile">>, []};
+all_ext(<<"uvp">>) -> {<<"video">>, <<"vnd.dece.pd">>, []};
+all_ext(<<"uvs">>) -> {<<"video">>, <<"vnd.dece.sd">>, []};
+all_ext(<<"uvt">>) -> {<<"application">>, <<"vnd.dece.ttml+xml">>, []};
+all_ext(<<"uvu">>) -> {<<"video">>, <<"vnd.uvvu.mp4">>, []};
+all_ext(<<"uvva">>) -> {<<"audio">>, <<"vnd.dece.audio">>, []};
+all_ext(<<"uvvd">>) -> {<<"application">>, <<"vnd.dece.data">>, []};
+all_ext(<<"uvvf">>) -> {<<"application">>, <<"vnd.dece.data">>, []};
+all_ext(<<"uvvg">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []};
+all_ext(<<"uvvh">>) -> {<<"video">>, <<"vnd.dece.hd">>, []};
+all_ext(<<"uvvi">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []};
+all_ext(<<"uvvm">>) -> {<<"video">>, <<"vnd.dece.mobile">>, []};
+all_ext(<<"uvvp">>) -> {<<"video">>, <<"vnd.dece.pd">>, []};
+all_ext(<<"uvvs">>) -> {<<"video">>, <<"vnd.dece.sd">>, []};
+all_ext(<<"uvvt">>) -> {<<"application">>, <<"vnd.dece.ttml+xml">>, []};
+all_ext(<<"uvvu">>) -> {<<"video">>, <<"vnd.uvvu.mp4">>, []};
+all_ext(<<"uvv">>) -> {<<"video">>, <<"vnd.dece.video">>, []};
+all_ext(<<"uvvv">>) -> {<<"video">>, <<"vnd.dece.video">>, []};
+all_ext(<<"uvvx">>) -> {<<"application">>, <<"vnd.dece.unspecified">>, []};
+all_ext(<<"uvvz">>) -> {<<"application">>, <<"vnd.dece.zip">>, []};
+all_ext(<<"uvx">>) -> {<<"application">>, <<"vnd.dece.unspecified">>, []};
+all_ext(<<"uvz">>) -> {<<"application">>, <<"vnd.dece.zip">>, []};
+all_ext(<<"vcard">>) -> {<<"text">>, <<"vcard">>, []};
+all_ext(<<"vcd">>) -> {<<"application">>, <<"x-cdlink">>, []};
+all_ext(<<"vcf">>) -> {<<"text">>, <<"x-vcard">>, []};
+all_ext(<<"vcg">>) -> {<<"application">>, <<"vnd.groove-vcard">>, []};
+all_ext(<<"vcs">>) -> {<<"text">>, <<"x-vcalendar">>, []};
+all_ext(<<"vcx">>) -> {<<"application">>, <<"vnd.vcx">>, []};
+all_ext(<<"vis">>) -> {<<"application">>, <<"vnd.visionary">>, []};
+all_ext(<<"viv">>) -> {<<"video">>, <<"vnd.vivo">>, []};
+all_ext(<<"vob">>) -> {<<"video">>, <<"x-ms-vob">>, []};
+all_ext(<<"vor">>) -> {<<"application">>, <<"vnd.stardivision.writer">>, []};
+all_ext(<<"vox">>) -> {<<"application">>, <<"x-authorware-bin">>, []};
+all_ext(<<"vrml">>) -> {<<"model">>, <<"vrml">>, []};
+all_ext(<<"vsd">>) -> {<<"application">>, <<"vnd.visio">>, []};
+all_ext(<<"vsf">>) -> {<<"application">>, <<"vnd.vsf">>, []};
+all_ext(<<"vss">>) -> {<<"application">>, <<"vnd.visio">>, []};
+all_ext(<<"vst">>) -> {<<"application">>, <<"vnd.visio">>, []};
+all_ext(<<"vsw">>) -> {<<"application">>, <<"vnd.visio">>, []};
+all_ext(<<"vtu">>) -> {<<"model">>, <<"vnd.vtu">>, []};
+all_ext(<<"vxml">>) -> {<<"application">>, <<"voicexml+xml">>, []};
+all_ext(<<"w3d">>) -> {<<"application">>, <<"x-director">>, []};
+all_ext(<<"wad">>) -> {<<"application">>, <<"x-doom">>, []};
+all_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []};
+all_ext(<<"wax">>) -> {<<"audio">>, <<"x-ms-wax">>, []};
+all_ext(<<"wbmp">>) -> {<<"image">>, <<"vnd.wap.wbmp">>, []};
+all_ext(<<"wbs">>) -> {<<"application">>, <<"vnd.criticaltools.wbs+xml">>, []};
+all_ext(<<"wbxml">>) -> {<<"application">>, <<"vnd.wap.wbxml">>, []};
+all_ext(<<"wcm">>) -> {<<"application">>, <<"vnd.ms-works">>, []};
+all_ext(<<"wdb">>) -> {<<"application">>, <<"vnd.ms-works">>, []};
+all_ext(<<"wdp">>) -> {<<"image">>, <<"vnd.ms-photo">>, []};
+all_ext(<<"weba">>) -> {<<"audio">>, <<"webm">>, []};
+all_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []};
+all_ext(<<"webp">>) -> {<<"image">>, <<"webp">>, []};
+all_ext(<<"wg">>) -> {<<"application">>, <<"vnd.pmi.widget">>, []};
+all_ext(<<"wgt">>) -> {<<"application">>, <<"widget">>, []};
+all_ext(<<"wks">>) -> {<<"application">>, <<"vnd.ms-works">>, []};
+all_ext(<<"wma">>) -> {<<"audio">>, <<"x-ms-wma">>, []};
+all_ext(<<"wmd">>) -> {<<"application">>, <<"x-ms-wmd">>, []};
+all_ext(<<"wmf">>) -> {<<"application">>, <<"x-msmetafile">>, []};
+all_ext(<<"wmlc">>) -> {<<"application">>, <<"vnd.wap.wmlc">>, []};
+all_ext(<<"wmlsc">>) -> {<<"application">>, <<"vnd.wap.wmlscriptc">>, []};
+all_ext(<<"wmls">>) -> {<<"text">>, <<"vnd.wap.wmlscript">>, []};
+all_ext(<<"wml">>) -> {<<"text">>, <<"vnd.wap.wml">>, []};
+all_ext(<<"wm">>) -> {<<"video">>, <<"x-ms-wm">>, []};
+all_ext(<<"wmv">>) -> {<<"video">>, <<"x-ms-wmv">>, []};
+all_ext(<<"wmx">>) -> {<<"video">>, <<"x-ms-wmx">>, []};
+all_ext(<<"wmz">>) -> {<<"application">>, <<"x-msmetafile">>, []};
+all_ext(<<"woff">>) -> {<<"application">>, <<"font-woff">>, []};
+all_ext(<<"wpd">>) -> {<<"application">>, <<"vnd.wordperfect">>, []};
+all_ext(<<"wpl">>) -> {<<"application">>, <<"vnd.ms-wpl">>, []};
+all_ext(<<"wps">>) -> {<<"application">>, <<"vnd.ms-works">>, []};
+all_ext(<<"wqd">>) -> {<<"application">>, <<"vnd.wqd">>, []};
+all_ext(<<"wri">>) -> {<<"application">>, <<"x-mswrite">>, []};
+all_ext(<<"wrl">>) -> {<<"model">>, <<"vrml">>, []};
+all_ext(<<"wsdl">>) -> {<<"application">>, <<"wsdl+xml">>, []};
+all_ext(<<"wspolicy">>) -> {<<"application">>, <<"wspolicy+xml">>, []};
+all_ext(<<"wtb">>) -> {<<"application">>, <<"vnd.webturbo">>, []};
+all_ext(<<"wvx">>) -> {<<"video">>, <<"x-ms-wvx">>, []};
+all_ext(<<"x32">>) -> {<<"application">>, <<"x-authorware-bin">>, []};
+all_ext(<<"x3db">>) -> {<<"model">>, <<"x3d+binary">>, []};
+all_ext(<<"x3dbz">>) -> {<<"model">>, <<"x3d+binary">>, []};
+all_ext(<<"x3d">>) -> {<<"model">>, <<"x3d+xml">>, []};
+all_ext(<<"x3dv">>) -> {<<"model">>, <<"x3d+vrml">>, []};
+all_ext(<<"x3dvz">>) -> {<<"model">>, <<"x3d+vrml">>, []};
+all_ext(<<"x3dz">>) -> {<<"model">>, <<"x3d+xml">>, []};
+all_ext(<<"xaml">>) -> {<<"application">>, <<"xaml+xml">>, []};
+all_ext(<<"xap">>) -> {<<"application">>, <<"x-silverlight-app">>, []};
+all_ext(<<"xar">>) -> {<<"application">>, <<"vnd.xara">>, []};
+all_ext(<<"xbap">>) -> {<<"application">>, <<"x-ms-xbap">>, []};
+all_ext(<<"xbd">>) -> {<<"application">>, <<"vnd.fujixerox.docuworks.binder">>, []};
+all_ext(<<"xbm">>) -> {<<"image">>, <<"x-xbitmap">>, []};
+all_ext(<<"xdf">>) -> {<<"application">>, <<"xcap-diff+xml">>, []};
+all_ext(<<"xdm">>) -> {<<"application">>, <<"vnd.syncml.dm+xml">>, []};
+all_ext(<<"xdp">>) -> {<<"application">>, <<"vnd.adobe.xdp+xml">>, []};
+all_ext(<<"xdssc">>) -> {<<"application">>, <<"dssc+xml">>, []};
+all_ext(<<"xdw">>) -> {<<"application">>, <<"vnd.fujixerox.docuworks">>, []};
+all_ext(<<"xenc">>) -> {<<"application">>, <<"xenc+xml">>, []};
+all_ext(<<"xer">>) -> {<<"application">>, <<"patch-ops-error+xml">>, []};
+all_ext(<<"xfdf">>) -> {<<"application">>, <<"vnd.adobe.xfdf">>, []};
+all_ext(<<"xfdl">>) -> {<<"application">>, <<"vnd.xfdl">>, []};
+all_ext(<<"xht">>) -> {<<"application">>, <<"xhtml+xml">>, []};
+all_ext(<<"xhtml">>) -> {<<"application">>, <<"xhtml+xml">>, []};
+all_ext(<<"xhvml">>) -> {<<"application">>, <<"xv+xml">>, []};
+all_ext(<<"xif">>) -> {<<"image">>, <<"vnd.xiff">>, []};
+all_ext(<<"xla">>) -> {<<"application">>, <<"vnd.ms-excel">>, []};
+all_ext(<<"xlam">>) -> {<<"application">>, <<"vnd.ms-excel.addin.macroenabled.12">>, []};
+all_ext(<<"xlc">>) -> {<<"application">>, <<"vnd.ms-excel">>, []};
+all_ext(<<"xlf">>) -> {<<"application">>, <<"x-xliff+xml">>, []};
+all_ext(<<"xlm">>) -> {<<"application">>, <<"vnd.ms-excel">>, []};
+all_ext(<<"xls">>) -> {<<"application">>, <<"vnd.ms-excel">>, []};
+all_ext(<<"xlsb">>) -> {<<"application">>, <<"vnd.ms-excel.sheet.binary.macroenabled.12">>, []};
+all_ext(<<"xlsm">>) -> {<<"application">>, <<"vnd.ms-excel.sheet.macroenabled.12">>, []};
+all_ext(<<"xlsx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.spreadsheetml.sheet">>, []};
+all_ext(<<"xlt">>) -> {<<"application">>, <<"vnd.ms-excel">>, []};
+all_ext(<<"xltm">>) -> {<<"application">>, <<"vnd.ms-excel.template.macroenabled.12">>, []};
+all_ext(<<"xltx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.spreadsheetml.template">>, []};
+all_ext(<<"xlw">>) -> {<<"application">>, <<"vnd.ms-excel">>, []};
+all_ext(<<"xm">>) -> {<<"audio">>, <<"xm">>, []};
+all_ext(<<"xml">>) -> {<<"application">>, <<"xml">>, []};
+all_ext(<<"xo">>) -> {<<"application">>, <<"vnd.olpc-sugar">>, []};
+all_ext(<<"xop">>) -> {<<"application">>, <<"xop+xml">>, []};
+all_ext(<<"xpi">>) -> {<<"application">>, <<"x-xpinstall">>, []};
+all_ext(<<"xpl">>) -> {<<"application">>, <<"xproc+xml">>, []};
+all_ext(<<"xpm">>) -> {<<"image">>, <<"x-xpixmap">>, []};
+all_ext(<<"xpr">>) -> {<<"application">>, <<"vnd.is-xpr">>, []};
+all_ext(<<"xps">>) -> {<<"application">>, <<"vnd.ms-xpsdocument">>, []};
+all_ext(<<"xpw">>) -> {<<"application">>, <<"vnd.intercon.formnet">>, []};
+all_ext(<<"xpx">>) -> {<<"application">>, <<"vnd.intercon.formnet">>, []};
+all_ext(<<"xsl">>) -> {<<"application">>, <<"xml">>, []};
+all_ext(<<"xslt">>) -> {<<"application">>, <<"xslt+xml">>, []};
+all_ext(<<"xsm">>) -> {<<"application">>, <<"vnd.syncml+xml">>, []};
+all_ext(<<"xspf">>) -> {<<"application">>, <<"xspf+xml">>, []};
+all_ext(<<"xul">>) -> {<<"application">>, <<"vnd.mozilla.xul+xml">>, []};
+all_ext(<<"xvm">>) -> {<<"application">>, <<"xv+xml">>, []};
+all_ext(<<"xvml">>) -> {<<"application">>, <<"xv+xml">>, []};
+all_ext(<<"xwd">>) -> {<<"image">>, <<"x-xwindowdump">>, []};
+all_ext(<<"xyz">>) -> {<<"chemical">>, <<"x-xyz">>, []};
+all_ext(<<"xz">>) -> {<<"application">>, <<"x-xz">>, []};
+all_ext(<<"yang">>) -> {<<"application">>, <<"yang">>, []};
+all_ext(<<"yin">>) -> {<<"application">>, <<"yin+xml">>, []};
+all_ext(<<"z1">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"z2">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"z3">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"z4">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"z5">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"z6">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"z7">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"z8">>) -> {<<"application">>, <<"x-zmachine">>, []};
+all_ext(<<"zaz">>) -> {<<"application">>, <<"vnd.zzazz.deck+xml">>, []};
+all_ext(<<"zip">>) -> {<<"application">>, <<"zip">>, []};
+all_ext(<<"zir">>) -> {<<"application">>, <<"vnd.zul">>, []};
+all_ext(<<"zirz">>) -> {<<"application">>, <<"vnd.zul">>, []};
+all_ext(<<"zmm">>) -> {<<"application">>, <<"vnd.handheld-entertainment+xml">>, []};
+%% GENERATED
+all_ext(_) -> {<<"application">>, <<"octet-stream">>, []}.
+
+web_ext(<<"css">>) -> {<<"text">>, <<"css">>, []};
+web_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []};
+web_ext(<<"html">>) -> {<<"text">>, <<"html">>, []};
+web_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []};
+web_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []};
+web_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []};
+web_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []};
+web_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []};
+web_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []};
+web_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []};
+web_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []};
+web_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []};
+web_ext(<<"png">>) -> {<<"image">>, <<"png">>, []};
+web_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []};
+web_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []};
+web_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []};
+web_ext(_) -> {<<"application">>, <<"octet-stream">>, []}.
diff --git a/deps/cowlib/src/cow_mimetypes.erl.src b/deps/cowlib/src/cow_mimetypes.erl.src
new file mode 100644 (file)
index 0000000..cf79b5b
--- /dev/null
@@ -0,0 +1,59 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_mimetypes).
+
+-export([all/1]).
+-export([web/1]).
+
+%% @doc Return the mimetype for any file by looking at its extension.
+
+-spec all(file:filename_all()) -> {binary(), binary(), []}.
+all(Path) ->
+       case filename:extension(Path) of
+               <<>> -> {<<"application">>, <<"octet-stream">>, []};
+               << $., Ext/binary >> -> all_ext(Ext)
+       end.
+
+%% @doc Return the mimetype for a Web related file by looking at its extension.
+
+-spec web(file:filename_all()) -> {binary(), binary(), []}.
+web(Path) ->
+       case filename:extension(Path) of
+               <<>> -> {<<"application">>, <<"octet-stream">>, []};
+               << $., Ext/binary >> -> web_ext(Ext)
+       end.
+
+%% Internal.
+
+%% GENERATED
+all_ext(_) -> {<<"application">>, <<"octet-stream">>, []}.
+
+web_ext(<<"css">>) -> {<<"text">>, <<"css">>, []};
+web_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []};
+web_ext(<<"html">>) -> {<<"text">>, <<"html">>, []};
+web_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []};
+web_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []};
+web_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []};
+web_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []};
+web_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []};
+web_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []};
+web_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []};
+web_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []};
+web_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []};
+web_ext(<<"png">>) -> {<<"image">>, <<"png">>, []};
+web_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []};
+web_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []};
+web_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []};
+web_ext(_) -> {<<"application">>, <<"octet-stream">>, []}.
diff --git a/deps/cowlib/src/cow_multipart.erl b/deps/cowlib/src/cow_multipart.erl
new file mode 100644 (file)
index 0000000..d2b45a4
--- /dev/null
@@ -0,0 +1,752 @@
+%% Copyright (c) 2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_multipart).
+
+%% Parsing.
+-export([parse_headers/2]).
+-export([parse_body/2]).
+
+%% Building.
+-export([boundary/0]).
+-export([first_part/2]).
+-export([part/2]).
+-export([close/1]).
+
+%% Headers.
+-export([form_data/1]).
+-export([parse_content_disposition/1]).
+-export([parse_content_transfer_encoding/1]).
+-export([parse_content_type/1]).
+
+-type headers() :: [{iodata(), iodata()}].
+-export_type([headers/0]).
+
+-include("cow_inline.hrl").
+
+-define(TEST1_MIME, <<
+       "This is a message with multiple parts in MIME format.\r\n"
+       "--frontier\r\n"
+       "Content-Type: text/plain\r\n"
+       "\r\n"
+       "This is the body of the message.\r\n"
+       "--frontier\r\n"
+       "Content-Type: application/octet-stream\r\n"
+       "Content-Transfer-Encoding: base64\r\n"
+       "\r\n"
+       "PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n"
+       "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==\r\n"
+       "--frontier--"
+>>).
+-define(TEST1_BOUNDARY, <<"frontier">>).
+
+-define(TEST2_MIME, <<
+       "--AaB03x\r\n"
+       "Content-Disposition: form-data; name=\"submit-name\"\r\n"
+       "\r\n"
+       "Larry\r\n"
+       "--AaB03x\r\n"
+       "Content-Disposition: form-data; name=\"files\"\r\n"
+       "Content-Type: multipart/mixed; boundary=BbC04y\r\n"
+       "\r\n"
+       "--BbC04y\r\n"
+       "Content-Disposition: file; filename=\"file1.txt\"\r\n"
+       "Content-Type: text/plain\r\n"
+       "\r\n"
+       "... contents of file1.txt ...\r\n"
+       "--BbC04y\r\n"
+       "Content-Disposition: file; filename=\"file2.gif\"\r\n"
+       "Content-Type: image/gif\r\n"
+       "Content-Transfer-Encoding: binary\r\n"
+       "\r\n"
+       "...contents of file2.gif...\r\n"
+       "--BbC04y--\r\n"
+       "--AaB03x--"
+>>).
+-define(TEST2_BOUNDARY, <<"AaB03x">>).
+
+-define(TEST3_MIME, <<
+       "This is the preamble.\r\n"
+       "--boundary\r\n"
+       "Content-Type: text/plain\r\n"
+       "\r\n"
+       "This is the body of the message.\r\n"
+       "--boundary--"
+       "\r\nThis is the epilogue. Here it includes leading CRLF"
+>>).
+-define(TEST3_BOUNDARY, <<"boundary">>).
+
+-define(TEST4_MIME, <<
+       "This is the preamble.\r\n"
+       "--boundary\r\n"
+       "Content-Type: text/plain\r\n"
+       "\r\n"
+       "This is the body of the message.\r\n"
+       "--boundary--"
+       "\r\n"
+>>).
+-define(TEST4_BOUNDARY, <<"boundary">>).
+
+%% Parsing.
+%%
+%% The multipart format is defined in RFC 2045.
+
+%% @doc Parse the headers for the next multipart part.
+%%
+%% This function skips any preamble before the boundary.
+%% The preamble may be retrieved using parse_body/2.
+%%
+%% This function will accept input of any size, it is
+%% up to the caller to limit it if needed.
+
+-spec parse_headers(binary(), binary())
+       -> more | {more, binary()}
+       | {ok, headers(), binary()}
+       | {done, binary()}.
+%% If the stream starts with the boundary we can make a few assumptions
+%% and quickly figure out if we got the complete list of headers.
+parse_headers(<< "--", Stream/bits >>, Boundary) ->
+       BoundarySize = byte_size(Boundary),
+       case Stream of
+               %% Last boundary. Return the epilogue.
+               << Boundary:BoundarySize/binary, "--", Stream2/bits >> ->
+                       {done, Stream2};
+               << Boundary:BoundarySize/binary, Stream2/bits >> ->
+                       %% We have all the headers only if there is a \r\n\r\n
+                       %% somewhere in the data after the boundary.
+                       case binary:match(Stream2, <<"\r\n\r\n">>) of
+                               nomatch ->
+                                       more;
+                               _ ->
+                                       before_parse_headers(Stream2)
+                       end;
+               %% If there isn't enough to represent Boundary \r\n\r\n
+               %% then we definitely don't have all the headers.
+               _ when byte_size(Stream) < byte_size(Boundary) + 4 ->
+                       more;
+               %% Otherwise we have preamble data to skip.
+               %% We still got rid of the first two misleading bytes.
+               _ ->
+                       skip_preamble(Stream, Boundary)
+       end;
+%% Otherwise we have preamble data to skip.
+parse_headers(Stream, Boundary) ->
+       skip_preamble(Stream, Boundary).
+
+%% We need to find the boundary and a \r\n\r\n after that.
+%% Since the boundary isn't at the start, it must be right
+%% after a \r\n too.
+skip_preamble(Stream, Boundary) ->
+       case binary:match(Stream, <<"\r\n--", Boundary/bits >>) of
+               %% No boundary, need more data.
+               nomatch ->
+                       %% We can safely skip the size of the stream
+                       %% minus the last 3 bytes which may be a partial boundary.
+                       SkipSize = byte_size(Stream) - 3,
+                       case SkipSize > 0 of
+                               false ->
+                                       more;
+                               true ->
+                                       << _:SkipSize/binary, Stream2/bits >> = Stream,
+                                       {more, Stream2}
+                       end;
+               {Start, Length} ->
+                       Start2 = Start + Length,
+                       << _:Start2/binary, Stream2/bits >> = Stream,
+                       case Stream2 of
+                               %% Last boundary. Return the epilogue.
+                               << "--", Stream3/bits >> ->
+                                       {done, Stream3};
+                               _ ->
+                                       case binary:match(Stream, <<"\r\n\r\n">>) of
+                                               %% We don't have the full headers.
+                                               nomatch ->
+                                                       {more, Stream2};
+                                               _ ->
+                                                       before_parse_headers(Stream2)
+                                       end
+                       end
+       end.
+
+%% There is a line break right after the boundary, skip it.
+%%
+%% We only skip it now because there might be no headers at all,
+%% which means the \r\n\r\n indicating the end of headers also
+%% includes this line break.
+before_parse_headers(<< "\r\n", Stream/bits >>) ->
+       parse_hd_name(Stream, [], <<>>).
+
+parse_hd_name(<< C, Rest/bits >>, H, SoFar) ->
+       case C of
+               $: -> parse_hd_before_value(Rest, H, SoFar);
+               $\s -> parse_hd_name_ws(Rest, H, SoFar);
+               $\t -> parse_hd_name_ws(Rest, H, SoFar);
+               ?INLINE_LOWERCASE(parse_hd_name, Rest, H, SoFar)
+       end.
+
+parse_hd_name_ws(<< C, Rest/bits >>, H, Name) ->
+       case C of
+               $\s -> parse_hd_name_ws(Rest, H, Name);
+               $\t -> parse_hd_name_ws(Rest, H, Name);
+               $: -> parse_hd_before_value(Rest, H, Name)
+       end.
+
+parse_hd_before_value(<< $\s, Rest/bits >>, H, N) ->
+       parse_hd_before_value(Rest, H, N);
+parse_hd_before_value(<< $\t, Rest/bits >>, H, N) ->
+       parse_hd_before_value(Rest, H, N);
+parse_hd_before_value(Buffer, H, N) ->
+       parse_hd_value(Buffer, H, N, <<>>).
+
+parse_hd_value(<< $\r, Rest/bits >>, Headers, Name, SoFar) ->
+       case Rest of
+               << "\n\r\n", Rest2/bits >> ->
+                       {ok, [{Name, SoFar}|Headers], Rest2};
+               << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
+                       parse_hd_value(Rest2, Headers, Name, SoFar);
+               << $\n, Rest2/bits >> ->
+                       parse_hd_name(Rest2, [{Name, SoFar}|Headers], <<>>)
+       end;
+parse_hd_value(<< C, Rest/bits >>, H, N, SoFar) ->
+       parse_hd_value(Rest, H, N, << SoFar/binary, C >>).
+
+%% @doc Parse the body of the current multipart part.
+%%
+%% The body is everything until the next boundary.
+
+-spec parse_body(binary(), binary())
+       -> {ok, binary()} | {ok, binary(), binary()}
+       | done | {done, binary()} | {done, binary(), binary()}.
+parse_body(Stream, Boundary) ->
+       BoundarySize = byte_size(Boundary),
+       case Stream of
+               << "--", Boundary:BoundarySize/binary, _/bits >> ->
+                       done;
+               _ ->
+                       case binary:match(Stream, << "\r\n--", Boundary/bits >>) of
+                               %% No boundary, check for a possible partial at the end.
+                               %% Return more or less of the body depending on the result.
+                               nomatch ->
+                                       StreamSize = byte_size(Stream),
+                                       From = StreamSize - BoundarySize - 3,
+                                       MatchOpts = if
+                                               %% Binary too small to contain boundary, check it fully.
+                                               From < 0 -> [];
+                                               %% Optimize, only check the end of the binary.
+                                               true -> [{scope, {From, StreamSize - From}}]
+                                       end,
+                                       case binary:match(Stream, <<"\r">>, MatchOpts) of
+                                               nomatch ->
+                                                       {ok, Stream};
+                                               {Pos, _} ->
+                                                       case Stream of
+                                                               << Body:Pos/binary >> ->
+                                                                       {ok, Body};
+                                                               << Body:Pos/binary, Rest/bits >> ->
+                                                                       {ok, Body, Rest}
+                                                       end
+                                       end;
+                               %% Boundary found, this is the last chunk of the body.
+                               {Pos, _} ->
+                                       case Stream of
+                                               << Body:Pos/binary, "\r\n" >> ->
+                                                       {done, Body};
+                                               << Body:Pos/binary, "\r\n", Rest/bits >> ->
+                                                       {done, Body, Rest};
+                                               << Body:Pos/binary, Rest/bits >> ->
+                                                       {done, Body, Rest}
+                                       end
+                       end
+       end.
+
+-ifdef(TEST).
+parse_test() ->
+       H1 = [{<<"content-type">>, <<"text/plain">>}],
+       Body1 = <<"This is the body of the message.">>,
+       H2 = lists:sort([{<<"content-type">>, <<"application/octet-stream">>},
+               {<<"content-transfer-encoding">>, <<"base64">>}]),
+       Body2 = <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n"
+               "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>,
+       {ok, H1, Rest} = parse_headers(?TEST1_MIME, ?TEST1_BOUNDARY),
+       {done, Body1, Rest2} = parse_body(Rest, ?TEST1_BOUNDARY),
+       done = parse_body(Rest2, ?TEST1_BOUNDARY),
+       {ok, H2Unsorted, Rest3} = parse_headers(Rest2, ?TEST1_BOUNDARY),
+       H2 = lists:sort(H2Unsorted),
+       {done, Body2, Rest4} = parse_body(Rest3, ?TEST1_BOUNDARY),
+       done = parse_body(Rest4, ?TEST1_BOUNDARY),
+       {done, <<>>} = parse_headers(Rest4, ?TEST1_BOUNDARY),
+       ok.
+
+parse_interleaved_test() ->
+       H1 = [{<<"content-disposition">>, <<"form-data; name=\"submit-name\"">>}],
+       Body1 = <<"Larry">>,
+       H2 = lists:sort([{<<"content-disposition">>, <<"form-data; name=\"files\"">>},
+               {<<"content-type">>, <<"multipart/mixed; boundary=BbC04y">>}]),
+       InH1 = lists:sort([{<<"content-disposition">>, <<"file; filename=\"file1.txt\"">>},
+               {<<"content-type">>, <<"text/plain">>}]),
+       InBody1 = <<"... contents of file1.txt ...">>,
+       InH2 = lists:sort([{<<"content-disposition">>, <<"file; filename=\"file2.gif\"">>},
+               {<<"content-type">>, <<"image/gif">>},
+               {<<"content-transfer-encoding">>, <<"binary">>}]),
+       InBody2 = <<"...contents of file2.gif...">>,
+       {ok, H1, Rest} = parse_headers(?TEST2_MIME, ?TEST2_BOUNDARY),
+       {done, Body1, Rest2} = parse_body(Rest, ?TEST2_BOUNDARY),
+       done = parse_body(Rest2, ?TEST2_BOUNDARY),
+       {ok, H2Unsorted, Rest3} = parse_headers(Rest2, ?TEST2_BOUNDARY),
+       H2 = lists:sort(H2Unsorted),
+       {_, ContentType} = lists:keyfind(<<"content-type">>, 1, H2),
+       {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, InBoundary}]}
+               = parse_content_type(ContentType),
+       {ok, InH1Unsorted, InRest} = parse_headers(Rest3, InBoundary),
+       InH1 = lists:sort(InH1Unsorted),
+       {done, InBody1, InRest2} = parse_body(InRest, InBoundary),
+       done = parse_body(InRest2, InBoundary),
+       {ok, InH2Unsorted, InRest3} = parse_headers(InRest2, InBoundary),
+       InH2 = lists:sort(InH2Unsorted),
+       {done, InBody2, InRest4} = parse_body(InRest3, InBoundary),
+       done = parse_body(InRest4, InBoundary),
+       {done, Rest4} = parse_headers(InRest4, InBoundary),
+       {done, <<>>} = parse_headers(Rest4, ?TEST2_BOUNDARY),
+       ok.
+
+parse_epilogue_test() ->
+       H1 = [{<<"content-type">>, <<"text/plain">>}],
+       Body1 = <<"This is the body of the message.">>,
+       Epilogue = <<"\r\nThis is the epilogue. Here it includes leading CRLF">>,
+       {ok, H1, Rest} = parse_headers(?TEST3_MIME, ?TEST3_BOUNDARY),
+       {done, Body1, Rest2} = parse_body(Rest, ?TEST3_BOUNDARY),
+       done = parse_body(Rest2, ?TEST3_BOUNDARY),
+       {done, Epilogue} = parse_headers(Rest2, ?TEST3_BOUNDARY),
+       ok.
+
+parse_epilogue_crlf_test() ->
+       H1 = [{<<"content-type">>, <<"text/plain">>}],
+       Body1 = <<"This is the body of the message.">>,
+       Epilogue = <<"\r\n">>,
+       {ok, H1, Rest} = parse_headers(?TEST4_MIME, ?TEST4_BOUNDARY),
+       {done, Body1, Rest2} = parse_body(Rest, ?TEST4_BOUNDARY),
+       done = parse_body(Rest2, ?TEST4_BOUNDARY),
+       {done, Epilogue} = parse_headers(Rest2, ?TEST4_BOUNDARY),
+       ok.
+
+parse_partial_test() ->
+       {ok, <<0:8000, "abcdef">>, <<"\rghij">>}
+               = parse_body(<<0:8000, "abcdef\rghij">>, <<"boundary">>),
+       {ok, <<"abcdef">>, <<"\rghij">>}
+               = parse_body(<<"abcdef\rghij">>, <<"boundary">>),
+       {ok, <<"abc">>, <<"\rdef">>}
+               = parse_body(<<"abc\rdef">>, <<"boundaryboundary">>),
+       {ok, <<0:8000, "abcdef">>, <<"\r\nghij">>}
+               = parse_body(<<0:8000, "abcdef\r\nghij">>, <<"boundary">>),
+       {ok, <<"abcdef">>, <<"\r\nghij">>}
+               = parse_body(<<"abcdef\r\nghij">>, <<"boundary">>),
+       {ok, <<"abc">>, <<"\r\ndef">>}
+               = parse_body(<<"abc\r\ndef">>, <<"boundaryboundary">>),
+       {ok, <<"boundary">>, <<"\r">>}
+               = parse_body(<<"boundary\r">>, <<"boundary">>),
+       {ok, <<"boundary">>, <<"\r\n">>}
+               = parse_body(<<"boundary\r\n">>, <<"boundary">>),
+       {ok, <<"boundary">>, <<"\r\n-">>}
+               = parse_body(<<"boundary\r\n-">>, <<"boundary">>),
+       {ok, <<"boundary">>, <<"\r\n--">>}
+               = parse_body(<<"boundary\r\n--">>, <<"boundary">>),
+       ok.
+-endif.
+
+-ifdef(PERF).
+perf_parse_multipart(Stream, Boundary) ->
+       case parse_headers(Stream, Boundary) of
+               {ok, _, Rest} ->
+                       {_, _, Rest2} = parse_body(Rest, Boundary),
+                       perf_parse_multipart(Rest2, Boundary);
+               {done, _} ->
+                       ok
+       end.
+
+horse_parse() ->
+       horse:repeat(50000,
+               perf_parse_multipart(?TEST1_MIME, ?TEST1_BOUNDARY)
+       ).
+-endif.
+
+%% Building.
+
+%% @doc Generate a new random boundary.
+%%
+%% The boundary generated has a low probability of ever appearing
+%% in the data.
+
+-spec boundary() -> binary().
+boundary() ->
+       base64:encode(crypto:rand_bytes(48)).
+
+%% @doc Return the first part's head.
+%%
+%% This works exactly like the part/2 function except there is
+%% no leading \r\n. It's not required to use this function,
+%% just makes the output a little smaller and prettier.
+
+-spec first_part(binary(), headers()) -> iodata().
+first_part(Boundary, Headers) ->
+       [<<"--">>, Boundary, <<"\r\n">>, headers_to_iolist(Headers, [])].
+
+%% @doc Return a part's head.
+
+-spec part(binary(), headers()) -> iodata().
+part(Boundary, Headers) ->
+       [<<"\r\n--">>, Boundary, <<"\r\n">>, headers_to_iolist(Headers, [])].
+
+headers_to_iolist([], Acc) ->
+       lists:reverse([<<"\r\n">>|Acc]);
+headers_to_iolist([{N, V}|Tail], Acc) ->
+       %% We don't want to create a sublist so we list the
+       %% values in reverse order so that it gets reversed properly.
+       headers_to_iolist(Tail, [<<"\r\n">>, V, <<": ">>, N|Acc]).
+
+%% @doc Return the closing delimiter of the multipart message.
+
+-spec close(binary()) -> iodata().
+close(Boundary) ->
+       [<<"\r\n--">>, Boundary, <<"--">>].
+
+-ifdef(TEST).
+build_test() ->
+       Result = string:to_lower(binary_to_list(?TEST1_MIME)),
+       Result = string:to_lower(binary_to_list(iolist_to_binary([
+               <<"This is a message with multiple parts in MIME format.\r\n">>,
+               first_part(?TEST1_BOUNDARY, [{<<"content-type">>, <<"text/plain">>}]),
+               <<"This is the body of the message.">>,
+               part(?TEST1_BOUNDARY, [
+                       {<<"content-type">>, <<"application/octet-stream">>},
+                       {<<"content-transfer-encoding">>, <<"base64">>}]),
+               <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n"
+                       "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>,
+               close(?TEST1_BOUNDARY)
+       ]))),
+       ok.
+
+identity_test() ->
+       B = boundary(),
+       Preamble = <<"This is a message with multiple parts in MIME format.">>,
+       H1 = [{<<"content-type">>, <<"text/plain">>}],
+       Body1 = <<"This is the body of the message.">>,
+       H2 = lists:sort([{<<"content-type">>, <<"application/octet-stream">>},
+               {<<"content-transfer-encoding">>, <<"base64">>}]),
+       Body2 = <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n"
+               "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>,
+       Epilogue = <<"Gotta go fast!">>,
+       M = iolist_to_binary([
+               Preamble,
+               part(B, H1), Body1,
+               part(B, H2), Body2,
+               close(B),
+               Epilogue
+       ]),
+       {done, Preamble, M2} = parse_body(M, B),
+       {ok, H1, M3} = parse_headers(M2, B),
+       {done, Body1, M4} = parse_body(M3, B),
+       {ok, H2Unsorted, M5} = parse_headers(M4, B),
+       H2 = lists:sort(H2Unsorted),
+       {done, Body2, M6} = parse_body(M5, B),
+       {done, Epilogue} = parse_headers(M6, B),
+       ok.
+-endif.
+
+-ifdef(PERF).
+perf_build_multipart() ->
+       B = boundary(),
+       [
+               <<"preamble\r\n">>,
+               first_part(B, [{<<"content-type">>, <<"text/plain">>}]),
+               <<"This is the body of the message.">>,
+               part(B, [
+                       {<<"content-type">>, <<"application/octet-stream">>},
+                       {<<"content-transfer-encoding">>, <<"base64">>}]),
+               <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n"
+                       "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>,
+               close(B),
+               <<"epilogue">>
+       ].
+
+horse_build() ->
+       horse:repeat(50000,
+               perf_build_multipart()
+       ).
+-endif.
+
+%% Headers.
+
+%% @doc Convenience function for extracting information from headers
+%% when parsing a multipart/form-data stream.
+
+-spec form_data(headers())
+       -> {data, binary()}
+       | {file, binary(), binary(), binary(), binary()}.
+form_data(Headers) ->
+       {_, DispositionBin} = lists:keyfind(<<"content-disposition">>, 1, Headers),
+       {<<"form-data">>, Params} = parse_content_disposition(DispositionBin),
+       {_, FieldName} = lists:keyfind(<<"name">>, 1, Params),
+       case lists:keyfind(<<"filename">>, 1, Params) of
+               false ->
+                       {data, FieldName};
+               {_, Filename} ->
+                       Type = case lists:keyfind(<<"content-type">>, 1, Headers) of
+                               false -> <<"text/plain">>;
+                               {_, T} -> T
+                       end,
+                       TransferEncoding = case lists:keyfind(
+                                       <<"content-transfer-encoding">>, 1, Headers) of
+                               false -> <<"7bit">>;
+                               {_, TE} -> TE
+                       end,
+                       {file, FieldName, Filename, Type, TransferEncoding}
+       end.
+
+-ifdef(TEST).
+form_data_test_() ->
+       Tests = [
+               {[{<<"content-disposition">>, <<"form-data; name=\"submit-name\"">>}],
+                       {data, <<"submit-name">>}},
+               {[{<<"content-disposition">>,
+                               <<"form-data; name=\"files\"; filename=\"file1.txt\"">>},
+                       {<<"content-type">>, <<"text/x-plain">>}],
+                       {file, <<"files">>, <<"file1.txt">>,
+                               <<"text/x-plain">>, <<"7bit">>}}
+       ],
+       [{lists:flatten(io_lib:format("~p", [V])),
+               fun() -> R = form_data(V) end} || {V, R} <- Tests].
+-endif.
+
+%% @todo parse_content_description
+%% @todo parse_content_id
+
+%% @doc Parse an RFC 2183 content-disposition value.
+%% @todo Support RFC 2231.
+
+-spec parse_content_disposition(binary())
+       -> {binary(), [{binary(), binary()}]}.
+parse_content_disposition(Bin) ->
+       parse_cd_type(Bin, <<>>).
+
+parse_cd_type(<<>>, Acc) ->
+       {Acc, []};
+parse_cd_type(<< C, Rest/bits >>, Acc) ->
+       case C of
+               $; -> {Acc, parse_before_param(Rest, [])};
+               $\s -> {Acc, parse_before_param(Rest, [])};
+               $\t -> {Acc, parse_before_param(Rest, [])};
+               ?INLINE_LOWERCASE(parse_cd_type, Rest, Acc)
+       end.
+
+-ifdef(TEST).
+parse_content_disposition_test_() ->
+       Tests = [
+               {<<"inline">>, {<<"inline">>, []}},
+               {<<"attachment">>, {<<"attachment">>, []}},
+               {<<"attachment; filename=genome.jpeg;"
+                       "  modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";">>,
+                       {<<"attachment">>, [
+                               {<<"filename">>, <<"genome.jpeg">>},
+                               {<<"modification-date">>, <<"Wed, 12 Feb 1997 16:29:51 -0500">>}
+                       ]}},
+               {<<"form-data; name=\"user\"">>,
+                       {<<"form-data">>, [{<<"name">>, <<"user">>}]}},
+               {<<"form-data; NAME=\"submit-name\"">>,
+                       {<<"form-data">>, [{<<"name">>, <<"submit-name">>}]}},
+               {<<"form-data; name=\"files\"; filename=\"file1.txt\"">>,
+                       {<<"form-data">>, [
+                               {<<"name">>, <<"files">>},
+                               {<<"filename">>, <<"file1.txt">>}
+                       ]}},
+               {<<"file; filename=\"file1.txt\"">>,
+                       {<<"file">>, [{<<"filename">>, <<"file1.txt">>}]}},
+               {<<"file; filename=\"file2.gif\"">>,
+                       {<<"file">>, [{<<"filename">>, <<"file2.gif">>}]}}
+       ],
+       [{V, fun() -> R = parse_content_disposition(V) end} || {V, R} <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_content_disposition_attachment() ->
+       horse:repeat(100000,
+               parse_content_disposition(<<"attachment; filename=genome.jpeg;"
+                       "  modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";">>)
+       ).
+
+horse_parse_content_disposition_form_data() ->
+       horse:repeat(100000,
+               parse_content_disposition(
+                       <<"form-data; name=\"files\"; filename=\"file1.txt\"">>)
+       ).
+
+horse_parse_content_disposition_inline() ->
+       horse:repeat(100000,
+               parse_content_disposition(<<"inline">>)
+       ).
+-endif.
+
+%% @doc Parse an RFC 2045 content-transfer-encoding header.
+
+-spec parse_content_transfer_encoding(binary()) -> binary().
+parse_content_transfer_encoding(Bin) ->
+       ?INLINE_LOWERCASE_BC(Bin).
+
+-ifdef(TEST).
+parse_content_transfer_encoding_test_() ->
+       Tests = [
+               {<<"7bit">>, <<"7bit">>},
+               {<<"7BIT">>, <<"7bit">>},
+               {<<"8bit">>, <<"8bit">>},
+               {<<"binary">>, <<"binary">>},
+               {<<"quoted-printable">>, <<"quoted-printable">>},
+               {<<"base64">>, <<"base64">>},
+               {<<"Base64">>, <<"base64">>},
+               {<<"BASE64">>, <<"base64">>},
+               {<<"bAsE64">>, <<"base64">>}
+       ],
+       [{V, fun() -> R = parse_content_transfer_encoding(V) end}
+               || {V, R} <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_content_transfer_encoding() ->
+       horse:repeat(100000,
+               parse_content_transfer_encoding(<<"QUOTED-PRINTABLE">>)
+       ).
+-endif.
+
+%% @doc Parse an RFC 2045 content-type header.
+
+-spec parse_content_type(binary())
+       -> {binary(), binary(), [{binary(), binary()}]}.
+parse_content_type(Bin) ->
+       parse_ct_type(Bin, <<>>).
+
+parse_ct_type(<< C, Rest/bits >>, Acc) ->
+       case C of
+               $/ -> parse_ct_subtype(Rest, Acc, <<>>);
+               ?INLINE_LOWERCASE(parse_ct_type, Rest, Acc)
+       end.
+
+parse_ct_subtype(<<>>, Type, Subtype) when Subtype =/= <<>> ->
+       {Type, Subtype, []};
+parse_ct_subtype(<< C, Rest/bits >>, Type, Acc) ->
+       case C of
+               $; -> {Type, Acc, parse_before_param(Rest, [])};
+               $\s -> {Type, Acc, parse_before_param(Rest, [])};
+               $\t -> {Type, Acc, parse_before_param(Rest, [])};
+               ?INLINE_LOWERCASE(parse_ct_subtype, Rest, Type, Acc)
+       end.
+
+-ifdef(TEST).
+parse_content_type_test_() ->
+       Tests = [
+               {<<"image/gif">>,
+                       {<<"image">>, <<"gif">>, []}},
+               {<<"text/plain">>,
+                       {<<"text">>, <<"plain">>, []}},
+               {<<"text/plain; charset=us-ascii">>,
+                       {<<"text">>, <<"plain">>, [{<<"charset">>, <<"us-ascii">>}]}},
+               {<<"text/plain; charset=\"us-ascii\"">>,
+                       {<<"text">>, <<"plain">>, [{<<"charset">>, <<"us-ascii">>}]}},
+               {<<"multipart/form-data; boundary=AaB03x">>,
+                       {<<"multipart">>, <<"form-data">>,
+                               [{<<"boundary">>, <<"AaB03x">>}]}},
+               {<<"multipart/mixed; boundary=BbC04y">>,
+                       {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, <<"BbC04y">>}]}},
+               {<<"multipart/mixed; boundary=--------">>,
+                       {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, <<"--------">>}]}},
+               {<<"application/x-horse; filename=genome.jpeg;"
+                               "  some-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";"
+                               "  charset=us-ascii; empty=; number=12345">>,
+                       {<<"application">>, <<"x-horse">>, [
+                               {<<"filename">>, <<"genome.jpeg">>},
+                               {<<"some-date">>, <<"Wed, 12 Feb 1997 16:29:51 -0500">>},
+                               {<<"charset">>, <<"us-ascii">>},
+                               {<<"empty">>, <<>>},
+                               {<<"number">>, <<"12345">>}
+                       ]}}
+       ],
+       [{V, fun() -> R = parse_content_type(V) end}
+               || {V, R} <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_content_type_zero() ->
+       horse:repeat(100000,
+               parse_content_type(<<"text/plain">>)
+       ).
+
+horse_parse_content_type_one() ->
+       horse:repeat(100000,
+               parse_content_type(<<"text/plain; charset=\"us-ascii\"">>)
+       ).
+
+horse_parse_content_type_five() ->
+       horse:repeat(100000,
+               parse_content_type(<<"application/x-horse; filename=genome.jpeg;"
+                       "  some-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";"
+                       "  charset=us-ascii; empty=; number=12345">>)
+       ).
+-endif.
+
+%% @doc Parse RFC 2045 parameters.
+
+parse_before_param(<<>>, Params) ->
+       lists:reverse(Params);
+parse_before_param(<< C, Rest/bits >>, Params) ->
+       case C of
+               $; -> parse_before_param(Rest, Params);
+               $\s -> parse_before_param(Rest, Params);
+               $\t -> parse_before_param(Rest, Params);
+               ?INLINE_LOWERCASE(parse_param_name, Rest, Params, <<>>)
+       end.
+
+parse_param_name(<<>>, Params, Acc) ->
+       lists:reverse([{Acc, <<>>}|Params]);
+parse_param_name(<< C, Rest/bits >>, Params, Acc) ->
+       case C of
+               $= -> parse_param_value(Rest, Params, Acc);
+               ?INLINE_LOWERCASE(parse_param_name, Rest, Params, Acc)
+       end.
+
+parse_param_value(<<>>, Params, Name) ->
+       lists:reverse([{Name, <<>>}|Params]);
+parse_param_value(<< C, Rest/bits >>, Params, Name) ->
+       case C of
+               $" -> parse_param_quoted_value(Rest, Params, Name, <<>>);
+               $; -> parse_before_param(Rest, [{Name, <<>>}|Params]);
+               $\s -> parse_before_param(Rest, [{Name, <<>>}|Params]);
+               $\t -> parse_before_param(Rest, [{Name, <<>>}|Params]);
+               C -> parse_param_value(Rest, Params, Name, << C >>)
+       end.
+
+parse_param_value(<<>>, Params, Name, Acc) ->
+       lists:reverse([{Name, Acc}|Params]);
+parse_param_value(<< C, Rest/bits >>, Params, Name, Acc) ->
+       case C of
+               $; -> parse_before_param(Rest, [{Name, Acc}|Params]);
+               $\s -> parse_before_param(Rest, [{Name, Acc}|Params]);
+               $\t -> parse_before_param(Rest, [{Name, Acc}|Params]);
+               C -> parse_param_value(Rest, Params, Name, << Acc/binary, C >>)
+       end.
+
+%% We expect a final $" so no need to test for <<>>.
+parse_param_quoted_value(<< $\\, C, Rest/bits >>, Params, Name, Acc) ->
+       parse_param_quoted_value(Rest, Params, Name, << Acc/binary, C >>);
+parse_param_quoted_value(<< $", Rest/bits >>, Params, Name, Acc) ->
+       parse_before_param(Rest, [{Name, Acc}|Params]);
+parse_param_quoted_value(<< C, Rest/bits >>, Params, Name, Acc)
+               when C =/= $\r ->
+       parse_param_quoted_value(Rest, Params, Name, << Acc/binary, C >>).
diff --git a/deps/cowlib/src/cow_qs.erl b/deps/cowlib/src/cow_qs.erl
new file mode 100644 (file)
index 0000000..413562b
--- /dev/null
@@ -0,0 +1,571 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_qs).
+
+-export([parse_qs/1]).
+-export([qs/1]).
+-export([urldecode/1]).
+-export([urlencode/1]).
+
+-type qs_vals() :: [{binary(), binary() | true}].
+
+%% @doc Parse an application/x-www-form-urlencoded string.
+%%
+%% The percent decoding is inlined to greatly improve the performance
+%% by avoiding copying binaries twice (once for extracting, once for
+%% decoding) instead of just extracting the proper representation.
+
+-spec parse_qs(binary()) -> qs_vals().
+parse_qs(B) ->
+       parse_qs_name(B, [], <<>>).
+
+parse_qs_name(<< $%, H, L, Rest/bits >>, Acc, Name) ->
+       C = (unhex(H) bsl 4 bor unhex(L)),
+       parse_qs_name(Rest, Acc, << Name/bits, C >>);
+parse_qs_name(<< $+, Rest/bits >>, Acc, Name) ->
+       parse_qs_name(Rest, Acc, << Name/bits, " " >>);
+parse_qs_name(<< $=, Rest/bits >>, Acc, Name) when Name =/= <<>> ->
+       parse_qs_value(Rest, Acc, Name, <<>>);
+parse_qs_name(<< $&, Rest/bits >>, Acc, Name) ->
+       case Name of
+               <<>> -> parse_qs_name(Rest, Acc, <<>>);
+               _ -> parse_qs_name(Rest, [{Name, true}|Acc], <<>>)
+       end;
+parse_qs_name(<< C, Rest/bits >>, Acc, Name) when C =/= $%, C =/= $= ->
+       parse_qs_name(Rest, Acc, << Name/bits, C >>);
+parse_qs_name(<<>>, Acc, Name) ->
+       case Name of
+               <<>> -> lists:reverse(Acc);
+               _ -> lists:reverse([{Name, true}|Acc])
+       end.
+
+parse_qs_value(<< $%, H, L, Rest/bits >>, Acc, Name, Value) ->
+       C = (unhex(H) bsl 4 bor unhex(L)),
+       parse_qs_value(Rest, Acc, Name, << Value/bits, C >>);
+parse_qs_value(<< $+, Rest/bits >>, Acc, Name, Value) ->
+       parse_qs_value(Rest, Acc, Name, << Value/bits, " " >>);
+parse_qs_value(<< $&, Rest/bits >>, Acc, Name, Value) ->
+       parse_qs_name(Rest, [{Name, Value}|Acc], <<>>);
+parse_qs_value(<< C, Rest/bits >>, Acc, Name, Value) when C =/= $% ->
+       parse_qs_value(Rest, Acc, Name, << Value/bits, C >>);
+parse_qs_value(<<>>, Acc, Name, Value) ->
+       lists:reverse([{Name, Value}|Acc]).
+
+-ifdef(TEST).
+parse_qs_test_() ->
+       Tests = [
+               {<<>>, []},
+               {<<"&">>, []},
+               {<<"a">>, [{<<"a">>, true}]},
+               {<<"a&">>, [{<<"a">>, true}]},
+               {<<"&a">>, [{<<"a">>, true}]},
+               {<<"a&b">>, [{<<"a">>, true}, {<<"b">>, true}]},
+               {<<"a&&b">>, [{<<"a">>, true}, {<<"b">>, true}]},
+               {<<"a&b&">>, [{<<"a">>, true}, {<<"b">>, true}]},
+               {<<"=">>, error},
+               {<<"=b">>, error},
+               {<<"a=">>, [{<<"a">>, <<>>}]},
+               {<<"a=b">>, [{<<"a">>, <<"b">>}]},
+               {<<"a=&b=">>, [{<<"a">>, <<>>}, {<<"b">>, <<>>}]},
+               {<<"a=b&c&d=e">>, [{<<"a">>, <<"b">>},
+                       {<<"c">>, true}, {<<"d">>, <<"e">>}]},
+               {<<"a=b=c&d=e=f&g=h=i">>, [{<<"a">>, <<"b=c">>},
+                       {<<"d">>, <<"e=f">>}, {<<"g">>, <<"h=i">>}]},
+               {<<"+">>, [{<<" ">>, true}]},
+               {<<"+=+">>, [{<<" ">>, <<" ">>}]},
+               {<<"a+b=c+d">>, [{<<"a b">>, <<"c d">>}]},
+               {<<"+a+=+b+&+c+=+d+">>, [{<<" a ">>, <<" b ">>},
+                       {<<" c ">>, <<" d ">>}]},
+               {<<"a%20b=c%20d">>, [{<<"a b">>, <<"c d">>}]},
+               {<<"%25%26%3D=%25%26%3D&_-.=.-_">>, [{<<"%&=">>, <<"%&=">>},
+                       {<<"_-.">>, <<".-_">>}]},
+               {<<"for=extend%2Franch">>, [{<<"for">>, <<"extend/ranch">>}]}
+       ],
+       [{Qs, fun() ->
+               E = try parse_qs(Qs) of
+                       R -> R
+               catch _:_ ->
+                       error
+               end
+       end} || {Qs, E} <- Tests].
+
+parse_qs_identity_test_() ->
+       Tests = [
+               <<"+">>,
+               <<"hl=en&q=erlang+cowboy">>,
+               <<"direction=desc&for=extend%2Franch&sort=updated&state=open">>,
+               <<"i=EWiIXmPj5gl6&v=QowBp0oDLQXdd4x_GwiywA&ip=98.20.31.81&"
+                       "la=en&pg=New8.undertonebrandsafe.com%2F698a2525065ee2"
+                       "60c0b2f2aaad89ab82&re=&sz=1&fc=1&fr=140&br=3&bv=11.0."
+                       "696.16&os=3&ov=&rs=vpl&k=cookies%7Csale%7Cbrowser%7Cm"
+                       "ore%7Cprivacy%7Cstatistics%7Cactivities%7Cauction%7Ce"
+                       "mail%7Cfree%7Cin...&t=112373&xt=5%7C61%7C0&tz=-1&ev=x"
+                       "&tk=&za=1&ortb-za=1&zu=&zl=&ax=U&ay=U&ortb-pid=536454"
+                       ".55&ortb-sid=112373.8&seats=999&ortb-xt=IAB24&ortb-ugc=">>,
+               <<"i=9pQNskA&v=0ySQQd1F&ev=12345678&t=12345&sz=3&ip=67.58."
+                       "236.89&la=en&pg=http%3A%2F%2Fwww.yahoo.com%2Fpage1.ht"
+                       "m&re=http%3A%2F%2Fsearch.google.com&fc=1&fr=1&br=2&bv"
+                       "=3.0.14&os=1&ov=XP&k=cars%2Cford&rs=js&xt=5%7C22%7C23"
+                       "4&tz=%2B180&tk=key1%3Dvalue1%7Ckey2%3Dvalue2&zl=4%2C5"
+                       "%2C6&za=4&zu=competitor.com&ua=Mozilla%2F5.0+%28Windo"
+                       "ws%3B+U%3B+Windows+NT+6.1%3B+en-US%29+AppleWebKit%2F5"
+                       "34.13+%28KHTML%2C+like+Gecko%29+Chrome%2F9.0.597.98+S"
+                       "afari%2F534.13&ortb-za=1%2C6%2C13&ortb-pid=521732&ort"
+                       "b-sid=521732&ortb-xt=IAB3&ortb-ugc=">>
+       ],
+       [{V, fun() -> V = qs(parse_qs(V)) end} || V <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_parse_qs_shorter() ->
+       horse:repeat(20000,
+               parse_qs(<<"hl=en&q=erlang%20cowboy">>)
+       ).
+
+horse_parse_qs_short() ->
+       horse:repeat(20000,
+               parse_qs(
+                       <<"direction=desc&for=extend%2Franch&sort=updated&state=open">>)
+       ).
+
+horse_parse_qs_long() ->
+       horse:repeat(20000,
+               parse_qs(<<"i=EWiIXmPj5gl6&v=QowBp0oDLQXdd4x_GwiywA&ip=98.20.31.81&"
+                       "la=en&pg=New8.undertonebrandsafe.com%2F698a2525065ee260c0b2f2a"
+                       "aad89ab82&re=&sz=1&fc=1&fr=140&br=3&bv=11.0.696.16&os=3&ov=&rs"
+                       "=vpl&k=cookies%7Csale%7Cbrowser%7Cmore%7Cprivacy%7Cstatistics%"
+                       "7Cactivities%7Cauction%7Cemail%7Cfree%7Cin...&t=112373&xt=5%7C"
+                       "61%7C0&tz=-1&ev=x&tk=&za=1&ortb-za=1&zu=&zl=&ax=U&ay=U&ortb-pi"
+                       "d=536454.55&ortb-sid=112373.8&seats=999&ortb-xt=IAB24&ortb-ugc"
+                       "=">>)
+       ).
+
+horse_parse_qs_longer() ->
+       horse:repeat(20000,
+               parse_qs(<<"i=9pQNskA&v=0ySQQd1F&ev=12345678&t=12345&sz=3&ip=67.58."
+                       "236.89&la=en&pg=http%3A%2F%2Fwww.yahoo.com%2Fpage1.htm&re=http"
+                       "%3A%2F%2Fsearch.google.com&fc=1&fr=1&br=2&bv=3.0.14&os=1&ov=XP"
+                       "&k=cars%2cford&rs=js&xt=5%7c22%7c234&tz=%2b180&tk=key1%3Dvalue"
+                       "1%7Ckey2%3Dvalue2&zl=4,5,6&za=4&zu=competitor.com&ua=Mozilla%2"
+                       "F5.0%20(Windows%3B%20U%3B%20Windows%20NT%206.1%3B%20en-US)%20A"
+                       "ppleWebKit%2F534.13%20(KHTML%2C%20like%20Gecko)%20Chrome%2F9.0"
+                       ".597.98%20Safari%2F534.13&ortb-za=1%2C6%2C13&ortb-pid=521732&o"
+                       "rtb-sid=521732&ortb-xt=IAB3&ortb-ugc=">>)
+       ).
+-endif.
+
+%% @doc Build an application/x-www-form-urlencoded string.
+
+-spec qs(qs_vals()) -> binary().
+qs([]) ->
+       <<>>;
+qs(L) ->
+       qs(L, <<>>).
+
+qs([], Acc) ->
+       << $&, Qs/bits >> = Acc,
+       Qs;
+qs([{Name, true}|Tail], Acc) ->
+       Acc2 = urlencode(Name, << Acc/bits, $& >>),
+       qs(Tail, Acc2);
+qs([{Name, Value}|Tail], Acc) ->
+       Acc2 = urlencode(Name, << Acc/bits, $& >>),
+       Acc3 = urlencode(Value, << Acc2/bits, $= >>),
+       qs(Tail, Acc3).
+
+-define(QS_SHORTER, [
+       {<<"hl">>, <<"en">>},
+       {<<"q">>, <<"erlang cowboy">>}
+]).
+
+-define(QS_SHORT, [
+       {<<"direction">>, <<"desc">>},
+       {<<"for">>, <<"extend/ranch">>},
+       {<<"sort">>, <<"updated">>},
+       {<<"state">>, <<"open">>}
+]).
+
+-define(QS_LONG, [
+       {<<"i">>, <<"EWiIXmPj5gl6">>},
+       {<<"v">>, <<"QowBp0oDLQXdd4x_GwiywA">>},
+       {<<"ip">>, <<"98.20.31.81">>},
+       {<<"la">>, <<"en">>},
+       {<<"pg">>, <<"New8.undertonebrandsafe.com/"
+               "698a2525065ee260c0b2f2aaad89ab82">>},
+       {<<"re">>, <<>>},
+       {<<"sz">>, <<"1">>},
+       {<<"fc">>, <<"1">>},
+       {<<"fr">>, <<"140">>},
+       {<<"br">>, <<"3">>},
+       {<<"bv">>, <<"11.0.696.16">>},
+       {<<"os">>, <<"3">>},
+       {<<"ov">>, <<>>},
+       {<<"rs">>, <<"vpl">>},
+       {<<"k">>, <<"cookies|sale|browser|more|privacy|statistics|"
+               "activities|auction|email|free|in...">>},
+       {<<"t">>, <<"112373">>},
+       {<<"xt">>, <<"5|61|0">>},
+       {<<"tz">>, <<"-1">>},
+       {<<"ev">>, <<"x">>},
+       {<<"tk">>, <<>>},
+       {<<"za">>, <<"1">>},
+       {<<"ortb-za">>, <<"1">>},
+       {<<"zu">>, <<>>},
+       {<<"zl">>, <<>>},
+       {<<"ax">>, <<"U">>},
+       {<<"ay">>, <<"U">>},
+       {<<"ortb-pid">>, <<"536454.55">>},
+       {<<"ortb-sid">>, <<"112373.8">>},
+       {<<"seats">>, <<"999">>},
+       {<<"ortb-xt">>, <<"IAB24">>},
+       {<<"ortb-ugc">>, <<>>}
+]).
+
+-define(QS_LONGER, [
+       {<<"i">>, <<"9pQNskA">>},
+       {<<"v">>, <<"0ySQQd1F">>},
+       {<<"ev">>, <<"12345678">>},
+       {<<"t">>, <<"12345">>},
+       {<<"sz">>, <<"3">>},
+       {<<"ip">>, <<"67.58.236.89">>},
+       {<<"la">>, <<"en">>},
+       {<<"pg">>, <<"http://www.yahoo.com/page1.htm">>},
+       {<<"re">>, <<"http://search.google.com">>},
+       {<<"fc">>, <<"1">>},
+       {<<"fr">>, <<"1">>},
+       {<<"br">>, <<"2">>},
+       {<<"bv">>, <<"3.0.14">>},
+       {<<"os">>, <<"1">>},
+       {<<"ov">>, <<"XP">>},
+       {<<"k">>, <<"cars,ford">>},
+       {<<"rs">>, <<"js">>},
+       {<<"xt">>, <<"5|22|234">>},
+       {<<"tz">>, <<"+180">>},
+       {<<"tk">>, <<"key1=value1|key2=value2">>},
+       {<<"zl">>, <<"4,5,6">>},
+       {<<"za">>, <<"4">>},
+       {<<"zu">>, <<"competitor.com">>},
+       {<<"ua">>, <<"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) "
+               "AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.98 "
+               "Safari/534.13">>},
+       {<<"ortb-za">>, <<"1,6,13">>},
+       {<<"ortb-pid">>, <<"521732">>},
+       {<<"ortb-sid">>, <<"521732">>},
+       {<<"ortb-xt">>, <<"IAB3">>},
+       {<<"ortb-ugc">>, <<>>}
+]).
+
+-ifdef(TEST).
+qs_test_() ->
+       Tests = [
+               {[<<"a">>], error},
+               {[{<<"a">>, <<"b">>, <<"c">>}], error},
+               {[], <<>>},
+               {[{<<"a">>, true}], <<"a">>},
+               {[{<<"a">>, true}, {<<"b">>, true}], <<"a&b">>},
+               {[{<<"a">>, <<>>}], <<"a=">>},
+               {[{<<"a">>, <<"b">>}], <<"a=b">>},
+               {[{<<"a">>, <<>>}, {<<"b">>, <<>>}], <<"a=&b=">>},
+               {[{<<"a">>, <<"b">>}, {<<"c">>, true}, {<<"d">>, <<"e">>}],
+                       <<"a=b&c&d=e">>},
+               {[{<<"a">>, <<"b=c">>}, {<<"d">>, <<"e=f">>}, {<<"g">>, <<"h=i">>}],
+                       <<"a=b%3Dc&d=e%3Df&g=h%3Di">>},
+               {[{<<" ">>, true}], <<"+">>},
+               {[{<<" ">>, <<" ">>}], <<"+=+">>},
+               {[{<<"a b">>, <<"c d">>}], <<"a+b=c+d">>},
+               {[{<<" a ">>, <<" b ">>}, {<<" c ">>, <<" d ">>}],
+                       <<"+a+=+b+&+c+=+d+">>},
+               {[{<<"%&=">>, <<"%&=">>}, {<<"_-.">>, <<".-_">>}],
+                       <<"%25%26%3D=%25%26%3D&_-.=.-_">>},
+               {[{<<"for">>, <<"extend/ranch">>}], <<"for=extend%2Franch">>}
+       ],
+       [{lists:flatten(io_lib:format("~p", [Vals])), fun() ->
+               E = try qs(Vals) of
+                       R -> R
+               catch _:_ ->
+                       error
+               end
+       end} || {Vals, E} <- Tests].
+
+qs_identity_test_() ->
+       Tests = [
+               [{<<"+">>, true}],
+               ?QS_SHORTER,
+               ?QS_SHORT,
+               ?QS_LONG,
+               ?QS_LONGER
+       ],
+       [{lists:flatten(io_lib:format("~p", [V])), fun() ->
+               V = parse_qs(qs(V))
+       end} || V <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_qs_shorter() ->
+       horse:repeat(20000, qs(?QS_SHORTER)).
+
+horse_qs_short() ->
+       horse:repeat(20000, qs(?QS_SHORT)).
+
+horse_qs_long() ->
+       horse:repeat(20000, qs(?QS_LONG)).
+
+horse_qs_longer() ->
+       horse:repeat(20000, qs(?QS_LONGER)).
+-endif.
+
+%% @doc Decode a percent encoded string (x-www-form-urlencoded rules).
+
+-spec urldecode(B) -> B when B::binary().
+urldecode(B) ->
+       urldecode(B, <<>>).
+
+urldecode(<< $%, H, L, Rest/bits >>, Acc) ->
+       C = (unhex(H) bsl 4 bor unhex(L)),
+       urldecode(Rest, << Acc/bits, C >>);
+urldecode(<< $+, Rest/bits >>, Acc) ->
+       urldecode(Rest, << Acc/bits, " " >>);
+urldecode(<< C, Rest/bits >>, Acc) when C =/= $% ->
+       urldecode(Rest, << Acc/bits, C >>);
+urldecode(<<>>, Acc) ->
+       Acc.
+
+unhex($0) ->  0;
+unhex($1) ->  1;
+unhex($2) ->  2;
+unhex($3) ->  3;
+unhex($4) ->  4;
+unhex($5) ->  5;
+unhex($6) ->  6;
+unhex($7) ->  7;
+unhex($8) ->  8;
+unhex($9) ->  9;
+unhex($A) -> 10;
+unhex($B) -> 11;
+unhex($C) -> 12;
+unhex($D) -> 13;
+unhex($E) -> 14;
+unhex($F) -> 15;
+unhex($a) -> 10;
+unhex($b) -> 11;
+unhex($c) -> 12;
+unhex($d) -> 13;
+unhex($e) -> 14;
+unhex($f) -> 15.
+
+-ifdef(TEST).
+urldecode_test_() ->
+       Tests = [
+               {<<"%20">>, <<" ">>},
+               {<<"+">>, <<" ">>},
+               {<<"%00">>, <<0>>},
+               {<<"%fF">>, <<255>>},
+               {<<"123">>, <<"123">>},
+               {<<"%i5">>, error},
+               {<<"%5">>, error}
+       ],
+       [{Qs, fun() ->
+               E = try urldecode(Qs) of
+                       R -> R
+               catch _:_ ->
+                       error
+               end
+       end} || {Qs, E} <- Tests].
+
+urldecode_identity_test_() ->
+       Tests = [
+               <<"+">>,
+               <<"nothingnothingnothingnothing">>,
+               <<"Small+fast+modular+HTTP+server">>,
+               <<"Small%2C+fast%2C+modular+HTTP+server.">>,
+               <<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83"
+                       "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5"
+                       "%BE%8B%E3%80%9C">>
+       ],
+       [{V, fun() -> V = urlencode(urldecode(V)) end} || V <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_urldecode() ->
+       horse:repeat(100000,
+               urldecode(<<"nothingnothingnothingnothing">>)
+       ).
+
+horse_urldecode_plus() ->
+       horse:repeat(100000,
+               urldecode(<<"Small+fast+modular+HTTP+server">>)
+       ).
+
+horse_urldecode_hex() ->
+       horse:repeat(100000,
+               urldecode(<<"Small%2C%20fast%2C%20modular%20HTTP%20server.">>)
+       ).
+
+horse_urldecode_jp_hex() ->
+       horse:repeat(100000,
+               urldecode(<<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83"
+                       "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5"
+                       "%BE%8B%E3%80%9C">>)
+       ).
+
+horse_urldecode_mix() ->
+       horse:repeat(100000,
+               urldecode(<<"Small%2C+fast%2C+modular+HTTP+server.">>)
+       ).
+-endif.
+
+%% @doc Percent encode a string (x-www-form-urlencoded rules).
+
+-spec urlencode(B) -> B when B::binary().
+urlencode(B) ->
+       urlencode(B, <<>>).
+
+urlencode(<< $\s, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $+ >>);
+urlencode(<< $-, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $- >>);
+urlencode(<< $., Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $. >>);
+urlencode(<< $0, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $0 >>);
+urlencode(<< $1, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $1 >>);
+urlencode(<< $2, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $2 >>);
+urlencode(<< $3, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $3 >>);
+urlencode(<< $4, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $4 >>);
+urlencode(<< $5, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $5 >>);
+urlencode(<< $6, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $6 >>);
+urlencode(<< $7, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $7 >>);
+urlencode(<< $8, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $8 >>);
+urlencode(<< $9, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $9 >>);
+urlencode(<< $A, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $A >>);
+urlencode(<< $B, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $B >>);
+urlencode(<< $C, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $C >>);
+urlencode(<< $D, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $D >>);
+urlencode(<< $E, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $E >>);
+urlencode(<< $F, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $F >>);
+urlencode(<< $G, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $G >>);
+urlencode(<< $H, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $H >>);
+urlencode(<< $I, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $I >>);
+urlencode(<< $J, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $J >>);
+urlencode(<< $K, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $K >>);
+urlencode(<< $L, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $L >>);
+urlencode(<< $M, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $M >>);
+urlencode(<< $N, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $N >>);
+urlencode(<< $O, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $O >>);
+urlencode(<< $P, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $P >>);
+urlencode(<< $Q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Q >>);
+urlencode(<< $R, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $R >>);
+urlencode(<< $S, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $S >>);
+urlencode(<< $T, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $T >>);
+urlencode(<< $U, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $U >>);
+urlencode(<< $V, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $V >>);
+urlencode(<< $W, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $W >>);
+urlencode(<< $X, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $X >>);
+urlencode(<< $Y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Y >>);
+urlencode(<< $Z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Z >>);
+urlencode(<< $_, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $_ >>);
+urlencode(<< $a, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $a >>);
+urlencode(<< $b, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $b >>);
+urlencode(<< $c, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $c >>);
+urlencode(<< $d, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $d >>);
+urlencode(<< $e, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $e >>);
+urlencode(<< $f, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $f >>);
+urlencode(<< $g, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $g >>);
+urlencode(<< $h, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $h >>);
+urlencode(<< $i, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $i >>);
+urlencode(<< $j, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $j >>);
+urlencode(<< $k, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $k >>);
+urlencode(<< $l, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $l >>);
+urlencode(<< $m, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $m >>);
+urlencode(<< $n, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $n >>);
+urlencode(<< $o, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $o >>);
+urlencode(<< $p, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $p >>);
+urlencode(<< $q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $q >>);
+urlencode(<< $r, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $r >>);
+urlencode(<< $s, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $s >>);
+urlencode(<< $t, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $t >>);
+urlencode(<< $u, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $u >>);
+urlencode(<< $v, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $v >>);
+urlencode(<< $w, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $w >>);
+urlencode(<< $x, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $x >>);
+urlencode(<< $y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $y >>);
+urlencode(<< $z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $z >>);
+urlencode(<< C, Rest/bits >>, Acc) ->
+       H = hex(C bsr 4),
+       L = hex(C band 16#0f),
+       urlencode(Rest, << Acc/bits, $%, H, L >>);
+urlencode(<<>>, Acc) ->
+       Acc.
+
+hex( 0) -> $0;
+hex( 1) -> $1;
+hex( 2) -> $2;
+hex( 3) -> $3;
+hex( 4) -> $4;
+hex( 5) -> $5;
+hex( 6) -> $6;
+hex( 7) -> $7;
+hex( 8) -> $8;
+hex( 9) -> $9;
+hex(10) -> $A;
+hex(11) -> $B;
+hex(12) -> $C;
+hex(13) -> $D;
+hex(14) -> $E;
+hex(15) -> $F.
+
+-ifdef(TEST).
+urlencode_test_() ->
+       Tests = [
+               {<<255, 0>>, <<"%FF%00">>},
+               {<<255, " ">>, <<"%FF+">>},
+               {<<" ">>, <<"+">>},
+               {<<"aBc123">>, <<"aBc123">>},
+               {<<".-_">>, <<".-_">>}
+       ],
+       [{V, fun() -> E = urlencode(V) end} || {V, E} <- Tests].
+
+urlencode_identity_test_() ->
+       Tests = [
+               <<"+">>,
+               <<"nothingnothingnothingnothing">>,
+               <<"Small fast modular HTTP server">>,
+               <<"Small, fast, modular HTTP server.">>,
+               <<227,131,132,227,130,164,227,131,179,227,130,189,227,
+                       130,166,227,131,171,227,128,156,232,188,170,229,187,187,227,
+                       129,153,227,130,139,230,151,139,229,190,139,227,128,156>>
+       ],
+       [{V, fun() -> V = urldecode(urlencode(V)) end} || V <- Tests].
+-endif.
+
+-ifdef(PERF).
+horse_urlencode() ->
+       horse:repeat(100000,
+               urlencode(<<"nothingnothingnothingnothing">>)
+       ).
+
+horse_urlencode_plus() ->
+       horse:repeat(100000,
+               urlencode(<<"Small fast modular HTTP server">>)
+       ).
+
+horse_urlencode_jp() ->
+       horse:repeat(100000,
+               urlencode(<<227,131,132,227,130,164,227,131,179,227,130,189,227,
+                       130,166,227,131,171,227,128,156,232,188,170,229,187,187,227,
+                       129,153,227,130,139,230,151,139,229,190,139,227,128,156>>)
+       ).
+
+horse_urlencode_mix() ->
+       horse:repeat(100000,
+               urlencode(<<"Small, fast, modular HTTP server.">>)
+       ).
+-endif.
diff --git a/deps/cowlib/src/cow_spdy.erl b/deps/cowlib/src/cow_spdy.erl
new file mode 100644 (file)
index 0000000..59c1ba4
--- /dev/null
@@ -0,0 +1,265 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_spdy).
+
+%% Zstream.
+-export([deflate_init/0]).
+-export([inflate_init/0]).
+
+%% Parse.
+-export([split/1]).
+-export([parse/2]).
+
+%% Build.
+-export([data/3]).
+-export([syn_stream/12]).
+-export([syn_reply/6]).
+-export([rst_stream/2]).
+%% @todo settings
+-export([ping/1]).
+-export([goaway/2]).
+%% @todo headers
+%% @todo window_update
+
+-include("cow_spdy.hrl").
+
+%% Zstream.
+
+deflate_init() ->
+       Zdef = zlib:open(),
+       ok = zlib:deflateInit(Zdef),
+       _ = zlib:deflateSetDictionary(Zdef, ?ZDICT),
+       Zdef.
+
+inflate_init() ->
+       Zinf = zlib:open(),
+       ok = zlib:inflateInit(Zinf),
+       Zinf.
+
+%% Parse.
+
+split(Data = << _:40, Length:24, _/bits >>)
+               when byte_size(Data) >= Length + 8 ->
+       Length2 = Length + 8,
+       << Frame:Length2/binary, Rest/bits >> = Data,
+       {true, Frame, Rest};
+split(_) ->
+       false.
+
+parse(<< 0:1, StreamID:31, 0:7, IsFinFlag:1, _:24, Data/bits >>, _) ->
+       {data, StreamID, from_flag(IsFinFlag), Data};
+parse(<< 1:1, 3:15, 1:16, 0:6, IsUnidirectionalFlag:1, IsFinFlag:1,
+               _:25, StreamID:31, _:1, AssocToStreamID:31, Priority:3, _:5,
+               0:8, Rest/bits >>, Zinf) ->
+       case parse_headers(Rest, Zinf) of
+               {ok, Headers, [{<<":host">>, Host}, {<<":method">>, Method},
+                               {<<":path">>, Path}, {<<":scheme">>, Scheme},
+                               {<<":version">>, Version}]} ->
+                       {syn_stream, StreamID, AssocToStreamID, from_flag(IsFinFlag),
+                               from_flag(IsUnidirectionalFlag), Priority, Method,
+                               Scheme, Host, Path, Version, Headers};
+               _ ->
+                       {error, badprotocol}
+       end;
+parse(<< 1:1, 3:15, 2:16, 0:7, IsFinFlag:1, _:25,
+               StreamID:31, Rest/bits >>, Zinf) ->
+       case parse_headers(Rest, Zinf) of
+               {ok, Headers, [{<<":status">>, Status}, {<<":version">>, Version}]} ->
+                       {syn_reply, StreamID, from_flag(IsFinFlag),
+                               Status, Version, Headers};
+               _ ->
+                       {error, badprotocol}
+       end;
+parse(<< 1:1, 3:15, 3:16, 0:8, _:56, StatusCode:32 >>, _)
+               when StatusCode =:= 0; StatusCode > 11 ->
+       {error, badprotocol};
+parse(<< 1:1, 3:15, 3:16, 0:8, _:25, StreamID:31, StatusCode:32 >>, _) ->
+       Status = case StatusCode of
+               1 -> protocol_error;
+               2 -> invalid_stream;
+               3 -> refused_stream;
+               4 -> unsupported_version;
+               5 -> cancel;
+               6 -> internal_error;
+               7 -> flow_control_error;
+               8 -> stream_in_use;
+               9 -> stream_already_closed;
+               10 -> invalid_credentials;
+               11 -> frame_too_large
+       end,
+       {rst_stream, StreamID, Status};
+parse(<< 1:1, 3:15, 4:16, 0:7, ClearSettingsFlag:1, _:24,
+               NbEntries:32, Rest/bits >>, _) ->
+       try
+               Settings = [begin
+                       Is0 = 0,
+                       Key = case ID of
+                               1 -> upload_bandwidth;
+                               2 -> download_bandwidth;
+                               3 -> round_trip_time;
+                               4 -> max_concurrent_streams;
+                               5 -> current_cwnd;
+                               6 -> download_retrans_rate;
+                               7 -> initial_window_size;
+                               8 -> client_certificate_vector_size
+                       end,
+                       {Key, Value, from_flag(PersistFlag), from_flag(WasPersistedFlag)}
+               end || << Is0:6, WasPersistedFlag:1, PersistFlag:1,
+                       ID:24, Value:32 >> <= Rest],
+               NbEntries = length(Settings),
+               {settings, from_flag(ClearSettingsFlag), Settings}
+       catch _:_ ->
+               {error, badprotocol}
+       end;
+parse(<< 1:1, 3:15, 6:16, 0:8, _:24, PingID:32 >>, _) ->
+       {ping, PingID};
+parse(<< 1:1, 3:15, 7:16, 0:8, _:56, StatusCode:32 >>, _)
+               when StatusCode > 2 ->
+       {error, badprotocol};
+parse(<< 1:1, 3:15, 7:16, 0:8, _:25, LastGoodStreamID:31,
+               StatusCode:32 >>, _) ->
+       Status = case StatusCode of
+               0 -> ok;
+               1 -> protocol_error;
+               2 -> internal_error
+       end,
+       {goaway, LastGoodStreamID, Status};
+parse(<< 1:1, 3:15, 8:16, 0:7, IsFinFlag:1, _:25, StreamID:31,
+               Rest/bits >>, Zinf) ->
+       case parse_headers(Rest, Zinf) of
+               {ok, Headers, []} ->
+                       {headers, StreamID, from_flag(IsFinFlag), Headers};
+               _ ->
+                       {error, badprotocol}
+       end;
+parse(<< 1:1, 3:15, 9:16, 0:8, _:57, 0:31 >>, _) ->
+       {error, badprotocol};
+parse(<< 1:1, 3:15, 9:16, 0:8, _:25, StreamID:31,
+               _:1, DeltaWindowSize:31 >>, _) ->
+       {window_update, StreamID, DeltaWindowSize};
+parse(_, _) ->
+       {error, badprotocol}.
+
+parse_headers(Data, Zinf) ->
+       [<< NbHeaders:32, Rest/bits >>] = inflate(Zinf, Data),
+       parse_headers(Rest, NbHeaders, [], []).
+
+parse_headers(<<>>, 0, Headers, SpHeaders) ->
+       {ok, lists:reverse(Headers), lists:sort(SpHeaders)};
+parse_headers(<<>>, _, _, _) ->
+       error;
+parse_headers(_, 0, _, _) ->
+       error;
+parse_headers(<< 0:32, _/bits >>, _, _, _) ->
+       error;
+parse_headers(<< L1:32, Key:L1/binary, L2:32, Value:L2/binary, Rest/bits >>,
+               NbHeaders, Acc, SpAcc) ->
+       case Key of
+               << $:, _/bits >> ->
+                       parse_headers(Rest, NbHeaders - 1, Acc,
+                               lists:keystore(Key, 1, SpAcc, {Key, Value}));
+               _ ->
+                       parse_headers(Rest, NbHeaders - 1, [{Key, Value}|Acc], SpAcc)
+       end.
+
+inflate(Zinf, Data) ->
+       try
+               zlib:inflate(Zinf, Data)
+       catch _:_ ->
+               ok = zlib:inflateSetDictionary(Zinf, ?ZDICT),
+               zlib:inflate(Zinf, <<>>)
+       end.
+
+from_flag(0) -> false;
+from_flag(1) -> true.
+
+%% Build.
+
+data(StreamID, IsFin, Data) ->
+       IsFinFlag = to_flag(IsFin),
+       Length = iolist_size(Data),
+       [<< 0:1, StreamID:31, 0:7, IsFinFlag:1, Length:24 >>, Data].
+
+syn_stream(Zdef, StreamID, AssocToStreamID, IsFin, IsUnidirectional,
+               Priority, Method, Scheme, Host, Path, Version, Headers) ->
+       IsFinFlag = to_flag(IsFin),
+       IsUnidirectionalFlag = to_flag(IsUnidirectional),
+       HeaderBlock = build_headers(Zdef, [
+               {<<":method">>, Method},
+               {<<":scheme">>, Scheme},
+               {<<":host">>, Host},
+               {<<":path">>, Path},
+               {<<":version">>, Version}
+               |Headers]),
+       Length = 10 + iolist_size(HeaderBlock),
+       [<< 1:1, 3:15, 1:16, 0:6, IsUnidirectionalFlag:1, IsFinFlag:1,
+               Length:24, 0:1, StreamID:31, 0:1, AssocToStreamID:31,
+               Priority:3, 0:5, 0:8 >>, HeaderBlock].
+
+syn_reply(Zdef, StreamID, IsFin, Status, Version, Headers) ->
+       IsFinFlag = to_flag(IsFin),
+       HeaderBlock = build_headers(Zdef, [
+               {<<":status">>, Status},
+               {<<":version">>, Version}
+               |Headers]),
+       Length = 4 + iolist_size(HeaderBlock),
+       [<< 1:1, 3:15, 2:16, 0:7, IsFinFlag:1, Length:24,
+               0:1, StreamID:31 >>, HeaderBlock].
+
+rst_stream(StreamID, Status) ->
+       StatusCode = case Status of
+               protocol_error -> 1;
+               invalid_stream -> 2;
+               refused_stream -> 3;
+               unsupported_version -> 4;
+               cancel -> 5;
+               internal_error -> 6;
+               flow_control_error -> 7;
+               stream_in_use -> 8;
+               stream_already_closed -> 9;
+               invalid_credentials -> 10;
+               frame_too_large -> 11
+       end,
+       << 1:1, 3:15, 3:16, 0:8, 8:24,
+               0:1, StreamID:31, StatusCode:32 >>.
+
+%% @todo settings
+
+ping(PingID) ->
+       << 1:1, 3:15, 6:16, 0:8, 4:24, PingID:32 >>.
+
+goaway(LastGoodStreamID, Status) ->
+       StatusCode = case Status of
+               ok -> 0;
+               protocol_error -> 1;
+               internal_error -> 2
+       end,
+       << 1:1, 3:15, 7:16, 0:8, 8:24,
+               0:1, LastGoodStreamID:31, StatusCode:32 >>.
+
+%% @todo headers
+%% @todo window_update
+
+build_headers(Zdef, Headers) ->
+       NbHeaders = length(Headers),
+       Headers2 = [begin
+               L1 = iolist_size(Key),
+               L2 = iolist_size(Value),
+               [<< L1:32 >>, Key, << L2:32 >>, Value]
+       end || {Key, Value} <- Headers],
+       zlib:deflate(Zdef, [<< NbHeaders:32 >>, Headers2], full).
+
+to_flag(false) -> 0;
+to_flag(true) -> 1.
diff --git a/deps/cowlib/src/cow_spdy.hrl b/deps/cowlib/src/cow_spdy.hrl
new file mode 100644 (file)
index 0000000..9637b1c
--- /dev/null
@@ -0,0 +1,181 @@
+%% Zlib dictionary.
+
+-define(ZDICT, <<
+       16#00, 16#00, 16#00, 16#07, 16#6f, 16#70, 16#74, 16#69,
+       16#6f, 16#6e, 16#73, 16#00, 16#00, 16#00, 16#04, 16#68,
+       16#65, 16#61, 16#64, 16#00, 16#00, 16#00, 16#04, 16#70,
+       16#6f, 16#73, 16#74, 16#00, 16#00, 16#00, 16#03, 16#70,
+       16#75, 16#74, 16#00, 16#00, 16#00, 16#06, 16#64, 16#65,
+       16#6c, 16#65, 16#74, 16#65, 16#00, 16#00, 16#00, 16#05,
+       16#74, 16#72, 16#61, 16#63, 16#65, 16#00, 16#00, 16#00,
+       16#06, 16#61, 16#63, 16#63, 16#65, 16#70, 16#74, 16#00,
+       16#00, 16#00, 16#0e, 16#61, 16#63, 16#63, 16#65, 16#70,
+       16#74, 16#2d, 16#63, 16#68, 16#61, 16#72, 16#73, 16#65,
+       16#74, 16#00, 16#00, 16#00, 16#0f, 16#61, 16#63, 16#63,
+       16#65, 16#70, 16#74, 16#2d, 16#65, 16#6e, 16#63, 16#6f,
+       16#64, 16#69, 16#6e, 16#67, 16#00, 16#00, 16#00, 16#0f,
+       16#61, 16#63, 16#63, 16#65, 16#70, 16#74, 16#2d, 16#6c,
+       16#61, 16#6e, 16#67, 16#75, 16#61, 16#67, 16#65, 16#00,
+       16#00, 16#00, 16#0d, 16#61, 16#63, 16#63, 16#65, 16#70,
+       16#74, 16#2d, 16#72, 16#61, 16#6e, 16#67, 16#65, 16#73,
+       16#00, 16#00, 16#00, 16#03, 16#61, 16#67, 16#65, 16#00,
+       16#00, 16#00, 16#05, 16#61, 16#6c, 16#6c, 16#6f, 16#77,
+       16#00, 16#00, 16#00, 16#0d, 16#61, 16#75, 16#74, 16#68,
+       16#6f, 16#72, 16#69, 16#7a, 16#61, 16#74, 16#69, 16#6f,
+       16#6e, 16#00, 16#00, 16#00, 16#0d, 16#63, 16#61, 16#63,
+       16#68, 16#65, 16#2d, 16#63, 16#6f, 16#6e, 16#74, 16#72,
+       16#6f, 16#6c, 16#00, 16#00, 16#00, 16#0a, 16#63, 16#6f,
+       16#6e, 16#6e, 16#65, 16#63, 16#74, 16#69, 16#6f, 16#6e,
+       16#00, 16#00, 16#00, 16#0c, 16#63, 16#6f, 16#6e, 16#74,
+       16#65, 16#6e, 16#74, 16#2d, 16#62, 16#61, 16#73, 16#65,
+       16#00, 16#00, 16#00, 16#10, 16#63, 16#6f, 16#6e, 16#74,
+       16#65, 16#6e, 16#74, 16#2d, 16#65, 16#6e, 16#63, 16#6f,
+       16#64, 16#69, 16#6e, 16#67, 16#00, 16#00, 16#00, 16#10,
+       16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, 16#74, 16#2d,
+       16#6c, 16#61, 16#6e, 16#67, 16#75, 16#61, 16#67, 16#65,
+       16#00, 16#00, 16#00, 16#0e, 16#63, 16#6f, 16#6e, 16#74,
+       16#65, 16#6e, 16#74, 16#2d, 16#6c, 16#65, 16#6e, 16#67,
+       16#74, 16#68, 16#00, 16#00, 16#00, 16#10, 16#63, 16#6f,
+       16#6e, 16#74, 16#65, 16#6e, 16#74, 16#2d, 16#6c, 16#6f,
+       16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00,
+       16#00, 16#0b, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e,
+       16#74, 16#2d, 16#6d, 16#64, 16#35, 16#00, 16#00, 16#00,
+       16#0d, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, 16#74,
+       16#2d, 16#72, 16#61, 16#6e, 16#67, 16#65, 16#00, 16#00,
+       16#00, 16#0c, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e,
+       16#74, 16#2d, 16#74, 16#79, 16#70, 16#65, 16#00, 16#00,
+       16#00, 16#04, 16#64, 16#61, 16#74, 16#65, 16#00, 16#00,
+       16#00, 16#04, 16#65, 16#74, 16#61, 16#67, 16#00, 16#00,
+       16#00, 16#06, 16#65, 16#78, 16#70, 16#65, 16#63, 16#74,
+       16#00, 16#00, 16#00, 16#07, 16#65, 16#78, 16#70, 16#69,
+       16#72, 16#65, 16#73, 16#00, 16#00, 16#00, 16#04, 16#66,
+       16#72, 16#6f, 16#6d, 16#00, 16#00, 16#00, 16#04, 16#68,
+       16#6f, 16#73, 16#74, 16#00, 16#00, 16#00, 16#08, 16#69,
+       16#66, 16#2d, 16#6d, 16#61, 16#74, 16#63, 16#68, 16#00,
+       16#00, 16#00, 16#11, 16#69, 16#66, 16#2d, 16#6d, 16#6f,
+       16#64, 16#69, 16#66, 16#69, 16#65, 16#64, 16#2d, 16#73,
+       16#69, 16#6e, 16#63, 16#65, 16#00, 16#00, 16#00, 16#0d,
+       16#69, 16#66, 16#2d, 16#6e, 16#6f, 16#6e, 16#65, 16#2d,
+       16#6d, 16#61, 16#74, 16#63, 16#68, 16#00, 16#00, 16#00,
+       16#08, 16#69, 16#66, 16#2d, 16#72, 16#61, 16#6e, 16#67,
+       16#65, 16#00, 16#00, 16#00, 16#13, 16#69, 16#66, 16#2d,
+       16#75, 16#6e, 16#6d, 16#6f, 16#64, 16#69, 16#66, 16#69,
+       16#65, 16#64, 16#2d, 16#73, 16#69, 16#6e, 16#63, 16#65,
+       16#00, 16#00, 16#00, 16#0d, 16#6c, 16#61, 16#73, 16#74,
+       16#2d, 16#6d, 16#6f, 16#64, 16#69, 16#66, 16#69, 16#65,
+       16#64, 16#00, 16#00, 16#00, 16#08, 16#6c, 16#6f, 16#63,
+       16#61, 16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00, 16#00,
+       16#0c, 16#6d, 16#61, 16#78, 16#2d, 16#66, 16#6f, 16#72,
+       16#77, 16#61, 16#72, 16#64, 16#73, 16#00, 16#00, 16#00,
+       16#06, 16#70, 16#72, 16#61, 16#67, 16#6d, 16#61, 16#00,
+       16#00, 16#00, 16#12, 16#70, 16#72, 16#6f, 16#78, 16#79,
+       16#2d, 16#61, 16#75, 16#74, 16#68, 16#65, 16#6e, 16#74,
+       16#69, 16#63, 16#61, 16#74, 16#65, 16#00, 16#00, 16#00,
+       16#13, 16#70, 16#72, 16#6f, 16#78, 16#79, 16#2d, 16#61,
+       16#75, 16#74, 16#68, 16#6f, 16#72, 16#69, 16#7a, 16#61,
+       16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00, 16#00, 16#05,
+       16#72, 16#61, 16#6e, 16#67, 16#65, 16#00, 16#00, 16#00,
+       16#07, 16#72, 16#65, 16#66, 16#65, 16#72, 16#65, 16#72,
+       16#00, 16#00, 16#00, 16#0b, 16#72, 16#65, 16#74, 16#72,
+       16#79, 16#2d, 16#61, 16#66, 16#74, 16#65, 16#72, 16#00,
+       16#00, 16#00, 16#06, 16#73, 16#65, 16#72, 16#76, 16#65,
+       16#72, 16#00, 16#00, 16#00, 16#02, 16#74, 16#65, 16#00,
+       16#00, 16#00, 16#07, 16#74, 16#72, 16#61, 16#69, 16#6c,
+       16#65, 16#72, 16#00, 16#00, 16#00, 16#11, 16#74, 16#72,
+       16#61, 16#6e, 16#73, 16#66, 16#65, 16#72, 16#2d, 16#65,
+       16#6e, 16#63, 16#6f, 16#64, 16#69, 16#6e, 16#67, 16#00,
+       16#00, 16#00, 16#07, 16#75, 16#70, 16#67, 16#72, 16#61,
+       16#64, 16#65, 16#00, 16#00, 16#00, 16#0a, 16#75, 16#73,
+       16#65, 16#72, 16#2d, 16#61, 16#67, 16#65, 16#6e, 16#74,
+       16#00, 16#00, 16#00, 16#04, 16#76, 16#61, 16#72, 16#79,
+       16#00, 16#00, 16#00, 16#03, 16#76, 16#69, 16#61, 16#00,
+       16#00, 16#00, 16#07, 16#77, 16#61, 16#72, 16#6e, 16#69,
+       16#6e, 16#67, 16#00, 16#00, 16#00, 16#10, 16#77, 16#77,
+       16#77, 16#2d, 16#61, 16#75, 16#74, 16#68, 16#65, 16#6e,
+       16#74, 16#69, 16#63, 16#61, 16#74, 16#65, 16#00, 16#00,
+       16#00, 16#06, 16#6d, 16#65, 16#74, 16#68, 16#6f, 16#64,
+       16#00, 16#00, 16#00, 16#03, 16#67, 16#65, 16#74, 16#00,
+       16#00, 16#00, 16#06, 16#73, 16#74, 16#61, 16#74, 16#75,
+       16#73, 16#00, 16#00, 16#00, 16#06, 16#32, 16#30, 16#30,
+       16#20, 16#4f, 16#4b, 16#00, 16#00, 16#00, 16#07, 16#76,
+       16#65, 16#72, 16#73, 16#69, 16#6f, 16#6e, 16#00, 16#00,
+       16#00, 16#08, 16#48, 16#54, 16#54, 16#50, 16#2f, 16#31,
+       16#2e, 16#31, 16#00, 16#00, 16#00, 16#03, 16#75, 16#72,
+       16#6c, 16#00, 16#00, 16#00, 16#06, 16#70, 16#75, 16#62,
+       16#6c, 16#69, 16#63, 16#00, 16#00, 16#00, 16#0a, 16#73,
+       16#65, 16#74, 16#2d, 16#63, 16#6f, 16#6f, 16#6b, 16#69,
+       16#65, 16#00, 16#00, 16#00, 16#0a, 16#6b, 16#65, 16#65,
+       16#70, 16#2d, 16#61, 16#6c, 16#69, 16#76, 16#65, 16#00,
+       16#00, 16#00, 16#06, 16#6f, 16#72, 16#69, 16#67, 16#69,
+       16#6e, 16#31, 16#30, 16#30, 16#31, 16#30, 16#31, 16#32,
+       16#30, 16#31, 16#32, 16#30, 16#32, 16#32, 16#30, 16#35,
+       16#32, 16#30, 16#36, 16#33, 16#30, 16#30, 16#33, 16#30,
+       16#32, 16#33, 16#30, 16#33, 16#33, 16#30, 16#34, 16#33,
+       16#30, 16#35, 16#33, 16#30, 16#36, 16#33, 16#30, 16#37,
+       16#34, 16#30, 16#32, 16#34, 16#30, 16#35, 16#34, 16#30,
+       16#36, 16#34, 16#30, 16#37, 16#34, 16#30, 16#38, 16#34,
+       16#30, 16#39, 16#34, 16#31, 16#30, 16#34, 16#31, 16#31,
+       16#34, 16#31, 16#32, 16#34, 16#31, 16#33, 16#34, 16#31,
+       16#34, 16#34, 16#31, 16#35, 16#34, 16#31, 16#36, 16#34,
+       16#31, 16#37, 16#35, 16#30, 16#32, 16#35, 16#30, 16#34,
+       16#35, 16#30, 16#35, 16#32, 16#30, 16#33, 16#20, 16#4e,
+       16#6f, 16#6e, 16#2d, 16#41, 16#75, 16#74, 16#68, 16#6f,
+       16#72, 16#69, 16#74, 16#61, 16#74, 16#69, 16#76, 16#65,
+       16#20, 16#49, 16#6e, 16#66, 16#6f, 16#72, 16#6d, 16#61,
+       16#74, 16#69, 16#6f, 16#6e, 16#32, 16#30, 16#34, 16#20,
+       16#4e, 16#6f, 16#20, 16#43, 16#6f, 16#6e, 16#74, 16#65,
+       16#6e, 16#74, 16#33, 16#30, 16#31, 16#20, 16#4d, 16#6f,
+       16#76, 16#65, 16#64, 16#20, 16#50, 16#65, 16#72, 16#6d,
+       16#61, 16#6e, 16#65, 16#6e, 16#74, 16#6c, 16#79, 16#34,
+       16#30, 16#30, 16#20, 16#42, 16#61, 16#64, 16#20, 16#52,
+       16#65, 16#71, 16#75, 16#65, 16#73, 16#74, 16#34, 16#30,
+       16#31, 16#20, 16#55, 16#6e, 16#61, 16#75, 16#74, 16#68,
+       16#6f, 16#72, 16#69, 16#7a, 16#65, 16#64, 16#34, 16#30,
+       16#33, 16#20, 16#46, 16#6f, 16#72, 16#62, 16#69, 16#64,
+       16#64, 16#65, 16#6e, 16#34, 16#30, 16#34, 16#20, 16#4e,
+       16#6f, 16#74, 16#20, 16#46, 16#6f, 16#75, 16#6e, 16#64,
+       16#35, 16#30, 16#30, 16#20, 16#49, 16#6e, 16#74, 16#65,
+       16#72, 16#6e, 16#61, 16#6c, 16#20, 16#53, 16#65, 16#72,
+       16#76, 16#65, 16#72, 16#20, 16#45, 16#72, 16#72, 16#6f,
+       16#72, 16#35, 16#30, 16#31, 16#20, 16#4e, 16#6f, 16#74,
+       16#20, 16#49, 16#6d, 16#70, 16#6c, 16#65, 16#6d, 16#65,
+       16#6e, 16#74, 16#65, 16#64, 16#35, 16#30, 16#33, 16#20,
+       16#53, 16#65, 16#72, 16#76, 16#69, 16#63, 16#65, 16#20,
+       16#55, 16#6e, 16#61, 16#76, 16#61, 16#69, 16#6c, 16#61,
+       16#62, 16#6c, 16#65, 16#4a, 16#61, 16#6e, 16#20, 16#46,
+       16#65, 16#62, 16#20, 16#4d, 16#61, 16#72, 16#20, 16#41,
+       16#70, 16#72, 16#20, 16#4d, 16#61, 16#79, 16#20, 16#4a,
+       16#75, 16#6e, 16#20, 16#4a, 16#75, 16#6c, 16#20, 16#41,
+       16#75, 16#67, 16#20, 16#53, 16#65, 16#70, 16#74, 16#20,
+       16#4f, 16#63, 16#74, 16#20, 16#4e, 16#6f, 16#76, 16#20,
+       16#44, 16#65, 16#63, 16#20, 16#30, 16#30, 16#3a, 16#30,
+       16#30, 16#3a, 16#30, 16#30, 16#20, 16#4d, 16#6f, 16#6e,
+       16#2c, 16#20, 16#54, 16#75, 16#65, 16#2c, 16#20, 16#57,
+       16#65, 16#64, 16#2c, 16#20, 16#54, 16#68, 16#75, 16#2c,
+       16#20, 16#46, 16#72, 16#69, 16#2c, 16#20, 16#53, 16#61,
+       16#74, 16#2c, 16#20, 16#53, 16#75, 16#6e, 16#2c, 16#20,
+       16#47, 16#4d, 16#54, 16#63, 16#68, 16#75, 16#6e, 16#6b,
+       16#65, 16#64, 16#2c, 16#74, 16#65, 16#78, 16#74, 16#2f,
+       16#68, 16#74, 16#6d, 16#6c, 16#2c, 16#69, 16#6d, 16#61,
+       16#67, 16#65, 16#2f, 16#70, 16#6e, 16#67, 16#2c, 16#69,
+       16#6d, 16#61, 16#67, 16#65, 16#2f, 16#6a, 16#70, 16#67,
+       16#2c, 16#69, 16#6d, 16#61, 16#67, 16#65, 16#2f, 16#67,
+       16#69, 16#66, 16#2c, 16#61, 16#70, 16#70, 16#6c, 16#69,
+       16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#2f, 16#78,
+       16#6d, 16#6c, 16#2c, 16#61, 16#70, 16#70, 16#6c, 16#69,
+       16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#2f, 16#78,
+       16#68, 16#74, 16#6d, 16#6c, 16#2b, 16#78, 16#6d, 16#6c,
+       16#2c, 16#74, 16#65, 16#78, 16#74, 16#2f, 16#70, 16#6c,
+       16#61, 16#69, 16#6e, 16#2c, 16#74, 16#65, 16#78, 16#74,
+       16#2f, 16#6a, 16#61, 16#76, 16#61, 16#73, 16#63, 16#72,
+       16#69, 16#70, 16#74, 16#2c, 16#70, 16#75, 16#62, 16#6c,
+       16#69, 16#63, 16#70, 16#72, 16#69, 16#76, 16#61, 16#74,
+       16#65, 16#6d, 16#61, 16#78, 16#2d, 16#61, 16#67, 16#65,
+       16#3d, 16#67, 16#7a, 16#69, 16#70, 16#2c, 16#64, 16#65,
+       16#66, 16#6c, 16#61, 16#74, 16#65, 16#2c, 16#73, 16#64,
+       16#63, 16#68, 16#63, 16#68, 16#61, 16#72, 16#73, 16#65,
+       16#74, 16#3d, 16#75, 16#74, 16#66, 16#2d, 16#38, 16#63,
+       16#68, 16#61, 16#72, 16#73, 16#65, 16#74, 16#3d, 16#69,
+       16#73, 16#6f, 16#2d, 16#38, 16#38, 16#35, 16#39, 16#2d,
+       16#31, 16#2c, 16#75, 16#74, 16#66, 16#2d, 16#2c, 16#2a,
+       16#2c, 16#65, 16#6e, 16#71, 16#3d, 16#30, 16#2e >>).
diff --git a/deps/cowlib/src/cowlib.app.src b/deps/cowlib/src/cowlib.app.src
new file mode 100644 (file)
index 0000000..7545fc7
--- /dev/null
@@ -0,0 +1,7 @@
+{application,cowlib,
+             [{description,"Support library for manipulating Web protocols."},
+              {vsn,"1.0.1"},
+              {id,"git"},
+              {modules,[]},
+              {registered,[]},
+              {applications,[kernel,stdlib,crypto]}]}.
diff --git a/deps/licensing/LICENSE-EPL-OTP b/deps/licensing/LICENSE-EPL-OTP
new file mode 100644 (file)
index 0000000..2257751
--- /dev/null
@@ -0,0 +1,286 @@
+ERLANG PUBLIC LICENSE
+Version 1.1
+
+1. Definitions.
+
+1.1. ``Contributor'' means each entity that creates or contributes to
+the creation of Modifications.
+
+1.2. ``Contributor Version'' means the combination of the Original
+Code, prior Modifications used by a Contributor, and the Modifications
+made by that particular Contributor.
+
+1.3. ``Covered Code'' means the Original Code or Modifications or the
+combination of the Original Code and Modifications, in each case
+including portions thereof.
+
+1.4. ``Electronic Distribution Mechanism'' means a mechanism generally
+accepted in the software development community for the electronic
+transfer of data.
+
+1.5. ``Executable'' means Covered Code in any form other than Source
+Code.
+
+1.6. ``Initial Developer'' means the individual or entity identified
+as the Initial Developer in the Source Code notice required by Exhibit
+A.
+
+1.7. ``Larger Work'' means a work which combines Covered Code or
+portions thereof with code not governed by the terms of this License.
+
+1.8. ``License'' means this document.
+
+1.9. ``Modifications'' means any addition to or deletion from the
+substance or structure of either the Original Code or any previous
+Modifications. When Covered Code is released as a series of files, a
+Modification is:
+
+A. Any addition to or deletion from the contents of a file containing
+   Original Code or previous Modifications. 
+
+B. Any new file that contains any part of the Original Code or
+   previous Modifications. 
+
+1.10. ``Original Code'' means Source Code of computer software code
+which is described in the Source Code notice required by Exhibit A as
+Original Code, and which, at the time of its release under this
+License is not already Covered Code governed by this License.
+
+1.11. ``Source Code'' means the preferred form of the Covered Code for
+making modifications to it, including all modules it contains, plus
+any associated interface definition files, scripts used to control
+compilation and installation of an Executable, or a list of source
+code differential comparisons against either the Original Code or
+another well known, available Covered Code of the Contributor's
+choice. The Source Code can be in a compressed or archival form,
+provided the appropriate decompression or de-archiving software is
+widely available for no charge.
+
+1.12. ``You'' means an individual or a legal entity exercising rights
+under, and complying with all of the terms of, this License. For legal
+entities,``You'' includes any entity which controls, is controlled by,
+or is under common control with You. For purposes of this definition,
+``control'' means (a) the power, direct or indirect, to cause the
+direction or management of such entity, whether by contract or
+otherwise, or (b) ownership of fifty percent (50%) or more of the
+outstanding shares or beneficial ownership of such entity.
+
+2. Source Code License.
+
+2.1. The Initial Developer Grant.
+The Initial Developer hereby grants You a world-wide, royalty-free,
+non-exclusive license, subject to third party intellectual property
+claims:
+
+(a) to use, reproduce, modify, display, perform, sublicense and
+    distribute the Original Code (or portions thereof) with or without
+    Modifications, or as part of a Larger Work; and 
+
+(b) under patents now or hereafter owned or controlled by Initial
+    Developer, to make, have made, use and sell (``Utilize'') the
+    Original Code (or portions thereof), but solely to the extent that
+    any such patent is reasonably necessary to enable You to Utilize
+    the Original Code (or portions thereof) and not to any greater
+    extent that may be necessary to Utilize further Modifications or
+    combinations. 
+
+2.2. Contributor Grant.
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license, subject to third party intellectual property
+claims:
+
+(a) to use, reproduce, modify, display, perform, sublicense and
+    distribute the Modifications created by such Contributor (or
+    portions thereof) either on an unmodified basis, with other
+    Modifications, as Covered Code or as part of a Larger Work; and 
+
+(b) under patents now or hereafter owned or controlled by Contributor,
+    to Utilize the Contributor Version (or portions thereof), but
+    solely to the extent that any such patent is reasonably necessary
+    to enable You to Utilize the Contributor Version (or portions
+    thereof), and not to any greater extent that may be necessary to
+    Utilize further Modifications or combinations. 
+
+3. Distribution Obligations.
+
+3.1. Application of License.
+The Modifications which You contribute are governed by the terms of
+this License, including without limitation Section 2.2. The Source
+Code version of Covered Code may be distributed only under the terms
+of this License, and You must include a copy of this License with
+every copy of the Source Code You distribute. You may not offer or
+impose any terms on any Source Code version that alters or restricts
+the applicable version of this License or the recipients' rights
+hereunder. However, You may include an additional document offering
+the additional rights described in Section 3.5. 
+
+3.2. Availability of Source Code.
+Any Modification which You contribute must be made available in Source
+Code form under the terms of this License either on the same media as
+an Executable version or via an accepted Electronic Distribution
+Mechanism to anyone to whom you made an Executable version available;
+and if made available via Electronic Distribution Mechanism, must
+remain available for at least twelve (12) months after the date it
+initially became available, or at least six (6) months after a
+subsequent version of that particular Modification has been made
+available to such recipients. You are responsible for ensuring that
+the Source Code version remains available even if the Electronic
+Distribution Mechanism is maintained by a third party.
+
+3.3. Description of Modifications.
+You must cause all Covered Code to which you contribute to contain a
+file documenting the changes You made to create that Covered Code and
+the date of any change. You must include a prominent statement that
+the Modification is derived, directly or indirectly, from Original
+Code provided by the Initial Developer and including the name of the
+Initial Developer in (a) the Source Code, and (b) in any notice in an
+Executable version or related documentation in which You describe the
+origin or ownership of the Covered Code.
+
+3.4. Intellectual Property Matters
+
+(a) Third Party Claims.
+    If You have knowledge that a party claims an intellectual property
+    right in particular functionality or code (or its utilization
+    under this License), you must include a text file with the source
+    code distribution titled ``LEGAL'' which describes the claim and
+    the party making the claim in sufficient detail that a recipient
+    will know whom to contact. If you obtain such knowledge after You
+    make Your Modification available as described in Section 3.2, You
+    shall promptly modify the LEGAL file in all copies You make
+    available thereafter and shall take other steps (such as notifying
+    appropriate mailing lists or newsgroups) reasonably calculated to
+    inform those who received the Covered Code that new knowledge has
+    been obtained. 
+
+(b) Contributor APIs.
+    If Your Modification is an application programming interface and
+    You own or control patents which are reasonably necessary to
+    implement that API, you must also include this information in the
+    LEGAL file. 
+
+3.5. Required Notices.
+You must duplicate the notice in Exhibit A in each file of the Source
+Code, and this License in any documentation for the Source Code, where
+You describe recipients' rights relating to Covered Code. If You
+created one or more Modification(s), You may add your name as a
+Contributor to the notice described in Exhibit A. If it is not
+possible to put such notice in a particular Source Code file due to
+its structure, then you must include such notice in a location (such
+as a relevant directory file) where a user would be likely to look for
+such a notice. You may choose to offer, and to charge a fee for,
+warranty, support, indemnity or liability obligations to one or more
+recipients of Covered Code. However, You may do so only on Your own
+behalf, and not on behalf of the Initial Developer or any
+Contributor. You must make it absolutely clear than any such warranty,
+support, indemnity or liability obligation is offered by You alone,
+and You hereby agree to indemnify the Initial Developer and every
+Contributor for any liability incurred by the Initial Developer or
+such Contributor as a result of warranty, support, indemnity or
+liability terms You offer.
+
+3.6. Distribution of Executable Versions.
+You may distribute Covered Code in Executable form only if the
+requirements of Section 3.1-3.5 have been met for that Covered Code,
+and if You include a notice stating that the Source Code version of
+the Covered Code is available under the terms of this License,
+including a description of how and where You have fulfilled the
+obligations of Section 3.2. The notice must be conspicuously included
+in any notice in an Executable version, related documentation or
+collateral in which You describe recipients' rights relating to the
+Covered Code. You may distribute the Executable version of Covered
+Code under a license of Your choice, which may contain terms different
+from this License, provided that You are in compliance with the terms
+of this License and that the license for the Executable version does
+not attempt to limit or alter the recipient's rights in the Source
+Code version from the rights set forth in this License. If You
+distribute the Executable version under a different license You must
+make it absolutely clear that any terms which differ from this License
+are offered by You alone, not by the Initial Developer or any
+Contributor. You hereby agree to indemnify the Initial Developer and
+every Contributor for any liability incurred by the Initial Developer
+or such Contributor as a result of any such terms You offer.
+
+3.7. Larger Works.
+You may create a Larger Work by combining Covered Code with other code
+not governed by the terms of this License and distribute the Larger
+Work as a single product. In such a case, You must make sure the
+requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Code due to statute
+or regulation then You must: (a) comply with the terms of this License
+to the maximum extent possible; and (b) describe the limitations and
+the code they affect. Such description must be included in the LEGAL
+file described in Section 3.4 and must be included with all
+distributions of the Source Code. Except to the extent prohibited by
+statute or regulation, such description must be sufficiently detailed
+for a recipient of ordinary skill to be able to understand it.
+
+5. Application of this License.
+
+This License applies to code to which the Initial Developer has
+attached the notice in Exhibit A, and to related Covered Code.
+
+6. CONNECTION TO MOZILLA PUBLIC LICENSE
+
+This Erlang License is a derivative work of the Mozilla Public
+License, Version 1.0. It contains terms which differ from the Mozilla
+Public License, Version 1.0.
+
+7. DISCLAIMER OF WARRANTY.
+
+COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN ``AS IS'' BASIS,
+WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR
+NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF
+THE COVERED CODE IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE
+IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER
+CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR
+CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART
+OF THIS LICENSE. NO USE OF ANY COVERED CODE IS AUTHORIZED HEREUNDER
+EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+This License and the rights granted hereunder will terminate
+automatically if You fail to comply with terms herein and fail to cure
+such breach within 30 days of becoming aware of the breach. All
+sublicenses to the Covered Code which are properly granted shall
+survive any termination of this License. Provisions which, by their
+nature, must remain in effect beyond the termination of this License
+shall survive.
+
+9. DISCLAIMER OF LIABILITY
+Any utilization of Covered Code shall not cause the Initial Developer
+or any Contributor to be liable for any damages (neither direct nor
+indirect).
+
+10. MISCELLANEOUS
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision is held to be unenforceable, such
+provision shall be reformed only to the extent necessary to make it
+enforceable. This License shall be construed by and in accordance with
+the substantive laws of Sweden. Any dispute, controversy or claim
+arising out of or relating to this License, or the breach, termination
+or invalidity thereof, shall be subject to the exclusive jurisdiction
+of Swedish courts, with the Stockholm City Court as the first
+instance.
+       
+EXHIBIT A.
+
+``The contents of this file are subject to the Erlang Public License,
+Version 1.1, (the "License"); you may not use this file except in
+compliance with the License. You should have received a copy of the
+Erlang Public License along with this software. If not, it can be
+retrieved via the world wide web at http://www.erlang.org/.
+
+Software distributed under the License is distributed on an "AS IS"
+basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+the License for the specific language governing rights and limitations
+under the License.
+
+The Initial Developer of the Original Code is Ericsson AB.
+Portions created by Ericsson are Copyright 2013, Ericsson AB.
+All Rights Reserved.''
diff --git a/deps/licensing/LICENSE-MIT-Erlware-Commons b/deps/licensing/LICENSE-MIT-Erlware-Commons
new file mode 100644 (file)
index 0000000..fc89c02
--- /dev/null
@@ -0,0 +1,21 @@
+Copyright (c) 2011 Erlware, LLC
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/deps/licensing/LICENSE-MIT-Mochiweb b/deps/licensing/LICENSE-MIT-Mochiweb
new file mode 100644 (file)
index 0000000..7b7c506
--- /dev/null
@@ -0,0 +1,22 @@
+This is the MIT license.
+
+Copyright (c) 2007 Mochi Media, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
similarity index 96%
rename from rabbitmq-server/LICENSE-MIT-eldap
rename to deps/licensing/LICENSE-MIT-SockJS
index 1f6200918f7b8b90047ae33eac3c175507dd76b1..a89716714a4612336b39c0e3a073cc28e83be409 100644 (file)
@@ -1,6 +1,5 @@
+Copyright (C) 2011 VMware, Inc.
 
-Copyright (c) 2010, Torbjorn Tornkvist
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
@@ -18,4 +17,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
-
diff --git a/deps/licensing/LICENSE-MPL2 b/deps/licensing/LICENSE-MPL2
new file mode 100644 (file)
index 0000000..14e2f77
--- /dev/null
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+    means each individual or legal entity that creates, contributes to
+    the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+    means the combination of the Contributions of others (if any) used
+    by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+    means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+    means Source Code Form to which the initial Contributor has attached
+    the notice in Exhibit A, the Executable Form of such Source Code
+    Form, and Modifications of such Source Code Form, in each case
+    including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+    means
+
+    (a) that the initial Contributor has attached the notice described
+        in Exhibit B to the Covered Software; or
+
+    (b) that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the
+        terms of a Secondary License.
+
+1.6. "Executable Form"
+    means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+    means a work that combines Covered Software with other material, in 
+    a separate file or files, that is not Covered Software.
+
+1.8. "License"
+    means this document.
+
+1.9. "Licensable"
+    means having the right to grant, to the maximum extent possible,
+    whether at the time of the initial grant or subsequently, any and
+    all of the rights conveyed by this License.
+
+1.10. "Modifications"
+    means any of the following:
+
+    (a) any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered
+        Software; or
+
+    (b) any new file in Source Code Form that contains any Covered
+        Software.
+
+1.11. "Patent Claims" of a Contributor
+    means any patent claim(s), including without limitation, method,
+    process, and apparatus claims, in any patent Licensable by such
+    Contributor that would be infringed, but for the grant of the
+    License, by the making, using, selling, offering for sale, having
+    made, import, or transfer of either its Contributions or its
+    Contributor Version.
+
+1.12. "Secondary License"
+    means either the GNU General Public License, Version 2.0, the GNU
+    Lesser General Public License, Version 2.1, the GNU Affero General
+    Public License, Version 3.0, or any later versions of those
+    licenses.
+
+1.13. "Source Code Form"
+    means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+    means an individual or a legal entity exercising rights under this
+    License. For legal entities, "You" includes any entity that
+    controls, is controlled by, or is under common control with You. For
+    purposes of this definition, "control" means (a) the power, direct
+    or indirect, to cause the direction or management of such entity,
+    whether by contract or otherwise, or (b) ownership of more than
+    fifty percent (50%) of the outstanding shares or beneficial
+    ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+    Licensable by such Contributor to use, reproduce, make available,
+    modify, display, perform, distribute, and otherwise exploit its
+    Contributions, either on an unmodified basis, with Modifications, or
+    as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+    for sale, have made, import, and otherwise transfer either its
+    Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+    or
+
+(b) for infringements caused by: (i) Your and any other third party's
+    modifications of Covered Software, or (ii) the combination of its
+    Contributions with other software (except as part of its Contributor
+    Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+    its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+    Form, as described in Section 3.1, and You must inform recipients of
+    the Executable Form how they can obtain a copy of such Source Code
+    Form by reasonable means in a timely manner, at a charge no more
+    than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+    License, or sublicense it under different terms, provided that the
+    license for the Executable Form does not attempt to limit or alter
+    the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+*                                                                      *
+*  6. Disclaimer of Warranty                                           *
+*  -------------------------                                           *
+*                                                                      *
+*  Covered Software is provided under this License on an "as is"       *
+*  basis, without warranty of any kind, either expressed, implied, or  *
+*  statutory, including, without limitation, warranties that the       *
+*  Covered Software is free of defects, merchantable, fit for a        *
+*  particular purpose or non-infringing. The entire risk as to the     *
+*  quality and performance of the Covered Software is with You.        *
+*  Should any Covered Software prove defective in any respect, You     *
+*  (not any Contributor) assume the cost of any necessary servicing,   *
+*  repair, or correction. This disclaimer of warranty constitutes an   *
+*  essential part of this License. No use of any Covered Software is   *
+*  authorized under this License except under this disclaimer.         *
+*                                                                      *
+************************************************************************
+
+************************************************************************
+*                                                                      *
+*  7. Limitation of Liability                                          *
+*  --------------------------                                          *
+*                                                                      *
+*  Under no circumstances and under no legal theory, whether tort      *
+*  (including negligence), contract, or otherwise, shall any           *
+*  Contributor, or anyone who distributes Covered Software as          *
+*  permitted above, be liable to You for any direct, indirect,         *
+*  special, incidental, or consequential damages of any character      *
+*  including, without limitation, damages for lost profits, loss of    *
+*  goodwill, work stoppage, computer failure or malfunction, or any    *
+*  and all other commercial damages or losses, even if such party      *
+*  shall have been informed of the possibility of such damages. This   *
+*  limitation of liability shall not apply to liability for death or   *
+*  personal injury resulting from such party's negligence to the       *
+*  extent applicable law prohibits such limitation. Some               *
+*  jurisdictions do not allow the exclusion or limitation of           *
+*  incidental or consequential damages, so this exclusion and          *
+*  limitation may not apply to You.                                    *
+*                                                                      *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+  This Source Code Form is subject to the terms of the Mozilla Public
+  License, v. 2.0. If a copy of the MPL was not distributed with this
+  file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+  This Source Code Form is "Incompatible With Secondary Licenses", as
+  defined by the Mozilla Public License, v. 2.0.
diff --git a/deps/licensing/license_info_rabbitmq_codegen b/deps/licensing/license_info_rabbitmq_codegen
new file mode 100644 (file)
index 0000000..a703cbd
--- /dev/null
@@ -0,0 +1,4 @@
+The files amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are
+"Copyright (C) 2008-2016 Pivotal Software, Inc." and are covered by the MIT
+license.
+
diff --git a/deps/mochiweb/.editorconfig b/deps/mochiweb/.editorconfig
new file mode 100644 (file)
index 0000000..d03550e
--- /dev/null
@@ -0,0 +1,17 @@
+# EditorConfig file: http://EditorConfig.org
+
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+# 4 space indentation
+[*.{erl,src,hrl}]
+indent_style = space
+indent_size = 4
diff --git a/deps/mochiweb/CHANGES.md b/deps/mochiweb/CHANGES.md
new file mode 100644 (file)
index 0000000..1b88f92
--- /dev/null
@@ -0,0 +1,206 @@
+Version 2.13.1 released 2016-03-13
+
+* Fix mochiweb_html regression parsing invalid charref sequences
+  https://github.com/mochi/mochiweb/issues/167
+
+Version 2.13.0 released 2016-02-08
+
+* Support parsing of UTF-16 surrogate pairs encoded as character
+  references in mochiweb_html
+  https://github.com/mochi/mochiweb/issues/164
+* Avoid swallowing messages that are not related to the socket
+  during request parsing
+  https://github.com/mochi/mochiweb/pull/161
+* Ensure correct ordering of Set-Cookie headers: first in, first out
+  https://github.com/mochi/mochiweb/issues/162
+* Improve response times by caching a formatted date once per second
+  for the response headers with a mochiweb_clock service
+  https://github.com/mochi/mochiweb/pull/158
+
+Version 2.12.2 released 2015-02-21
+
+* Close connections quietly when setopts fails with a closed socket.
+  https://github.com/mochi/mochiweb/pull/152
+
+Version 2.12.1 released 2015-02-01
+
+* Fix active_socket accounting
+  https://github.com/mochi/mochiweb/issues/149
+* Added full MIT license preludes to each source file to make it
+  easier for mochiweb's code to be used piecemeal
+  https://github.com/mochi/mochiweb/pull/148
+
+Version 2.12.0 released 2015-01-16
+
+* Send "Connection: close" header when the server is going to close
+  a Keep-Alive connection, usually due to unread data from the
+  client
+  https://github.com/mochi/mochiweb/issues/146
+
+Version 2.11.2 released 2015-01-16
+
+* Fix regression introduced in #147
+  https://github.com/mochi/mochiweb/pull/147
+
+Version 2.11.1 released 2015-01-16
+
+* Accept range end position which exceeds the resource size
+  https://github.com/mochi/mochiweb/pull/147
+
+Version 2.11.0 released 2015-01-12
+
+* Perform SSL handshake after releasing acceptor back into the pool,
+  and slow accept rate when file descriptors are not available,
+  to mitigate a potential DoS attack. Adds new mochiweb_socket
+  functions transport_accept/1 and finish_accept/1 which should be
+  used in preference to the now deprecated accept/1 function.
+  https://github.com/mochi/mochiweb/issues/138
+
+Version 2.10.1 released 2015-01-11
+
+* Fixes issue with SSL and mochiweb_websocket. Note that
+  mochiweb_websocket is still experimental and the API
+  is subject to change in future versions.
+  https://github.com/mochi/mochiweb/pull/144
+
+Version 2.10.0 released 2014-12-17
+
+* Added new `recbuf` option to mochiweb_http to allow the receive
+  buffer to be configured.
+  https://github.com/mochi/mochiweb/pull/134
+
+Version 2.9.2 released 2014-10-16
+
+* Add timeouts to SSL connect to prevent DoS by opening a connection
+  and not doing anything.
+  https://github.com/mochi/mochiweb/pull/140
+* Prevent using ECDH cipher in R16B because it is broken
+  https://github.com/mochi/mochiweb/pull/140
+* For default SSL connections, remove usage of sslv3 and not-so-secure
+  ciphers.
+  https://github.com/mochi/mochiweb/pull/140
+
+Version 2.9.1 released 2014-09-29
+
+* Fix Makefile rule for building docs
+  https://github.com/mochi/mochiweb/issues/135
+* Minimize gen_tcp:send calls to optimize performance.
+  https://github.com/mochi/mochiweb/pull/137
+
+Version 2.9.0 released 2014-06-24
+
+* Increased timeout in test suite for FreeBSD
+  https://github.com/mochi/mochiweb/pull/121
+* Updated rebar to v2.5.0 and fixed associated build issues
+  https://github.com/mochi/mochiweb/issues/131
+
+Version 2.8.0 released 2014-01-01
+
+* Websocket support
+  https://github.com/mochi/mochiweb/pull/120
+* Force files named "crossdomain.xml" to have MIME type
+  text/x-cross-domain-policy.
+  https://github.com/mochi/mochiweb/pull/118
+
+Version 2.7.0 released 2013-08-01
+
+* Fix 0-length range responses
+  https://github.com/mochi/mochiweb/pull/87
+* Add support for all possible `erlang:decode_packet/3` responses,
+  previously these would just crash.
+  https://github.com/mochi/mochiweb/pull/114
+* Makefile fixed to make `make test` work before `make all`
+  https://github.com/mochi/mochiweb/pull/116
+* Usage of the crypto module made R16B01+ compatible
+  https://github.com/mochi/mochiweb/pull/115
+* Build fixed for R16B01
+  https://github.com/mochi/mochiweb/pull/112
+* `mochiweb_socket_server:stop/1` is now a synchronous
+  call instead of an asynchronous cast
+* `mochiweb_html:parse_tokens/1` (and `parse/1`) will now create a
+  html element to wrap documents that have a HTML5 doctype
+  (`<!doctype html>`) but no html element
+  https://github.com/mochi/mochiweb/issues/110
+
+Version 2.6.0 released 2013-04-15
+
+* Enable R15B gen_tcp workaround only on R15B
+  https://github.com/mochi/mochiweb/pull/107
+
+Version 2.5.0 released 2013-03-04
+
+* Replace now() with os:timestamp() in acceptor (optimization)
+  https://github.com/mochi/mochiweb/pull/102
+* New mochiweb_session module for managing session cookies.
+  NOTE: this module is only supported on R15B02 and later!
+  https://github.com/mochi/mochiweb/pull/94
+* New mochiweb_base64url module for base64url encoding
+  (URL and Filename safe alphabet, see RFC 4648).
+* Fix rebar.config in mochiwebapp_skel to use {branch, "master"}
+  https://github.com/mochi/mochiweb/issues/105
+
+Version 2.4.2 released 2013-02-05
+
+* Fixed issue in mochiweb_response introduced in v2.4.0
+  https://github.com/mochi/mochiweb/pull/100
+
+Version 2.4.1 released 2013-01-30
+
+* Fixed issue in mochiweb_request introduced in v2.4.0
+  https://github.com/mochi/mochiweb/issues/97
+* Fixed issue in mochifmt_records introduced in v2.4.0
+  https://github.com/mochi/mochiweb/issues/96
+
+Version 2.4.0 released 2013-01-23
+
+* Switch from parameterized modules to explicit tuple module calls for
+  R16 compatibility (#95)
+* Fix for mochiweb_acceptor crash with extra-long HTTP headers under
+  R15B02 (#91)
+* Fix case in handling range headers (#85)
+* Handle combined Content-Length header (#88)
+* Windows security fix for `safe_relative_path`, any path with a
+  backslash on any platform is now considered unsafe (#92)
+
+Version 2.3.2 released 2012-07-27
+
+* Case insensitive match for "Connection: close" (#81)
+
+Version 2.3.1 released 2012-03-31
+
+* Fix edoc warnings (#63)
+* Fix mochiweb_html handling of invalid charref sequences (unescaped &) (#69).
+* Add a manual garbage collection between requests to avoid worst case behavior
+  on keep-alive sockets.
+* Fix dst cookie bug (#73)
+* Removed unnecessary template_dir option, see
+  https://github.com/basho/rebar/issues/203
+
+Version 2.3.0 released 2011-10-14
+
+* Handle ssl_closed message in mochiweb_http (#59)
+* Added support for new MIME types (otf, eot, m4v, svg, svgz, ttc, ttf,
+  vcf, webm, webp, woff) (#61)
+* Updated mochiweb_charref to support all HTML5 entities. Note that
+  if you are using this module directly, the spec has changed to return
+  `[integer()]` for some entities. (#64)
+
+Version 2.2.1 released 2011-08-31
+
+* Removed `mochiweb_skel` module from the pre-rebar era
+
+Version 2.2.0 released 2011-08-29
+
+* Added new `mochiweb_http:start_link/1` and
+  `mochiweb_socket_server:start_link/1` APIs to explicitly start linked
+  servers. Also added `{link, false}` option to the `start/1` variants
+  to explicitly start unlinked. This is in expectation that we will
+  eventually change the default behavior of `start/1` to be unlinked as you
+  would expect it to. See https://github.com/mochi/mochiweb/issues/58 for
+  discussion.
+
+Version 2.1.0 released 2011-08-29
+
+* Added new `mochijson2:decode/2` with `{format, struct | proplist | eep18}`
+  options for easy decoding to various proplist formats. Also added encoding
+  support for eep18 style objects.
diff --git a/deps/mochiweb/Makefile b/deps/mochiweb/Makefile
new file mode 100644 (file)
index 0000000..244d7be
--- /dev/null
@@ -0,0 +1,22 @@
+IGNORE_DEPS += edown eper eunit_formatters meck node_package rebar_lock_deps_plugin rebar_vsn_plugin reltool_util
+C_SRC_DIR = /path/do/not/exist
+C_SRC_TYPE = rebar
+DRV_CFLAGS = -fPIC
+export DRV_CFLAGS
+ERLANG_ARCH = 64
+export ERLANG_ARCH
+ERLC_OPTS = +debug_info
+export ERLC_OPTS
+
+COMPILE_FIRST +=
+
+
+rebar_dep: preprocess pre-deps deps pre-app app
+
+preprocess::
+
+pre-deps::
+
+pre-app::
+
+include ../../erlang.mk
\ No newline at end of file
diff --git a/deps/mochiweb/Makefile.orig.mk b/deps/mochiweb/Makefile.orig.mk
new file mode 100644 (file)
index 0000000..983c304
--- /dev/null
@@ -0,0 +1,24 @@
+PREFIX:=../
+DEST:=$(PREFIX)$(PROJECT)
+
+REBAR=./rebar
+
+.PHONY: all edoc test clean build_plt dialyzer app
+
+all:
+       @$(REBAR) prepare-deps
+
+edoc: all
+       @$(REBAR) doc
+
+test:
+       @rm -rf .eunit
+       @mkdir -p .eunit
+       @$(REBAR) eunit
+
+clean:
+       @$(REBAR) clean
+
+app:
+       @[ -z "$(PROJECT)" ] && echo "ERROR: required variable PROJECT missing" 1>&2 && exit 1 || true
+       @$(REBAR) -r create template=mochiwebapp dest=$(DEST) appid=$(PROJECT)
diff --git a/deps/mochiweb/examples/websocket/index.html b/deps/mochiweb/examples/websocket/index.html
new file mode 100644 (file)
index 0000000..6926aba
--- /dev/null
@@ -0,0 +1,59 @@
+<!doctype html>
+<html>
+<head>
+  <title>Websockets With Mochiweb Demo</title>
+</head>
+<body>
+<h1>Mochiweb websocket demo</h1>
+
+  <div id="connect">
+     <button id="btnConn">Connect</button>
+     &nbsp; State: <span id="connstate" style="font-weight:bold;"></span>
+  </div>
+    <br/><i>Protip: open your javascript error console, just in case..</i><br/>
+  <hr/>
+  <div id="connected">
+    <form id="sendForm">
+      <input id="phrase" type="text"/>
+      <input id="btnSend" class="button" type="submit" name="connect"
+         value="Send"/>
+    </form>
+  </div>
+  <hr/>
+  <div id="msgs"></div>
+
+  <script type="text/javascript">
+    var ws;
+    if (!window.WebSocket) {
+      alert("WebSocket not supported by this browser");
+    }
+    function $(id) {
+      return document.getElementById(id);
+    }
+    function go() {
+        ws = new WebSocket("ws://" + location.host + "/");
+        ws.onopen = function () {
+          $('connstate').innerHTML = 'CONNECTED';
+        }
+        ws.onclose = function () {
+          $('connstate').innerHTML = 'CLOSED';
+        }
+        ws.onmessage = function (e) {
+          var p = document.createElement('pre');
+          p.appendChild(document.createTextNode(e.data));
+          $('msgs').appendChild(p);
+        }
+    }
+    $('sendForm').onsubmit = function (event) {
+      var p = $('phrase');
+      ws.send(p.value);
+      p.value='';
+      return false;
+    }
+    $('btnConn').onclick = function(event) {
+      go(); return false;
+    };
+  </script>
+  </body>
+</html>
+
diff --git a/deps/mochiweb/examples/websocket/websocket.erl b/deps/mochiweb/examples/websocket/websocket.erl
new file mode 100644 (file)
index 0000000..cd52da1
--- /dev/null
@@ -0,0 +1,148 @@
+-module(websocket).
+
+%% To run: erlc websocket.erl && erl -pa ../../ebin -s websocket
+
+%% The MIT License (MIT)
+
+%% Copyright (c) 2012 Zadane.pl sp. z o.o.
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+-export([start/0, start_link/0, ws_loop/3, loop/2]).
+-export([broadcast_server/1]).
+
+%%
+%% Mochiweb websocket example
+%%
+%% [1]: At first you have to start HTTP server which will listen for HTTP
+%%      requests and eventually upgrade connection to websocket
+%% [2]: Attempt to upgrade connection to websocket.
+%%      Function mochiweb_websocket:upgrade_connection/2:
+%%      * first argument is mochiweb_request
+%%      * second is M:F which will handle further websocket messages.
+%%      Function return two funs:
+%%      * ReentryWs/1 - use it to enter to messages handling loop
+%%        (in this example ws_loop/3)
+%%      * ReplyChannel/1 - use to send messages to client. May be passed to
+%%        other processes
+%% [3]: Example of sending message to client
+%% [4]: State that will be passed to message handling loop
+%% [5]: Pass control to messages handling loop. From this moment each message
+%%      received from client can be handled...
+%% [6]: ...here as Payload. State is variable intended for holding your custom
+%%      state. ReplyChannel is the same function as in [3].
+%%      Notice! Payload is list of messages received from client. Websocket
+%%      framing mechanism concatenates messages which are sent one after another
+%%      in short time.
+%% [7]: Print payload received from client and send it back
+%% [8]: Message handling function must return new state value
+start() ->
+    spawn(
+      fun () ->
+              application:start(sasl),
+              start_link(),
+              receive
+                  stop -> ok
+              end
+      end).
+
+start_link() ->
+    %% [1]
+    io:format("Listening at http://127.0.0.1:8080/~n"),
+    Broadcaster = spawn_link(?MODULE, broadcast_server, [dict:new()]),
+    mochiweb_http:start_link([
+                              {name, client_access},
+                              {loop, {?MODULE, loop, [Broadcaster]}},
+                              {port, 8080}
+                             ]).
+
+ws_loop(Payload, Broadcaster, _ReplyChannel) ->
+    %% [6]
+
+    %% [7]
+    io:format("Received data: ~p~n", [Payload]),
+    Received = list_to_binary(Payload),
+    Broadcaster ! {broadcast, self(), Received},
+
+    %% [8]
+    Broadcaster.
+
+loop(Req, Broadcaster) ->
+    H = mochiweb_request:get_header_value("Upgrade", Req),
+    loop(Req,
+         Broadcaster,
+         H =/= undefined andalso string:to_lower(H) =:= "websocket").
+
+loop(Req, _Broadcaster, false) ->
+    mochiweb_request:serve_file("index.html", "./", Req);
+loop(Req, Broadcaster, true) ->
+    {ReentryWs, ReplyChannel} = mochiweb_websocket:upgrade_connection(
+                                  Req, fun ?MODULE:ws_loop/3),
+    %% [3]
+    Broadcaster ! {register, self(), ReplyChannel},
+    %% [4]
+    %% [5]
+    ReentryWs(Broadcaster).
+
+
+%% This server keeps track of connected pids
+broadcast_server(Pids) ->
+    Pids1 = receive
+                {register, Pid, Channel} ->
+                    broadcast_register(Pid, Channel, Pids);
+                {broadcast, Pid, Message} ->
+                    broadcast_sendall(Pid, Message, Pids);
+                {'DOWN', MRef, process, Pid, _Reason} ->
+                    broadcast_down(Pid, MRef, Pids);
+                Msg ->
+                    io:format("Unknown message: ~p~n", [Msg]),
+                    Pids
+            end,
+    erlang:hibernate(?MODULE, broadcast_server, [Pids1]).
+
+broadcast_register(Pid, Channel, Pids) ->
+    MRef = erlang:monitor(process, Pid),
+    broadcast_sendall(
+      Pid, "connected", dict:store(Pid, {Channel, MRef}, Pids)).
+
+broadcast_down(Pid, MRef, Pids) ->
+    Pids1 = case dict:find(Pid, Pids) of
+                {ok, {_, MRef}} ->
+                    dict:erase(Pid, Pids);
+                _ ->
+                    Pids
+            end,
+    broadcast_sendall(Pid, "disconnected", Pids1).
+
+broadcast_sendall(Pid, Msg, Pids) ->
+    M = iolist_to_binary([pid_to_list(Pid), ": ", Msg]),
+    dict:fold(
+      fun (K, {Reply, MRef}, Acc) ->
+              try
+                  begin
+                      Reply(M),
+                      dict:store(K, {Reply, MRef}, Acc)
+                  end
+              catch
+                  _:_ ->
+                      Acc
+              end
+      end,
+      dict:new(),
+      Pids).
diff --git a/deps/mochiweb/rebar b/deps/mochiweb/rebar
new file mode 100755 (executable)
index 0000000..0d1980b
Binary files /dev/null and b/deps/mochiweb/rebar differ
similarity index 81%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar.config
rename to deps/mochiweb/rebar.config
index 101930ab4f8f3d4f769912b1770b06970ef2b378..0ae370c11495478447370f385ec44f30140c7470 100644 (file)
@@ -1,6 +1,7 @@
 % -*- mode: erlang -*-
 {erl_opts, [debug_info,
-            {platform_define, "R15", 'gen_tcp_r15b_workaround'}]}.
+            {platform_define, "R15", 'gen_tcp_r15b_workaround'},
+            {platform_define, "(R14|R15|R16B-)", 'crypto_compatibility'}]}.
 {cover_enabled, true}.
 {eunit_opts, [verbose, {report,{eunit_surefire,[{dir,"."}]}}]}.
 {dialyzer_opts, [{warnings, [no_return,
similarity index 93%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt.erl
rename to deps/mochiweb/src/mochifmt.erl
index fc95e4f60da6735902e9737c7e8b76d0ce4ca2cf..6381bb7417c7de42dd9c69d37edda3e5e27876ba 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2008 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc String Formatting for Erlang, inspired by Python 2.6
 %%      (<a href="http://www.python.org/dev/peps/pep-3101/">PEP 3101</a>).
similarity index 50%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_records.erl
rename to deps/mochiweb/src/mochifmt_records.erl
index 7d166ffdc7db3ce47086ddc86ac1bbb82e463a2d..3dccaa41eb5484a6c7533b4f0f0e2ff43ce58ded 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2008 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Formatter that understands records.
 %%
diff --git a/deps/mochiweb/src/mochifmt_std.erl b/deps/mochiweb/src/mochifmt_std.erl
new file mode 100644 (file)
index 0000000..6067451
--- /dev/null
@@ -0,0 +1,51 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2008 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
+%% @doc Template module for a mochifmt formatter.
+
+-module(mochifmt_std).
+-author('bob@mochimedia.com').
+-export([new/0, format/3, get_value/3, format_field/3, get_field/3, convert_field/3]).
+
+new() ->
+    {?MODULE}.
+
+format(Format, Args, {?MODULE}=THIS) ->
+    mochifmt:format(Format, Args, THIS).
+
+get_field(Key, Args, {?MODULE}=THIS) ->
+    mochifmt:get_field(Key, Args, THIS).
+
+convert_field(Key, Args, {?MODULE}) ->
+    mochifmt:convert_field(Key, Args).
+
+get_value(Key, Args, {?MODULE}) ->
+    mochifmt:get_value(Key, Args).
+
+format_field(Arg, Format, {?MODULE}=THIS) ->
+    mochifmt:format_field(Arg, Format, THIS).
+
+%%
+%% Tests
+%%
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+-endif.
diff --git a/deps/mochiweb/src/mochiglobal.erl b/deps/mochiweb/src/mochiglobal.erl
new file mode 100644 (file)
index 0000000..8df007f
--- /dev/null
@@ -0,0 +1,127 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
+
+%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6)
+%%      <a href="http://www.erlang.org/pipermail/erlang-questions/2009-March/042503.html">[1]</a>.
+-module(mochiglobal).
+-author("Bob Ippolito <bob@mochimedia.com>").
+-export([get/1, get/2, put/2, delete/1]).
+
+-spec get(atom()) -> any() | undefined.
+%% @equiv get(K, undefined)
+get(K) ->
+    get(K, undefined).
+
+-spec get(atom(), T) -> any() | T.
+%% @doc Get the term for K or return Default.
+get(K, Default) ->
+    get(K, Default, key_to_module(K)).
+
+get(_K, Default, Mod) ->
+    try Mod:term()
+    catch error:undef ->
+            Default
+    end.
+
+-spec put(atom(), any()) -> ok.
+%% @doc Store term V at K, replaces an existing term if present.
+put(K, V) ->
+    put(K, V, key_to_module(K)).
+
+put(_K, V, Mod) ->
+    Bin = compile(Mod, V),
+    code:purge(Mod),
+    {module, Mod} = code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin),
+    ok.
+
+-spec delete(atom()) -> boolean().
+%% @doc Delete term stored at K, no-op if non-existent.
+delete(K) ->
+    delete(K, key_to_module(K)).
+
+delete(_K, Mod) ->
+    code:purge(Mod),
+    code:delete(Mod).
+
+-spec key_to_module(atom()) -> atom().
+key_to_module(K) ->
+    list_to_atom("mochiglobal:" ++ atom_to_list(K)).
+
+-spec compile(atom(), any()) -> binary().
+compile(Module, T) ->
+    {ok, Module, Bin} = compile:forms(forms(Module, T),
+                                      [verbose, report_errors]),
+    Bin.
+
+-spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
+forms(Module, T) ->
+    [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)].
+
+-spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
+term_to_abstract(Module, Getter, T) ->
+    [%% -module(Module).
+     erl_syntax:attribute(
+       erl_syntax:atom(module),
+       [erl_syntax:atom(Module)]),
+     %% -export([Getter/0]).
+     erl_syntax:attribute(
+       erl_syntax:atom(export),
+       [erl_syntax:list(
+         [erl_syntax:arity_qualifier(
+            erl_syntax:atom(Getter),
+            erl_syntax:integer(0))])]),
+     %% Getter() -> T.
+     erl_syntax:function(
+       erl_syntax:atom(Getter),
+       [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])].
+
+%%
+%% Tests
+%%
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+get_put_delete_test() ->
+    K = '$$test$$mochiglobal',
+    delete(K),
+    ?assertEqual(
+       bar,
+       get(K, bar)),
+    try
+        ?MODULE:put(K, baz),
+        ?assertEqual(
+           baz,
+           get(K, bar)),
+        ?MODULE:put(K, wibble),
+        ?assertEqual(
+           wibble,
+           ?MODULE:get(K))
+    after
+        delete(K)
+    end,
+    ?assertEqual(
+       bar,
+       get(K, bar)),
+    ?assertEqual(
+       undefined,
+       ?MODULE:get(K)),
+    ok.
+-endif.
similarity index 65%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochihex.erl
rename to deps/mochiweb/src/mochihex.erl
index 796f3ada28b56c14a2bf78f8b31ab52e1985dcf4..91b2789da5c6d0c1fdffcba0e61373a636a4250d 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2006 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Utilities for working with hexadecimal strings.
 
similarity index 94%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochijson.erl
rename to deps/mochiweb/src/mochijson.erl
index d28318993fa2acb9905d6365460082d624b6c865..fb9b1dc374cffd12be6afd3d997c9a155149728b 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2006 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Yet another JSON (RFC 4627) library for Erlang.
 -module(mochijson).
diff --git a/deps/mochiweb/src/mochijson2.erl b/deps/mochiweb/src/mochijson2.erl
new file mode 100644 (file)
index 0000000..255398d
--- /dev/null
@@ -0,0 +1,942 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
+%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works
+%%      with binaries as strings, arrays as lists (without an {array, _})
+%%      wrapper and it only knows how to decode UTF-8 (and ASCII).
+%%
+%%      JSON terms are decoded as follows (javascript -> erlang):
+%%      <ul>
+%%          <li>{"key": "value"} ->
+%%              {struct, [{&lt;&lt;"key">>, &lt;&lt;"value">>}]}</li>
+%%          <li>["array", 123, 12.34, true, false, null] ->
+%%              [&lt;&lt;"array">>, 123, 12.34, true, false, null]
+%%          </li>
+%%      </ul>
+%%      <ul>
+%%          <li>Strings in JSON decode to UTF-8 binaries in Erlang</li>
+%%          <li>Objects decode to {struct, PropList}</li>
+%%          <li>Numbers decode to integer or float</li>
+%%          <li>true, false, null decode to their respective terms.</li>
+%%      </ul>
+%%      The encoder will accept the same format that the decoder will produce,
+%%      but will also allow additional cases for leniency:
+%%      <ul>
+%%          <li>atoms other than true, false, null will be considered UTF-8
+%%              strings (even as a proplist key)
+%%          </li>
+%%          <li>{json, IoList} will insert IoList directly into the output
+%%              with no validation
+%%          </li>
+%%          <li>{array, Array} will be encoded as Array
+%%              (legacy mochijson style)
+%%          </li>
+%%          <li>A non-empty raw proplist will be encoded as an object as long
+%%              as the first pair does not have an atom key of json, struct,
+%%              or array
+%%          </li>
+%%      </ul>
+
+-module(mochijson2).
+-author('bob@mochimedia.com').
+-export([encoder/1, encode/1]).
+-export([decoder/1, decode/1, decode/2]).
+
+%% This is a macro to placate syntax highlighters..
+-define(Q, $\").
+-define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset,
+                                 column=N+S#decoder.column}).
+-define(INC_COL(S), S#decoder{offset=1+S#decoder.offset,
+                              column=1+S#decoder.column}).
+-define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset,
+                               column=1,
+                               line=1+S#decoder.line}).
+-define(INC_CHAR(S, C),
+        case C of
+            $\n ->
+                S#decoder{column=1,
+                          line=1+S#decoder.line,
+                          offset=1+S#decoder.offset};
+            _ ->
+                S#decoder{column=1+S#decoder.column,
+                          offset=1+S#decoder.offset}
+        end).
+-define(IS_WHITESPACE(C),
+        (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
+
+%% @type json_string() = atom | binary()
+%% @type json_number() = integer() | float()
+%% @type json_array() = [json_term()]
+%% @type json_object() = {struct, [{json_string(), json_term()}]}
+%% @type json_eep18_object() = {[{json_string(), json_term()}]}
+%% @type json_iolist() = {json, iolist()}
+%% @type json_term() = json_string() | json_number() | json_array() |
+%%                     json_object() | json_eep18_object() | json_iolist()
+
+-record(encoder, {handler=null,
+                  utf8=false}).
+
+-record(decoder, {object_hook=null,
+                  offset=0,
+                  line=1,
+                  column=1,
+                  state=null}).
+
+%% @spec encoder([encoder_option()]) -> function()
+%% @doc Create an encoder/1 with the given options.
+%% @type encoder_option() = handler_option() | utf8_option()
+%% @type utf8_option() = boolean(). Emit unicode as utf8 (default - false)
+encoder(Options) ->
+    State = parse_encoder_options(Options, #encoder{}),
+    fun (O) -> json_encode(O, State) end.
+
+%% @spec encode(json_term()) -> iolist()
+%% @doc Encode the given as JSON to an iolist.
+encode(Any) ->
+    json_encode(Any, #encoder{}).
+
+%% @spec decoder([decoder_option()]) -> function()
+%% @doc Create a decoder/1 with the given options.
+decoder(Options) ->
+    State = parse_decoder_options(Options, #decoder{}),
+    fun (O) -> json_decode(O, State) end.
+
+%% @spec decode(iolist(), [{format, proplist | eep18 | struct}]) -> json_term()
+%% @doc Decode the given iolist to Erlang terms using the given object format
+%%      for decoding, where proplist returns JSON objects as [{binary(), json_term()}]
+%%      proplists, eep18 returns JSON objects as {[binary(), json_term()]}, and struct
+%%      returns them as-is.
+decode(S, Options) ->
+    json_decode(S, parse_decoder_options(Options, #decoder{})).
+
+%% @spec decode(iolist()) -> json_term()
+%% @doc Decode the given iolist to Erlang terms.
+decode(S) ->
+    json_decode(S, #decoder{}).
+
+%% Internal API
+
+parse_encoder_options([], State) ->
+    State;
+parse_encoder_options([{handler, Handler} | Rest], State) ->
+    parse_encoder_options(Rest, State#encoder{handler=Handler});
+parse_encoder_options([{utf8, Switch} | Rest], State) ->
+    parse_encoder_options(Rest, State#encoder{utf8=Switch}).
+
+parse_decoder_options([], State) ->
+    State;
+parse_decoder_options([{object_hook, Hook} | Rest], State) ->
+    parse_decoder_options(Rest, State#decoder{object_hook=Hook});
+parse_decoder_options([{format, Format} | Rest], State)
+  when Format =:= struct orelse Format =:= eep18 orelse Format =:= proplist ->
+    parse_decoder_options(Rest, State#decoder{object_hook=Format}).
+
+json_encode(true, _State) ->
+    <<"true">>;
+json_encode(false, _State) ->
+    <<"false">>;
+json_encode(null, _State) ->
+    <<"null">>;
+json_encode(I, _State) when is_integer(I) ->
+    integer_to_list(I);
+json_encode(F, _State) when is_float(F) ->
+    mochinum:digits(F);
+json_encode(S, State) when is_binary(S); is_atom(S) ->
+    json_encode_string(S, State);
+json_encode([{K, _}|_] = Props, State) when (K =/= struct andalso
+                                             K =/= array andalso
+                                             K =/= json) ->
+    json_encode_proplist(Props, State);
+json_encode({struct, Props}, State) when is_list(Props) ->
+    json_encode_proplist(Props, State);
+json_encode({Props}, State) when is_list(Props) ->
+    json_encode_proplist(Props, State);
+json_encode({}, State) ->
+    json_encode_proplist([], State);
+json_encode(Array, State) when is_list(Array) ->
+    json_encode_array(Array, State);
+json_encode({array, Array}, State) when is_list(Array) ->
+    json_encode_array(Array, State);
+json_encode({json, IoList}, _State) ->
+    IoList;
+json_encode(Bad, #encoder{handler=null}) ->
+    exit({json_encode, {bad_term, Bad}});
+json_encode(Bad, State=#encoder{handler=Handler}) ->
+    json_encode(Handler(Bad), State).
+
+json_encode_array([], _State) ->
+    <<"[]">>;
+json_encode_array(L, State) ->
+    F = fun (O, Acc) ->
+                [$,, json_encode(O, State) | Acc]
+        end,
+    [$, | Acc1] = lists:foldl(F, "[", L),
+    lists:reverse([$\] | Acc1]).
+
+json_encode_proplist([], _State) ->
+    <<"{}">>;
+json_encode_proplist(Props, State) ->
+    F = fun ({K, V}, Acc) ->
+                KS = json_encode_string(K, State),
+                VS = json_encode(V, State),
+                [$,, VS, $:, KS | Acc]
+        end,
+    [$, | Acc1] = lists:foldl(F, "{", Props),
+    lists:reverse([$\} | Acc1]).
+
+json_encode_string(A, State) when is_atom(A) ->
+    json_encode_string(atom_to_binary(A, latin1), State);
+json_encode_string(B, State) when is_binary(B) ->
+    case json_bin_is_safe(B) of
+        true ->
+            [?Q, B, ?Q];
+        false ->
+            json_encode_string_unicode(unicode:characters_to_list(B), State, [?Q])
+    end;
+json_encode_string(I, _State) when is_integer(I) ->
+    [?Q, integer_to_list(I), ?Q];
+json_encode_string(L, State) when is_list(L) ->
+    case json_string_is_safe(L) of
+        true ->
+            [?Q, L, ?Q];
+        false ->
+            json_encode_string_unicode(L, State, [?Q])
+    end.
+
+json_string_is_safe([]) ->
+    true;
+json_string_is_safe([C | Rest]) ->
+    case C of
+        ?Q ->
+            false;
+        $\\ ->
+            false;
+        $\b ->
+            false;
+        $\f ->
+            false;
+        $\n ->
+            false;
+        $\r ->
+            false;
+        $\t ->
+            false;
+        C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+            false;
+        C when C < 16#7f ->
+            json_string_is_safe(Rest);
+        _ ->
+            exit({json_encode, {bad_char, C}})
+    end.
+
+json_bin_is_safe(<<>>) ->
+    true;
+json_bin_is_safe(<<C, Rest/binary>>) ->
+    case C of
+        ?Q ->
+            false;
+        $\\ ->
+            false;
+        $\b ->
+            false;
+        $\f ->
+            false;
+        $\n ->
+            false;
+        $\r ->
+            false;
+        $\t ->
+            false;
+        C when C >= 0, C < $\s; C >= 16#7f ->
+            false;
+        C when C < 16#7f ->
+            json_bin_is_safe(Rest)
+    end.
+
+json_encode_string_unicode([], _State, Acc) ->
+    lists:reverse([$\" | Acc]);
+json_encode_string_unicode([C | Cs], State, Acc) ->
+    Acc1 = case C of
+               ?Q ->
+                   [?Q, $\\ | Acc];
+               %% Escaping solidus is only useful when trying to protect
+               %% against "</script>" injection attacks which are only
+               %% possible when JSON is inserted into a HTML document
+               %% in-line. mochijson2 does not protect you from this, so
+               %% if you do insert directly into HTML then you need to
+               %% uncomment the following case or escape the output of encode.
+               %%
+               %% $/ ->
+               %%    [$/, $\\ | Acc];
+               %%
+               $\\ ->
+                   [$\\, $\\ | Acc];
+               $\b ->
+                   [$b, $\\ | Acc];
+               $\f ->
+                   [$f, $\\ | Acc];
+               $\n ->
+                   [$n, $\\ | Acc];
+               $\r ->
+                   [$r, $\\ | Acc];
+               $\t ->
+                   [$t, $\\ | Acc];
+               C when C >= 0, C < $\s ->
+                   [unihex(C) | Acc];
+               C when C >= 16#7f, C =< 16#10FFFF, State#encoder.utf8 ->
+                   [unicode:characters_to_binary([C]) | Acc];
+               C when  C >= 16#7f, C =< 16#10FFFF, not State#encoder.utf8 ->
+                   [unihex(C) | Acc];
+               C when C < 16#7f ->
+                   [C | Acc];
+               _ ->
+                   %% json_string_is_safe guarantees that this branch is dead
+                   exit({json_encode, {bad_char, C}})
+           end,
+    json_encode_string_unicode(Cs, State, Acc1).
+
+hexdigit(C) when C >= 0, C =< 9 ->
+    C + $0;
+hexdigit(C) when C =< 15 ->
+    C + $a - 10.
+
+unihex(C) when C < 16#10000 ->
+    <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
+    Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
+    [$\\, $u | Digits];
+unihex(C) when C =< 16#10FFFF ->
+    N = C - 16#10000,
+    S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
+    S2 = 16#dc00 bor (N band 16#3ff),
+    [unihex(S1), unihex(S2)].
+
+json_decode(L, S) when is_list(L) ->
+    json_decode(iolist_to_binary(L), S);
+json_decode(B, S) ->
+    {Res, S1} = decode1(B, S),
+    {eof, _} = tokenize(B, S1#decoder{state=trim}),
+    Res.
+
+decode1(B, S=#decoder{state=null}) ->
+    case tokenize(B, S#decoder{state=any}) of
+        {{const, C}, S1} ->
+            {C, S1};
+        {start_array, S1} ->
+            decode_array(B, S1);
+        {start_object, S1} ->
+            decode_object(B, S1)
+    end.
+
+make_object(V, #decoder{object_hook=N}) when N =:= null orelse N =:= struct ->
+    V;
+make_object({struct, P}, #decoder{object_hook=eep18}) ->
+    {P};
+make_object({struct, P}, #decoder{object_hook=proplist}) ->
+    P;
+make_object(V, #decoder{object_hook=Hook}) ->
+    Hook(V).
+
+decode_object(B, S) ->
+    decode_object(B, S#decoder{state=key}, []).
+
+decode_object(B, S=#decoder{state=key}, Acc) ->
+    case tokenize(B, S) of
+        {end_object, S1} ->
+            V = make_object({struct, lists:reverse(Acc)}, S1),
+            {V, S1#decoder{state=null}};
+        {{const, K}, S1} ->
+            {colon, S2} = tokenize(B, S1),
+            {V, S3} = decode1(B, S2#decoder{state=null}),
+            decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc])
+    end;
+decode_object(B, S=#decoder{state=comma}, Acc) ->
+    case tokenize(B, S) of
+        {end_object, S1} ->
+            V = make_object({struct, lists:reverse(Acc)}, S1),
+            {V, S1#decoder{state=null}};
+        {comma, S1} ->
+            decode_object(B, S1#decoder{state=key}, Acc)
+    end.
+
+decode_array(B, S) ->
+    decode_array(B, S#decoder{state=any}, []).
+
+decode_array(B, S=#decoder{state=any}, Acc) ->
+    case tokenize(B, S) of
+        {end_array, S1} ->
+            {lists:reverse(Acc), S1#decoder{state=null}};
+        {start_array, S1} ->
+            {Array, S2} = decode_array(B, S1),
+            decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
+        {start_object, S1} ->
+            {Array, S2} = decode_object(B, S1),
+            decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
+        {{const, Const}, S1} ->
+            decode_array(B, S1#decoder{state=comma}, [Const | Acc])
+    end;
+decode_array(B, S=#decoder{state=comma}, Acc) ->
+    case tokenize(B, S) of
+        {end_array, S1} ->
+            {lists:reverse(Acc), S1#decoder{state=null}};
+        {comma, S1} ->
+            decode_array(B, S1#decoder{state=any}, Acc)
+    end.
+
+tokenize_string(B, S=#decoder{offset=O}) ->
+    case tokenize_string_fast(B, O) of
+        {escape, O1} ->
+            Length = O1 - O,
+            S1 = ?ADV_COL(S, Length),
+            <<_:O/binary, Head:Length/binary, _/binary>> = B,
+            tokenize_string(B, S1, lists:reverse(binary_to_list(Head)));
+        O1 ->
+            Length = O1 - O,
+            <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B,
+            {{const, String}, ?ADV_COL(S, Length + 1)}
+    end.
+
+tokenize_string_fast(B, O) ->
+    case B of
+        <<_:O/binary, ?Q, _/binary>> ->
+            O;
+        <<_:O/binary, $\\, _/binary>> ->
+            {escape, O};
+        <<_:O/binary, C1, _/binary>> when C1 < 128 ->
+            tokenize_string_fast(B, 1 + O);
+        <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
+                C2 >= 128, C2 =< 191 ->
+            tokenize_string_fast(B, 2 + O);
+        <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
+                C2 >= 128, C2 =< 191,
+                C3 >= 128, C3 =< 191 ->
+            tokenize_string_fast(B, 3 + O);
+        <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
+                C2 >= 128, C2 =< 191,
+                C3 >= 128, C3 =< 191,
+                C4 >= 128, C4 =< 191 ->
+            tokenize_string_fast(B, 4 + O);
+        _ ->
+            throw(invalid_utf8)
+    end.
+
+tokenize_string(B, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, ?Q, _/binary>> ->
+            {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)};
+        <<_:O/binary, "\\\"", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]);
+        <<_:O/binary, "\\\\", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]);
+        <<_:O/binary, "\\/", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]);
+        <<_:O/binary, "\\b", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]);
+        <<_:O/binary, "\\f", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]);
+        <<_:O/binary, "\\n", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]);
+        <<_:O/binary, "\\r", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]);
+        <<_:O/binary, "\\t", _/binary>> ->
+            tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]);
+        <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> ->
+            C = erlang:list_to_integer([C3, C2, C1, C0], 16),
+            if C > 16#D7FF, C < 16#DC00 ->
+                %% coalesce UTF-16 surrogate pair
+                <<"\\u", D3, D2, D1, D0, _/binary>> = Rest,
+                D = erlang:list_to_integer([D3,D2,D1,D0], 16),
+                Acc1 = [unicode:characters_to_binary(
+                            <<C:16/big-unsigned-integer,
+                              D:16/big-unsigned-integer>>,
+                            utf16)
+                       | Acc],
+                tokenize_string(B, ?ADV_COL(S, 12), Acc1);
+            true ->
+                Acc1 = [unicode:characters_to_binary([C]) | Acc],
+                tokenize_string(B, ?ADV_COL(S, 6), Acc1)
+            end;
+        <<_:O/binary, C1, _/binary>> when C1 < 128 ->
+            tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]);
+        <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
+                C2 >= 128, C2 =< 191 ->
+            tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]);
+        <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
+                C2 >= 128, C2 =< 191,
+                C3 >= 128, C3 =< 191 ->
+            tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]);
+        <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
+                C2 >= 128, C2 =< 191,
+                C3 >= 128, C3 =< 191,
+                C4 >= 128, C4 =< 191 ->
+            tokenize_string(B, ?ADV_COL(S, 4), [C4, C3, C2, C1 | Acc]);
+        _ ->
+            throw(invalid_utf8)
+    end.
+
+tokenize_number(B, S) ->
+    case tokenize_number(B, sign, S, []) of
+        {{int, Int}, S1} ->
+            {{const, list_to_integer(Int)}, S1};
+        {{float, Float}, S1} ->
+            {{const, list_to_float(Float)}, S1}
+    end.
+
+tokenize_number(B, sign, S=#decoder{offset=O}, []) ->
+    case B of
+        <<_:O/binary, $-, _/binary>> ->
+            tokenize_number(B, int, ?INC_COL(S), [$-]);
+        _ ->
+            tokenize_number(B, int, S, [])
+    end;
+tokenize_number(B, int, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, $0, _/binary>> ->
+            tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]);
+        <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 ->
+            tokenize_number(B, int1, ?INC_COL(S), [C | Acc])
+    end;
+tokenize_number(B, int1, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+            tokenize_number(B, int1, ?INC_COL(S), [C | Acc]);
+        _ ->
+            tokenize_number(B, frac, S, Acc)
+    end;
+tokenize_number(B, frac, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 ->
+            tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
+        <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
+            tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
+        _ ->
+            {{int, lists:reverse(Acc)}, S}
+    end;
+tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+            tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]);
+        <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
+            tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]);
+        _ ->
+            {{float, lists:reverse(Acc)}, S}
+    end;
+tokenize_number(B, esign, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ ->
+            tokenize_number(B, eint, ?INC_COL(S), [C | Acc]);
+        _ ->
+            tokenize_number(B, eint, S, Acc)
+    end;
+tokenize_number(B, eint, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+            tokenize_number(B, eint1, ?INC_COL(S), [C | Acc])
+    end;
+tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) ->
+    case B of
+        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
+            tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]);
+        _ ->
+            {{float, lists:reverse(Acc)}, S}
+    end.
+
+tokenize(B, S=#decoder{offset=O}) ->
+    case B of
+        <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
+            tokenize(B, ?INC_CHAR(S, C));
+        <<_:O/binary, "{", _/binary>> ->
+            {start_object, ?INC_COL(S)};
+        <<_:O/binary, "}", _/binary>> ->
+            {end_object, ?INC_COL(S)};
+        <<_:O/binary, "[", _/binary>> ->
+            {start_array, ?INC_COL(S)};
+        <<_:O/binary, "]", _/binary>> ->
+            {end_array, ?INC_COL(S)};
+        <<_:O/binary, ",", _/binary>> ->
+            {comma, ?INC_COL(S)};
+        <<_:O/binary, ":", _/binary>> ->
+            {colon, ?INC_COL(S)};
+        <<_:O/binary, "null", _/binary>> ->
+            {{const, null}, ?ADV_COL(S, 4)};
+        <<_:O/binary, "true", _/binary>> ->
+            {{const, true}, ?ADV_COL(S, 4)};
+        <<_:O/binary, "false", _/binary>> ->
+            {{const, false}, ?ADV_COL(S, 5)};
+        <<_:O/binary, "\"", _/binary>> ->
+            tokenize_string(B, ?INC_COL(S));
+        <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9)
+                                         orelse C =:= $- ->
+            tokenize_number(B, S);
+        <<_:O/binary>> ->
+            trim = S#decoder.state,
+            {eof, S}
+    end.
+%%
+%% Tests
+%%
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+%% testing constructs borrowed from the Yaws JSON implementation.
+
+%% Create an object from a list of Key/Value pairs.
+
+obj_new() ->
+    {struct, []}.
+
+is_obj({struct, Props}) ->
+    F = fun ({K, _}) when is_binary(K) -> true end,
+    lists:all(F, Props).
+
+obj_from_list(Props) ->
+    Obj = {struct, Props},
+    ?assert(is_obj(Obj)),
+    Obj.
+
+%% Test for equivalence of Erlang terms.
+%% Due to arbitrary order of construction, equivalent objects might
+%% compare unequal as erlang terms, so we need to carefully recurse
+%% through aggregates (tuples and objects).
+
+equiv({struct, Props1}, {struct, Props2}) ->
+    equiv_object(Props1, Props2);
+equiv(L1, L2) when is_list(L1), is_list(L2) ->
+    equiv_list(L1, L2);
+equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
+equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2;
+equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true.
+
+%% Object representation and traversal order is unknown.
+%% Use the sledgehammer and sort property lists.
+
+equiv_object(Props1, Props2) ->
+    L1 = lists:keysort(1, Props1),
+    L2 = lists:keysort(1, Props2),
+    Pairs = lists:zip(L1, L2),
+    true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
+                             equiv(K1, K2) and equiv(V1, V2)
+                     end, Pairs).
+
+%% Recursively compare tuple elements for equivalence.
+
+equiv_list([], []) ->
+    true;
+equiv_list([V1 | L1], [V2 | L2]) ->
+    equiv(V1, V2) andalso equiv_list(L1, L2).
+
+decode_test() ->
+    [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>),
+    <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]).
+
+e2j_vec_test() ->
+    test_one(e2j_test_vec(utf8), 1).
+
+test_one([], _N) ->
+    %% io:format("~p tests passed~n", [N-1]),
+    ok;
+test_one([{E, J} | Rest], N) ->
+    %% io:format("[~p] ~p ~p~n", [N, E, J]),
+    true = equiv(E, decode(J)),
+    true = equiv(E, decode(encode(E))),
+    test_one(Rest, 1+N).
+
+e2j_test_vec(utf8) ->
+    [
+     {1, "1"},
+     {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes
+     {-1, "-1"},
+     {-3.1416, "-3.14160"},
+     {12.0e10, "1.20000e+11"},
+     {1.234E+10, "1.23400e+10"},
+     {-1.234E-10, "-1.23400e-10"},
+     {10.0, "1.0e+01"},
+     {123.456, "1.23456E+2"},
+     {10.0, "1e1"},
+     {<<"foo">>, "\"foo\""},
+     {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""},
+     {<<"">>, "\"\""},
+     {<<"\n\n\n">>, "\"\\n\\n\\n\""},
+     {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
+     {obj_new(), "{}"},
+     {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"},
+     {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]),
+      "{\"foo\":\"bar\",\"baz\":123}"},
+     {[], "[]"},
+     {[[]], "[[]]"},
+     {[1, <<"foo">>], "[1,\"foo\"]"},
+
+     %% json array in a json object
+     {obj_from_list([{<<"foo">>, [123]}]),
+      "{\"foo\":[123]}"},
+
+     %% json object in a json object
+     {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]),
+      "{\"foo\":{\"bar\":true}}"},
+
+     %% fold evaluation order
+     {obj_from_list([{<<"foo">>, []},
+                     {<<"bar">>, obj_from_list([{<<"baz">>, true}])},
+                     {<<"alice">>, <<"bob">>}]),
+      "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
+
+     %% json object in a json array
+     {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null],
+      "[-123,\"foo\",{\"bar\":[]},null]"}
+    ].
+
+%% test utf8 encoding
+encoder_utf8_test() ->
+    %% safe conversion case (default)
+    <<"\"\\u0001\\u0442\\u0435\\u0441\\u0442\"">> =
+        iolist_to_binary(encode(<<1,"\321\202\320\265\321\201\321\202">>)),
+
+    %% raw utf8 output (optional)
+    Enc = mochijson2:encoder([{utf8, true}]),
+    <<34,"\\u0001",209,130,208,181,209,129,209,130,34>> =
+        iolist_to_binary(Enc(<<1,"\321\202\320\265\321\201\321\202">>)).
+
+input_validation_test() ->
+    Good = [
+        {16#00A3, <<?Q, 16#C2, 16#A3, ?Q>>}, %% pound
+        {16#20AC, <<?Q, 16#E2, 16#82, 16#AC, ?Q>>}, %% euro
+        {16#10196, <<?Q, 16#F0, 16#90, 16#86, 16#96, ?Q>>} %% denarius
+    ],
+    lists:foreach(fun({CodePoint, UTF8}) ->
+        Expect = unicode:characters_to_binary([CodePoint]),
+        Expect = decode(UTF8)
+    end, Good),
+
+    Bad = [
+        %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
+        <<?Q, 16#80, ?Q>>,
+        %% missing continuations, last byte in each should be 80-BF
+        <<?Q, 16#C2, 16#7F, ?Q>>,
+        <<?Q, 16#E0, 16#80,16#7F, ?Q>>,
+        <<?Q, 16#F0, 16#80, 16#80, 16#7F, ?Q>>,
+        %% we don't support code points > 10FFFF per RFC 3629
+        <<?Q, 16#F5, 16#80, 16#80, 16#80, ?Q>>,
+        %% escape characters trigger a different code path
+        <<?Q, $\\, $\n, 16#80, ?Q>>
+    ],
+    lists:foreach(
+      fun(X) ->
+              ok = try decode(X) catch invalid_utf8 -> ok end,
+              %% could be {ucs,{bad_utf8_character_code}} or
+              %%          {json_encode,{bad_char,_}}
+              {'EXIT', _} = (catch encode(X))
+      end, Bad).
+
+inline_json_test() ->
+    ?assertEqual(<<"\"iodata iodata\"">>,
+                 iolist_to_binary(
+                   encode({json, [<<"\"iodata">>, " iodata\""]}))),
+    ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]},
+                 decode(
+                   encode({struct,
+                           [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))),
+    ok.
+
+big_unicode_test() ->
+    UTF8Seq = unicode:characters_to_binary([16#0001d120]),
+    ?assertEqual(
+       <<"\"\\ud834\\udd20\"">>,
+       iolist_to_binary(encode(UTF8Seq))),
+    ?assertEqual(
+       UTF8Seq,
+       decode(iolist_to_binary(encode(UTF8Seq)))),
+    ok.
+
+custom_decoder_test() ->
+    ?assertEqual(
+       {struct, [{<<"key">>, <<"value">>}]},
+       (decoder([]))("{\"key\": \"value\"}")),
+    F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end,
+    ?assertEqual(
+       win,
+       (decoder([{object_hook, F}]))("{\"key\": \"value\"}")),
+    ok.
+
+atom_test() ->
+    %% JSON native atoms
+    [begin
+         ?assertEqual(A, decode(atom_to_list(A))),
+         ?assertEqual(iolist_to_binary(atom_to_list(A)),
+                      iolist_to_binary(encode(A)))
+     end || A <- [true, false, null]],
+    %% Atom to string
+    ?assertEqual(
+       <<"\"foo\"">>,
+       iolist_to_binary(encode(foo))),
+    ?assertEqual(
+       <<"\"\\ud834\\udd20\"">>,
+       iolist_to_binary(
+         encode(
+           binary_to_atom(
+             unicode:characters_to_binary([16#0001d120]), latin1)))),
+    ok.
+
+key_encode_test() ->
+    %% Some forms are accepted as keys that would not be strings in other
+    %% cases
+    ?assertEqual(
+       <<"{\"foo\":1}">>,
+       iolist_to_binary(encode({struct, [{foo, 1}]}))),
+    ?assertEqual(
+       <<"{\"foo\":1}">>,
+       iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))),
+    ?assertEqual(
+       <<"{\"foo\":1}">>,
+       iolist_to_binary(encode({struct, [{"foo", 1}]}))),
+       ?assertEqual(
+       <<"{\"foo\":1}">>,
+       iolist_to_binary(encode([{foo, 1}]))),
+    ?assertEqual(
+       <<"{\"foo\":1}">>,
+       iolist_to_binary(encode([{<<"foo">>, 1}]))),
+    ?assertEqual(
+       <<"{\"foo\":1}">>,
+       iolist_to_binary(encode([{"foo", 1}]))),
+    ?assertEqual(
+       <<"{\"\\ud834\\udd20\":1}">>,
+       iolist_to_binary(
+         encode({struct, [{[16#0001d120], 1}]}))),
+    ?assertEqual(
+       <<"{\"1\":1}">>,
+       iolist_to_binary(encode({struct, [{1, 1}]}))),
+    ok.
+
+unsafe_chars_test() ->
+    Chars = "\"\\\b\f\n\r\t",
+    [begin
+         ?assertEqual(false, json_string_is_safe([C])),
+         ?assertEqual(false, json_bin_is_safe(<<C>>)),
+         ?assertEqual(<<C>>, decode(encode(<<C>>)))
+     end || C <- Chars],
+    ?assertEqual(
+       false,
+       json_string_is_safe([16#0001d120])),
+    ?assertEqual(
+       false,
+       json_bin_is_safe(unicode:characters_to_binary([16#0001d120]))),
+    ?assertEqual(
+       [16#0001d120],
+       unicode:characters_to_list(
+         decode(
+           encode(
+             binary_to_atom(
+               unicode:characters_to_binary([16#0001d120]),
+               latin1))))),
+    ?assertEqual(
+       false,
+       json_string_is_safe([16#10ffff])),
+    ?assertEqual(
+       false,
+       json_bin_is_safe(unicode:characters_to_binary([16#10ffff]))),
+    %% solidus can be escaped but isn't unsafe by default
+    ?assertEqual(
+       <<"/">>,
+       decode(<<"\"\\/\"">>)),
+    ok.
+
+int_test() ->
+    ?assertEqual(0, decode("0")),
+    ?assertEqual(1, decode("1")),
+    ?assertEqual(11, decode("11")),
+    ok.
+
+large_int_test() ->
+    ?assertEqual(<<"-2147483649214748364921474836492147483649">>,
+        iolist_to_binary(encode(-2147483649214748364921474836492147483649))),
+    ?assertEqual(<<"2147483649214748364921474836492147483649">>,
+        iolist_to_binary(encode(2147483649214748364921474836492147483649))),
+    ok.
+
+float_test() ->
+    ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))),
+    ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))),
+    ok.
+
+handler_test() ->
+    ?assertEqual(
+       {'EXIT',{json_encode,{bad_term,{x,y}}}},
+       catch encode({x,y})),
+    F = fun ({x,y}) -> [] end,
+    ?assertEqual(
+       <<"[]">>,
+       iolist_to_binary((encoder([{handler, F}]))({x, y}))),
+    ok.
+
+encode_empty_test_() ->
+    [{A, ?_assertEqual(<<"{}">>, iolist_to_binary(encode(B)))}
+     || {A, B} <- [{"eep18 {}", {}},
+                   {"eep18 {[]}", {[]}},
+                   {"{struct, []}", {struct, []}}]].
+
+encode_test_() ->
+    P = [{<<"k">>, <<"v">>}],
+    JSON = iolist_to_binary(encode({struct, P})),
+    [{atom_to_list(F),
+      ?_assertEqual(JSON, iolist_to_binary(encode(decode(JSON, [{format, F}]))))}
+     || F <- [struct, eep18, proplist]].
+
+format_test_() ->
+    P = [{<<"k">>, <<"v">>}],
+    JSON = iolist_to_binary(encode({struct, P})),
+    [{atom_to_list(F),
+      ?_assertEqual(A, decode(JSON, [{format, F}]))}
+     || {F, A} <- [{struct, {struct, P}},
+                   {eep18, {P}},
+                   {proplist, P}]].
+
+array_test() ->
+    A = [<<"hello">>],
+    ?assertEqual(A, decode(encode({array, A}))).
+
+bad_char_test() ->
+    ?assertEqual(
+       {'EXIT', {json_encode, {bad_char, 16#110000}}},
+       catch json_string_is_safe([16#110000])).
+
+utf8_roundtrip_test_() ->
+    %% These are the boundary cases for UTF8 encoding
+    Codepoints = [%% 7 bits  -> 1 byte
+                  16#00, 16#7f,
+                  %% 11 bits -> 2 bytes
+                  16#080, 16#07ff,
+                  %% 16 bits -> 3 bytes
+                  16#0800, 16#ffff,
+                  16#d7ff, 16#e000,
+                  %% 21 bits -> 4 bytes
+                  16#010000, 16#10ffff],
+    UTF8 = unicode:characters_to_binary(Codepoints),
+    Encode = encoder([{utf8, true}]),
+    [{"roundtrip escaped",
+      ?_assertEqual(UTF8, decode(encode(UTF8)))},
+     {"roundtrip utf8",
+      ?_assertEqual(UTF8, decode(Encode(UTF8)))}].
+
+utf8_non_character_test_() ->
+    S = unicode:characters_to_binary([16#ffff, 16#fffe]),
+    [{"roundtrip escaped", ?_assertEqual(S, decode(encode(S)))},
+     {"roundtrip utf8", ?_assertEqual(S, decode((encoder([{utf8, true}]))(S)))}].
+
+-endif.
similarity index 72%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilists.erl
rename to deps/mochiweb/src/mochilists.erl
index d93b241fbfc8f60c4959f27c67ad476118cfbe97..24fa2f31d291c517068e04acbaacc2a5ed9cddb3 100644 (file)
@@ -1,5 +1,23 @@
 %% @copyright Copyright (c) 2010 Mochi Media, Inc.
 %% @author David Reid <dreid@mochimedia.com>
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Utility functions for dealing with proplists.
 
similarity index 77%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilogfile2.erl
rename to deps/mochiweb/src/mochilogfile2.erl
index b4a7e3c6222e49d63206e3a6535f309e54d84d7a..6ff8fec12ecebb85b0cf18bf0befd8735a0f4658 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2010 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Write newline delimited log files, ensuring that if a truncated
 %%      entry is found on log open then it is fixed before writing. Uses
diff --git a/deps/mochiweb/src/mochinum.erl b/deps/mochiweb/src/mochinum.erl
new file mode 100644 (file)
index 0000000..d687370
--- /dev/null
@@ -0,0 +1,372 @@
+%% @copyright 2007 Mochi Media, Inc.
+%% @author Bob Ippolito <bob@mochimedia.com>
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
+%% @doc Useful numeric algorithms for floats that cover some deficiencies
+%% in the math module. More interesting is digits/1, which implements
+%% the algorithm from:
+%% http://www.cs.indiana.edu/~burger/fp/index.html
+%% See also "Printing Floating-Point Numbers Quickly and Accurately"
+%% in Proceedings of the SIGPLAN '96 Conference on Programming Language
+%% Design and Implementation.
+
+-module(mochinum).
+-author("Bob Ippolito <bob@mochimedia.com>").
+-export([digits/1, frexp/1, int_pow/2, int_ceil/1]).
+
+%% IEEE 754 Float exponent bias
+-define(FLOAT_BIAS, 1022).
+-define(MIN_EXP, -1074).
+-define(BIG_POW, 4503599627370496).
+
+%% External API
+
+%% @spec digits(number()) -> string()
+%% @doc  Returns a string that accurately represents the given integer or float
+%%       using a conservative amount of digits. Great for generating
+%%       human-readable output, or compact ASCII serializations for floats.
+digits(N) when is_integer(N) ->
+    integer_to_list(N);
+digits(0.0) ->
+    "0.0";
+digits(Float) ->
+    {Frac1, Exp1} = frexp_int(Float),
+    [Place0 | Digits0] = digits1(Float, Exp1, Frac1),
+    {Place, Digits} = transform_digits(Place0, Digits0),
+    R = insert_decimal(Place, Digits),
+    case Float < 0 of
+        true ->
+            [$- | R];
+        _ ->
+            R
+    end.
+
+%% @spec frexp(F::float()) -> {Frac::float(), Exp::float()}
+%% @doc  Return the fractional and exponent part of an IEEE 754 double,
+%%       equivalent to the libc function of the same name.
+%%       F = Frac * pow(2, Exp).
+frexp(F) ->
+    frexp1(unpack(F)).
+
+%% @spec int_pow(X::integer(), N::integer()) -> Y::integer()
+%% @doc  Moderately efficient way to exponentiate integers.
+%%       int_pow(10, 2) = 100.
+int_pow(_X, 0) ->
+    1;
+int_pow(X, N) when N > 0 ->
+    int_pow(X, N, 1).
+
+%% @spec int_ceil(F::float()) -> integer()
+%% @doc  Return the ceiling of F as an integer. The ceiling is defined as
+%%       F when F == trunc(F);
+%%       trunc(F) when F &lt; 0;
+%%       trunc(F) + 1 when F &gt; 0.
+int_ceil(X) ->
+    T = trunc(X),
+    case (X - T) of
+        Pos when Pos > 0 -> T + 1;
+        _ -> T
+    end.
+
+
+%% Internal API
+
+int_pow(X, N, R) when N < 2 ->
+    R * X;
+int_pow(X, N, R) ->
+    int_pow(X * X, N bsr 1, case N band 1 of 1 -> R * X; 0 -> R end).
+
+insert_decimal(0, S) ->
+    "0." ++ S;
+insert_decimal(Place, S) when Place > 0 ->
+    L = length(S),
+    case Place - L of
+         0 ->
+            S ++ ".0";
+        N when N < 0 ->
+            {S0, S1} = lists:split(L + N, S),
+            S0 ++ "." ++ S1;
+        N when N < 6 ->
+            %% More places than digits
+            S ++ lists:duplicate(N, $0) ++ ".0";
+        _ ->
+            insert_decimal_exp(Place, S)
+    end;
+insert_decimal(Place, S) when Place > -6 ->
+    "0." ++ lists:duplicate(abs(Place), $0) ++ S;
+insert_decimal(Place, S) ->
+    insert_decimal_exp(Place, S).
+
+insert_decimal_exp(Place, S) ->
+    [C | S0] = S,
+    S1 = case S0 of
+             [] ->
+                 "0";
+             _ ->
+                 S0
+         end,
+    Exp = case Place < 0 of
+              true ->
+                  "e-";
+              false ->
+                  "e+"
+          end,
+    [C] ++ "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)).
+
+
+digits1(Float, Exp, Frac) ->
+    Round = ((Frac band 1) =:= 0),
+    case Exp >= 0 of
+        true ->
+            BExp = 1 bsl Exp,
+            case (Frac =/= ?BIG_POW) of
+                true ->
+                    scale((Frac * BExp * 2), 2, BExp, BExp,
+                          Round, Round, Float);
+                false ->
+                    scale((Frac * BExp * 4), 4, (BExp * 2), BExp,
+                          Round, Round, Float)
+            end;
+        false ->
+            case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of
+                true ->
+                    scale((Frac * 2), 1 bsl (1 - Exp), 1, 1,
+                          Round, Round, Float);
+                false ->
+                    scale((Frac * 4), 1 bsl (2 - Exp), 2, 1,
+                          Round, Round, Float)
+            end
+    end.
+
+scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) ->
+    Est = int_ceil(math:log10(abs(Float)) - 1.0e-10),
+    %% Note that the scheme implementation uses a 326 element look-up table
+    %% for int_pow(10, N) where we do not.
+    case Est >= 0 of
+        true ->
+            fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est,
+                  LowOk, HighOk);
+        false ->
+            Scale = int_pow(10, -Est),
+            fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est,
+                  LowOk, HighOk)
+    end.
+
+fixup(R, S, MPlus, MMinus, K, LowOk, HighOk) ->
+    TooLow = case HighOk of
+                 true ->
+                     (R + MPlus) >= S;
+                 false ->
+                     (R + MPlus) > S
+             end,
+    case TooLow of
+        true ->
+            [(K + 1) | generate(R, S, MPlus, MMinus, LowOk, HighOk)];
+        false ->
+            [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, HighOk)]
+    end.
+
+generate(R0, S, MPlus, MMinus, LowOk, HighOk) ->
+    D = R0 div S,
+    R = R0 rem S,
+    TC1 = case LowOk of
+              true ->
+                  R =< MMinus;
+              false ->
+                  R < MMinus
+          end,
+    TC2 = case HighOk of
+              true ->
+                  (R + MPlus) >= S;
+              false ->
+                  (R + MPlus) > S
+          end,
+    case TC1 of
+        false ->
+            case TC2 of
+                false ->
+                    [D | generate(R * 10, S, MPlus * 10, MMinus * 10,
+                                  LowOk, HighOk)];
+                true ->
+                    [D + 1]
+            end;
+        true ->
+            case TC2 of
+                false ->
+                    [D];
+                true ->
+                    case R * 2 < S of
+                        true ->
+                            [D];
+                        false ->
+                            [D + 1]
+                    end
+            end
+    end.
+
+unpack(Float) ->
+    <<Sign:1, Exp:11, Frac:52>> = <<Float:64/float>>,
+    {Sign, Exp, Frac}.
+
+frexp1({_Sign, 0, 0}) ->
+    {0.0, 0};
+frexp1({Sign, 0, Frac}) ->
+    Exp = log2floor(Frac),
+    <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, (Frac-1):52>>,
+    {Frac1, -(?FLOAT_BIAS) - 52 + Exp};
+frexp1({Sign, Exp, Frac}) ->
+    <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, Frac:52>>,
+    {Frac1, Exp - ?FLOAT_BIAS}.
+
+log2floor(Int) ->
+    log2floor(Int, 0).
+
+log2floor(0, N) ->
+    N;
+log2floor(Int, N) ->
+    log2floor(Int bsr 1, 1 + N).
+
+
+transform_digits(Place, [0 | Rest]) ->
+    transform_digits(Place, Rest);
+transform_digits(Place, Digits) ->
+    {Place, [$0 + D || D <- Digits]}.
+
+
+frexp_int(F) ->
+    case unpack(F) of
+        {_Sign, 0, Frac} ->
+            {Frac, ?MIN_EXP};
+        {_Sign, Exp, Frac} ->
+            {Frac + (1 bsl 52), Exp - 53 - ?FLOAT_BIAS}
+    end.
+
+%%
+%% Tests
+%%
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+int_ceil_test() ->
+    ?assertEqual(1, int_ceil(0.0001)),
+    ?assertEqual(0, int_ceil(0.0)),
+    ?assertEqual(1, int_ceil(0.99)),
+    ?assertEqual(1, int_ceil(1.0)),
+    ?assertEqual(-1, int_ceil(-1.5)),
+    ?assertEqual(-2, int_ceil(-2.0)),
+    ok.
+
+int_pow_test() ->
+    ?assertEqual(1, int_pow(1, 1)),
+    ?assertEqual(1, int_pow(1, 0)),
+    ?assertEqual(1, int_pow(10, 0)),
+    ?assertEqual(10, int_pow(10, 1)),
+    ?assertEqual(100, int_pow(10, 2)),
+    ?assertEqual(1000, int_pow(10, 3)),
+    ok.
+
+digits_test() ->
+    ?assertEqual("0",
+                 digits(0)),
+    ?assertEqual("0.0",
+                 digits(0.0)),
+    ?assertEqual("1.0",
+                 digits(1.0)),
+    ?assertEqual("-1.0",
+                 digits(-1.0)),
+    ?assertEqual("0.1",
+                 digits(0.1)),
+    ?assertEqual("0.01",
+                 digits(0.01)),
+    ?assertEqual("0.001",
+                 digits(0.001)),
+    ?assertEqual("1.0e+6",
+                 digits(1000000.0)),
+    ?assertEqual("0.5",
+                 digits(0.5)),
+    ?assertEqual("4503599627370496.0",
+                 digits(4503599627370496.0)),
+    %% small denormalized number
+    %% 4.94065645841246544177e-324 =:= 5.0e-324
+    <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
+    ?assertEqual("5.0e-324",
+                 digits(SmallDenorm)),
+    ?assertEqual(SmallDenorm,
+                 list_to_float(digits(SmallDenorm))),
+    %% large denormalized number
+    %% 2.22507385850720088902e-308
+    <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
+    ?assertEqual("2.225073858507201e-308",
+                 digits(BigDenorm)),
+    ?assertEqual(BigDenorm,
+                 list_to_float(digits(BigDenorm))),
+    %% small normalized number
+    %% 2.22507385850720138309e-308
+    <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
+    ?assertEqual("2.2250738585072014e-308",
+                 digits(SmallNorm)),
+    ?assertEqual(SmallNorm,
+                 list_to_float(digits(SmallNorm))),
+    %% large normalized number
+    %% 1.79769313486231570815e+308
+    <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
+    ?assertEqual("1.7976931348623157e+308",
+                 digits(LargeNorm)),
+    ?assertEqual(LargeNorm,
+                 list_to_float(digits(LargeNorm))),
+    %% issue #10 - mochinum:frexp(math:pow(2, -1074)).
+    ?assertEqual("5.0e-324",
+                 digits(math:pow(2, -1074))),
+    ok.
+
+frexp_test() ->
+    %% zero
+    ?assertEqual({0.0, 0}, frexp(0.0)),
+    %% one
+    ?assertEqual({0.5, 1}, frexp(1.0)),
+    %% negative one
+    ?assertEqual({-0.5, 1}, frexp(-1.0)),
+    %% small denormalized number
+    %% 4.94065645841246544177e-324
+    <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
+    ?assertEqual({0.5, -1073}, frexp(SmallDenorm)),
+    %% large denormalized number
+    %% 2.22507385850720088902e-308
+    <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
+    ?assertEqual(
+       {0.99999999999999978, -1022},
+       frexp(BigDenorm)),
+    %% small normalized number
+    %% 2.22507385850720138309e-308
+    <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
+    ?assertEqual({0.5, -1021}, frexp(SmallNorm)),
+    %% large normalized number
+    %% 1.79769313486231570815e+308
+    <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
+    ?assertEqual(
+        {0.99999999999999989, 1024},
+        frexp(LargeNorm)),
+    %% issue #10 - mochinum:frexp(math:pow(2, -1074)).
+    ?assertEqual(
+       {0.5, -1073},
+       frexp(math:pow(2, -1074))),
+    ok.
+
+-endif.
similarity index 85%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochitemp.erl
rename to deps/mochiweb/src/mochitemp.erl
index f64876d974c978f28045973211b13f86e57744e6..bd3c96551c80745f704314439c07d74f6260c1c4 100644 (file)
@@ -1,7 +1,25 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2010 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
-%% @doc Create temporary files and directories.
+%% @doc Create temporary files and directories. Requires crypto to be started.
 
 -module(mochitemp).
 -export([gettempdir/0]).
@@ -87,7 +105,7 @@ rngchars(N) ->
     [rngchar() | rngchars(N - 1)].
 
 rngchar() ->
-    rngchar(mochiweb_util:rand_uniform(0, tuple_size(?SAFE_CHARS))).
+    rngchar(crypto:rand_uniform(0, tuple_size(?SAFE_CHARS))).
 
 rngchar(C) ->
     element(1 + C, ?SAFE_CHARS).
@@ -177,6 +195,7 @@ gettempdir_cwd_test() ->
     ok.
 
 rngchars_test() ->
+    crypto:start(),
     ?assertEqual(
        "",
        rngchars(0)),
@@ -198,6 +217,7 @@ rngchar_test() ->
     ok.
 
 mkdtemp_n_failonce_test() ->
+    crypto:start(),
     D = mkdtemp(),
     Path = filename:join([D, "testdir"]),
     %% Toggle the existence of a dir so that it fails
@@ -244,6 +264,7 @@ make_dir_fail_test() ->
     ok.
 
 mkdtemp_test() ->
+    crypto:start(),
     D = mkdtemp(),
     ?assertEqual(
        true,
@@ -254,6 +275,7 @@ mkdtemp_test() ->
     ok.
 
 rmtempdir_test() ->
+    crypto:start(),
     D1 = mkdtemp(),
     ?assertEqual(
        true,
similarity index 84%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiutf8.erl
rename to deps/mochiweb/src/mochiutf8.erl
index c9d2751dd4448524d23d13027169031ead5668a6..bf0e7cc25766bf629234908a5d725c13774f6c2f 100644 (file)
@@ -1,5 +1,23 @@
 %% @copyright 2010 Mochi Media, Inc.
 %% @author Bob Ippolito <bob@mochimedia.com>
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Algorithm to convert any binary to a valid UTF-8 sequence by ignoring
 %%      invalid bytes.
 
 %% External API
 
-%% -type unichar_low() :: 0..16#d7ff.
-%% -type unichar_high() :: 16#e000..16#10ffff.
-%% -type unichar() :: unichar_low() | unichar_high().
+-type unichar_low() :: 0..16#d7ff.
+-type unichar_high() :: 16#e000..16#10ffff.
+-type unichar() :: unichar_low() | unichar_high().
 
-%% -spec codepoint_to_bytes(unichar()) -> binary().
+-spec codepoint_to_bytes(unichar()) -> binary().
 %% @doc Convert a unicode codepoint to UTF-8 bytes.
 codepoint_to_bytes(C) when (C >= 16#00 andalso C =< 16#7f) ->
     %% U+0000 - U+007F - 7 bits
@@ -40,12 +58,12 @@ codepoint_to_bytes(C) when (C >= 16#010000 andalso C =< 16#10FFFF) ->
       2#10:2, B1:6,
       2#10:2, B0:6>>.
 
-%% -spec codepoints_to_bytes([unichar()]) -> binary().
+-spec codepoints_to_bytes([unichar()]) -> binary().
 %% @doc Convert a list of codepoints to a UTF-8 binary.
 codepoints_to_bytes(L) ->
     <<<<(codepoint_to_bytes(C))/binary>> || C <- L>>.
 
-%% -spec read_codepoint(binary()) -> {unichar(), binary(), binary()}.
+-spec read_codepoint(binary()) -> {unichar(), binary(), binary()}.
 read_codepoint(Bin = <<2#0:1, C:7, Rest/binary>>) ->
     %% U+0000 - U+007F - 7 bits
     <<B:1/binary, _/binary>> = Bin,
@@ -82,32 +100,32 @@ read_codepoint(Bin = <<2#11110:5, B3:3,
             {C, B, Rest}
     end.
 
-%% -spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _.
+-spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _.
 codepoint_foldl(F, Acc, <<>>) when is_function(F, 2) ->
     Acc;
 codepoint_foldl(F, Acc, Bin) ->
     {C, _, Rest} = read_codepoint(Bin),
     codepoint_foldl(F, F(C, Acc), Rest).
 
-%% -spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _.
+-spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _.
 bytes_foldl(F, Acc, <<>>) when is_function(F, 2) ->
     Acc;
 bytes_foldl(F, Acc, Bin) ->
     {_, B, Rest} = read_codepoint(Bin),
     bytes_foldl(F, F(B, Acc), Rest).
 
-%% -spec bytes_to_codepoints(binary()) -> [unichar()].
+-spec bytes_to_codepoints(binary()) -> [unichar()].
 bytes_to_codepoints(B) ->
     lists:reverse(codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], B)).
 
-%% -spec len(binary()) -> non_neg_integer().
+-spec len(binary()) -> non_neg_integer().
 len(<<>>) ->
     0;
 len(B) ->
     {_, _, Rest} = read_codepoint(B),
     1 + len(Rest).
 
-%% -spec valid_utf8_bytes(B::binary()) -> binary().
+-spec valid_utf8_bytes(B::binary()) -> binary().
 %% @doc Return only the bytes in B that represent valid UTF-8. Uses
 %%      the following recursive algorithm: skip one byte if B does not
 %%      follow UTF-8 syntax (a 1-4 byte encoding of some number),
@@ -118,7 +136,7 @@ valid_utf8_bytes(B) when is_binary(B) ->
 
 %% Internal API
 
-%% -spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary().
+-spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary().
 %% @doc Return B, but skipping the 0-based indexes in L.
 binary_skip_bytes(B, []) ->
     B;
@@ -126,7 +144,7 @@ binary_skip_bytes(B, L) ->
     binary_skip_bytes(B, L, 0, []).
 
 %% @private
-%% -spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary().
+-spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary().
 binary_skip_bytes(B, [], _N, Acc) ->
     iolist_to_binary(lists:reverse([B | Acc]));
 binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) ->
@@ -134,13 +152,13 @@ binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) ->
 binary_skip_bytes(<<C, RestB/binary>>, L, N, Acc) ->
     binary_skip_bytes(RestB, L, 1 + N, [C | Acc]).
 
-%% -spec invalid_utf8_indexes(binary()) -> [non_neg_integer()].
+-spec invalid_utf8_indexes(binary()) -> [non_neg_integer()].
 %% @doc Return the 0-based indexes in B that are not valid UTF-8.
 invalid_utf8_indexes(B) ->
     invalid_utf8_indexes(B, 0, []).
 
 %% @private.
-%% -spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()].
+-spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()].
 invalid_utf8_indexes(<<C, Rest/binary>>, N, Acc) when C < 16#80 ->
     %% U+0000 - U+007F - 7 bits
     invalid_utf8_indexes(Rest, 1 + N, Acc);
diff --git a/deps/mochiweb/src/mochiweb.app.src b/deps/mochiweb/src/mochiweb.app.src
new file mode 100644 (file)
index 0000000..30fa905
--- /dev/null
@@ -0,0 +1,8 @@
+{application,mochiweb,
+             [{description,"MochiMedia Web Server"},
+              {vsn,"2.13.1"},
+              {modules,[]},
+              {registered,[]},
+              {env,[]},
+              {applications,[kernel,stdlib,crypto,inets,ssl,xmerl,compiler,
+                             syntax_tools]}]}.
diff --git a/deps/mochiweb/src/mochiweb.erl b/deps/mochiweb/src/mochiweb.erl
new file mode 100644 (file)
index 0000000..14480c2
--- /dev/null
@@ -0,0 +1,101 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
+%% @doc Start and stop the MochiWeb server.
+
+-module(mochiweb).
+-author('bob@mochimedia.com').
+
+-export([new_request/1, new_response/1]).
+-export([all_loaded/0, all_loaded/1, reload/0]).
+-export([ensure_started/1]).
+
+reload() ->
+    [c:l(Module) || Module <- all_loaded()].
+
+all_loaded() ->
+    all_loaded(filename:dirname(code:which(?MODULE))).
+
+all_loaded(Base) when is_atom(Base) ->
+    [];
+all_loaded(Base) ->
+    FullBase = Base ++ "/",
+    F = fun ({_Module, Loaded}, Acc) when is_atom(Loaded) ->
+                Acc;
+            ({Module, Loaded}, Acc) ->
+                case lists:prefix(FullBase, Loaded) of
+                    true ->
+                        [Module | Acc];
+                    false ->
+                        Acc
+                end
+        end,
+    lists:foldl(F, [], code:all_loaded()).
+
+%% See the erlang:decode_packet/3 docs for the full type
+-spec uri(HttpUri :: term()) -> string().
+uri({abs_path, Uri}) ->
+    Uri;
+%% TODO:
+%% This makes it hard to implement certain kinds of proxies with mochiweb,
+%% perhaps a field could be added to the mochiweb_request record to preserve
+%% this information in raw_path.
+uri({absoluteURI, _Protocol, _Host, _Port, Uri}) ->
+    Uri;
+%% From http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
+uri('*') ->
+    "*";
+%% Erlang decode_packet will return this for requests like `CONNECT host:port`
+uri({scheme, Hostname, Port}) ->
+    Hostname ++ ":" ++ Port;
+uri(HttpString) when is_list(HttpString) ->
+    HttpString.
+
+%% @spec new_request( {Socket, Request, Headers}
+%%                  | {Socket, Opts, Request, Headers} ) -> MochiWebRequest
+%% @doc Return a mochiweb_request data structure.
+new_request({Socket, {Method, HttpUri, Version}, Headers}) ->
+    new_request({Socket, [], {Method, HttpUri, Version}, Headers});
+
+new_request({Socket, Opts, {Method, HttpUri, Version}, Headers}) ->
+    mochiweb_request:new(Socket,
+                         Opts,
+                         Method,
+                         uri(HttpUri),
+                         Version,
+                         mochiweb_headers:make(Headers)).
+
+%% @spec new_response({Request, integer(), Headers}) -> MochiWebResponse
+%% @doc Return a mochiweb_response data structure.
+new_response({Request, Code, Headers}) ->
+    mochiweb_response:new(Request,
+                          Code,
+                          mochiweb_headers:make(Headers)).
+
+%% @spec ensure_started(App::atom()) -> ok
+%% @doc Start the given App if it has not been started already.
+ensure_started(App) ->
+    case application:start(App) of
+        ok ->
+            ok;
+        {error, {already_started, App}} ->
+            ok
+    end.
diff --git a/deps/mochiweb/src/mochiweb_acceptor.erl b/deps/mochiweb/src/mochiweb_acceptor.erl
new file mode 100644 (file)
index 0000000..44ce91f
--- /dev/null
@@ -0,0 +1,83 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
+%% @doc MochiWeb acceptor.
+
+-module(mochiweb_acceptor).
+-author('bob@mochimedia.com').
+
+-include("internal.hrl").
+
+-export([start_link/3, start_link/4, init/4]).
+
+-define(EMFILE_SLEEP_MSEC, 100).
+
+start_link(Server, Listen, Loop) ->
+    start_link(Server, Listen, Loop, []).
+
+start_link(Server, Listen, Loop, Opts) ->
+    proc_lib:spawn_link(?MODULE, init, [Server, Listen, Loop, Opts]).
+
+do_accept(Server, Listen) ->
+    T1 = os:timestamp(),
+    case mochiweb_socket:transport_accept(Listen) of
+        {ok, Socket} ->
+            gen_server:cast(Server, {accepted, self(), timer:now_diff(os:timestamp(), T1)}),
+            mochiweb_socket:finish_accept(Socket);
+        Other ->
+            Other
+    end.
+
+init(Server, Listen, Loop, Opts) ->
+    case catch do_accept(Server, Listen) of
+        {ok, Socket} ->
+            call_loop(Loop, Socket, Opts);
+        {error, Err} when Err =:= closed orelse
+                          Err =:= esslaccept orelse
+                          Err =:= timeout ->
+            exit(normal);
+        Other ->
+            %% Mitigate out of file descriptor scenario by sleeping for a
+            %% short time to slow error rate
+            case Other of
+                {error, emfile} ->
+                    receive
+                    after ?EMFILE_SLEEP_MSEC ->
+                            ok
+                    end;
+                _ ->
+                    ok
+            end,
+            error_logger:error_report(
+              [{application, mochiweb},
+               "Accept failed error",
+               lists:flatten(io_lib:format("~p", [Other]))]),
+            exit({error, accept_failed})
+    end.
+
+call_loop({M, F}, Socket, Opts) ->
+    M:F(Socket, Opts);
+call_loop({M, F, [A1]}, Socket, Opts) ->
+    M:F(Socket, Opts, A1);
+call_loop({M, F, A}, Socket, Opts) ->
+    erlang:apply(M, F, [Socket, Opts | A]);
+call_loop(Loop, Socket, Opts) ->
+    Loop(Socket, Opts).
similarity index 71%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_base64url.erl
rename to deps/mochiweb/src/mochiweb_base64url.erl
index ab5aaece4c5248ec6e977409ddb959908b6a24b4..e6a8e13f90f08237e359f7b4740083a61d55a858 100644 (file)
@@ -1,5 +1,27 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2013 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
 -module(mochiweb_base64url).
 -export([encode/1, decode/1]).
+
 %% @doc URL and filename safe base64 variant with no padding,
 %% also known as "base64url" per RFC 4648.
 %%
 %% '_' is used in place of '/' (63),
 %% padding is implicit rather than explicit ('=').
 
--spec encode(iolist()) -> binary().
+-spec encode(iolist() | binary()) -> binary().
 encode(B) when is_binary(B) ->
     encode_binary(B);
 encode(L) when is_list(L) ->
     encode_binary(iolist_to_binary(L)).
 
--spec decode(iolist()) -> binary().
+-spec decode(iolist() | binary()) -> binary().
 decode(B) when is_binary(B) ->
     decode_binary(B);
 decode(L) when is_list(L) ->
similarity index 98%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_charref.erl
rename to deps/mochiweb/src/mochiweb_charref.erl
index 665d0f9f452ec8080708722854ab9420755384ec..143452e5ba564b7099f372ff2851028fcefc1777 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Converts HTML 5 charrefs and entities to codepoints (or lists of code points).
 -module(mochiweb_charref).
@@ -11,7 +29,7 @@
 %%      codepoint, or return undefined on failure.
 %%      The input should not include an ampersand or semicolon.
 %%      charref("#38") = 38, charref("#x26") = 38, charref("amp") = 38.
-%% -spec charref(binary() | string()) -> integer() | [integer()] | undefined.
+-spec charref(binary() | string()) -> integer() | [integer()] | undefined.
 charref(B) when is_binary(B) ->
     charref(binary_to_list(B));
 charref([$#, C | L]) when C =:= $x orelse C =:= $X ->
diff --git a/deps/mochiweb/src/mochiweb_clock.erl b/deps/mochiweb/src/mochiweb_clock.erl
new file mode 100644 (file)
index 0000000..4f101c5
--- /dev/null
@@ -0,0 +1,101 @@
+%% Copyright (c) 2011-2014, Loïc Hoguin <essen@ninenines.eu>
+%% Copyright (c) 2015, Robert Kowalski <rok@kowalski.gd>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% While a gen_server process runs in the background to update
+%% the cache of formatted dates every second, all API calls are
+%% local and directly read from the ETS cache table, providing
+%% fast time and date computations.
+
+-module(mochiweb_clock).
+
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+-export([start/0]).
+-export([stop/0]).
+-export([rfc1123/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+-spec start() -> {ok, pid()}.
+start() ->
+    gen_server:start({local, ?MODULE}, ?MODULE, [], []).
+
+-spec stop() -> stopped.
+stop() ->
+    gen_server:call(?MODULE, stop).
+
+-spec rfc1123() -> string().
+rfc1123() ->
+    case ets:lookup(?MODULE, rfc1123) of
+        [{rfc1123, Date}] ->
+            Date;
+        [] ->
+            ""
+    end.
+
+%% gen_server.
+
+-spec init([]) -> {ok, #state{}}.
+init([]) ->
+    ?MODULE = ets:new(?MODULE, [named_table, protected, {read_concurrency, true}]),
+    handle_info(update_date, #state{}),
+    timer:send_interval(1000, update_date),
+    {ok, #state{}}.
+
+-type from() :: {pid(), term()}.
+-spec handle_call
+    (stop, from(), State) -> {stop, normal, stopped, State}
+    when State::#state{}.
+handle_call(stop, _From, State) ->
+    {stop, normal, stopped, State};
+handle_call(_Request, _From, State) ->
+    {reply, ignored, State}.
+
+-spec handle_cast(_, State) -> {noreply, State} when State::#state{}.
+handle_cast(_Msg, State) ->
+    {noreply, State}.
+
+-spec handle_info(any(), State) -> {noreply, State} when State::#state{}.
+handle_info(update_date, State) ->
+    Date = httpd_util:rfc1123_date(),
+    ets:insert(?MODULE, {rfc1123, Date}),
+    {noreply, State};
+handle_info(_Info, State) ->
+    {noreply, State}.
+
+-spec terminate(_, _) -> ok.
+terminate(_Reason, _State) ->
+    ok.
+
+-spec code_change(_, State, _) -> {ok, State} when State::#state{}.
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
similarity index 90%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cookies.erl
rename to deps/mochiweb/src/mochiweb_cookies.erl
index 1cc4e91f3fb220513258faf45f8972f17157b404..95390418336a61bb55a1e770439f96578c3850fa 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Emad El-Haraty <emad@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc HTTP Cookie parsing and generating (RFC 2109, RFC 2965).
 
similarity index 64%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cover.erl
rename to deps/mochiweb/src/mochiweb_cover.erl
index aa075d5bc9b6ed70966997a62cffc4503d991c6f..ebc2c1883c8061be2977af0511172c96386ee954 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2010 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Workarounds for various cover deficiencies.
 -module(mochiweb_cover).
diff --git a/deps/mochiweb/src/mochiweb_echo.erl b/deps/mochiweb/src/mochiweb_echo.erl
new file mode 100644 (file)
index 0000000..b14505c
--- /dev/null
@@ -0,0 +1,59 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
+%% @doc Simple and stupid echo server to demo mochiweb_socket_server.
+
+-module(mochiweb_echo).
+-author('bob@mochimedia.com').
+-export([start/0, stop/0, loop/1]).
+
+stop() ->
+    mochiweb_socket_server:stop(?MODULE).
+
+start() ->
+    mochiweb_socket_server:start([{link, false} | options()]).
+
+options() ->
+    [{name, ?MODULE},
+     {port, 6789},
+     {ip, "127.0.0.1"},
+     {max, 1},
+     {loop, {?MODULE, loop}}].
+
+loop(Socket) ->
+    case mochiweb_socket:recv(Socket, 0, 30000) of
+        {ok, Data} ->
+            case mochiweb_socket:send(Socket, Data) of
+                ok ->
+                    loop(Socket);
+                _ ->
+                    exit(normal)
+            end;
+        _Other ->
+            exit(normal)
+    end.
+
+%%
+%% Tests
+%%
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+-endif.
similarity index 93%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_headers.erl
rename to deps/mochiweb/src/mochiweb_headers.erl
index b49cf9e7b668089120dd34e755147b42f039e2f2..457758f967f2d2479c31a11062b7296003bde2a0 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Case preserving (but case insensitive) HTTP Header dictionary.
 
similarity index 91%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_html.erl
rename to deps/mochiweb/src/mochiweb_html.erl
index 3732924a650387fd91678b302c375a0883456087..70723af4f140fe5f22edffd612ac3e4d82d6b331 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Loosely tokenizes and generates parse trees for HTML 4.
 -module(mochiweb_html).
@@ -621,13 +639,44 @@ find_gt(Bin, S=#decoder{offset=O}, HasSlash) ->
 
 tokenize_charref(Bin, S=#decoder{offset=O}) ->
     try
-        tokenize_charref(Bin, S, O)
+        case tokenize_charref_raw(Bin, S, O) of
+            {C1, S1} when C1 >= 16#D800 andalso C1 =< 16#DFFF ->
+                %% Surrogate pair
+                tokeninize_charref_surrogate_pair(Bin, S1, C1);
+            {Unichar, S1} when is_integer(Unichar) ->
+                {{data, mochiutf8:codepoint_to_bytes(Unichar), false},
+                 S1};
+            {Unichars, S1} when is_list(Unichars) ->
+                {{data, unicode:characters_to_binary(Unichars), false},
+                 S1};
+            {undefined, _} ->
+                throw(invalid_charref)
+        end
     catch
         throw:invalid_charref ->
             {{data, <<"&">>, false}, S}
     end.
 
-tokenize_charref(Bin, S=#decoder{offset=O}, Start) ->
+tokeninize_charref_surrogate_pair(Bin, S=#decoder{offset=O}, C1) ->
+    case Bin of
+        <<_:O/binary, $&, _/binary>> ->
+            case tokenize_charref_raw(Bin, ?INC_COL(S), O + 1) of
+                {C2, S1} when C2 >= 16#D800 andalso C2 =< 16#DFFF -> %% must range-check C2 (low surrogate), not C1
+                    {{data,
+                      unicode:characters_to_binary(
+                        <<C1:16, C2:16>>,
+                        utf16,
+                        utf8),
+                      false},
+                     S1};
+                _ ->
+                    throw(invalid_charref)
+            end;
+        _ ->
+            throw(invalid_charref)
+    end.
+
+tokenize_charref_raw(Bin, S=#decoder{offset=O}, Start) ->
     case Bin of
         <<_:O/binary>> ->
             throw(invalid_charref);
@@ -640,17 +689,9 @@ tokenize_charref(Bin, S=#decoder{offset=O}, Start) ->
         <<_:O/binary, $;, _/binary>> ->
             Len = O - Start,
             <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
-            Data = case mochiweb_charref:charref(Raw) of
-                       undefined ->
-                           throw(invalid_charref);
-                       Unichar when is_integer(Unichar) ->
-                           mochiutf8:codepoint_to_bytes(Unichar);
-                       Unichars when is_list(Unichars) ->
-                           unicode:characters_to_binary(Unichars)
-                   end,
-            {{data, Data, false}, ?INC_COL(S)};
+            {mochiweb_charref:charref(Raw), ?INC_COL(S)};
         _ ->
-            tokenize_charref(Bin, ?INC_COL(S), Start)
+            tokenize_charref_raw(Bin, ?INC_COL(S), Start)
     end.
 
 tokenize_doctype(Bin, S) ->
similarity index 61%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_http.erl
rename to deps/mochiweb/src/mochiweb_http.erl
index ae6410f1e49186b4bd20577d1a73047f5b09e4ec..568019ffcd7c8033f48703082fb31de9ba69b2c1 100644 (file)
@@ -1,12 +1,30 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc HTTP server.
 
 -module(mochiweb_http).
 -author('bob@mochimedia.com').
 -export([start/1, start_link/1, stop/0, stop/1]).
--export([loop/2]).
+-export([loop/3]).
 -export([after_response/2, reentry/1]).
 -export([parse_range_request/1, range_skip_length/2]).
 
 -define(DEFAULTS, [{name, ?MODULE},
                    {port, 8888}]).
 
+-ifdef(gen_tcp_r15b_workaround).
+r15b_workaround() -> true.
+-else.
+r15b_workaround() -> false.
+-endif.
+
 parse_options(Options) ->
     {loop, HttpLoop} = proplists:lookup(loop, Options),
     Loop = {?MODULE, loop, [HttpLoop]},
@@ -34,51 +58,48 @@ stop(Name) ->
 %%     Option = {name, atom()} | {ip, string() | tuple()} | {backlog, integer()}
 %%              | {nodelay, boolean()} | {acceptor_pool_size, integer()}
 %%              | {ssl, boolean()} | {profile_fun, undefined | (Props) -> ok}
-%%              | {link, false}
+%%              | {link, false} | {recbuf, undefined | non_negative_integer()}
 %% @doc Start a mochiweb server.
 %%      profile_fun is used to profile accept timing.
 %%      After each accept, if defined, profile_fun is called with a proplist of a subset of the mochiweb_socket_server state and timing information.
 %%      The proplist is as follows: [{name, Name}, {port, Port}, {active_sockets, ActiveSockets}, {timing, Timing}].
 %% @end
 start(Options) ->
+    ok = ensure_started(mochiweb_clock),
     mochiweb_socket_server:start(parse_options(Options)).
 
 start_link(Options) ->
+    ok = ensure_started(mochiweb_clock),
     mochiweb_socket_server:start_link(parse_options(Options)).
 
-loop(Socket, Body) ->
-    ok = mochiweb_socket:setopts(Socket, [{packet, http}]),
-    request(Socket, Body).
+ensure_started(M) ->
+    case M:start() of
+        {ok, _Pid} ->
+            ok;
+        {error, {already_started, _Pid}} ->
+            ok
+    end.
 
--ifdef(gen_tcp_r15b_workaround).
--define(R15B_GEN_TCP_FIX, {tcp_error,_,emsgsize} ->
-                 % R15B02 returns this then closes the socket, so close and exit
-                 mochiweb_socket:close(Socket),
-                 exit(normal);
-       ).
--else.
--define(R15B_GEN_TCP_FIX,).
--endif.
+loop(Socket, Opts, Body) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, http}])),
+    request(Socket, Opts, Body).
 
-request(Socket, Body) ->
-    ok = mochiweb_socket:setopts(Socket, [{active, once}]),
+request(Socket, Opts, Body) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{active, once}])),
     receive
         {Protocol, _, {http_request, Method, Path, Version}} when Protocol == http orelse Protocol == ssl ->
-            ok = mochiweb_socket:setopts(Socket, [{packet, httph}]),
-            headers(Socket, {Method, Path, Version}, [], Body, 0);
+            ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, httph}])),
+            headers(Socket, Opts, {Method, Path, Version}, [], Body, 0);
         {Protocol, _, {http_error, "\r\n"}} when Protocol == http orelse Protocol == ssl ->
-            request(Socket, Body);
+            request(Socket, Opts, Body);
         {Protocol, _, {http_error, "\n"}} when Protocol == http orelse Protocol == ssl ->
-            request(Socket, Body);
+            request(Socket, Opts, Body);
         {tcp_closed, _} ->
             mochiweb_socket:close(Socket),
             exit(normal);
         {ssl_closed, _} ->
             mochiweb_socket:close(Socket),
-            exit(normal);
-        ?R15B_GEN_TCP_FIX
-        _Other ->
-            handle_invalid_request(Socket)
+            exit(normal)
     after ?REQUEST_RECV_TIMEOUT ->
         mochiweb_socket:close(Socket),
         exit(normal)
@@ -89,26 +110,25 @@ reentry(Body) ->
             ?MODULE:after_response(Body, Req)
     end.
 
-headers(Socket, Request, Headers, _Body, ?MAX_HEADERS) ->
+headers(Socket, Opts, Request, Headers, _Body, ?MAX_HEADERS) ->
     %% Too many headers sent, bad request.
-    ok = mochiweb_socket:setopts(Socket, [{packet, raw}]),
-    handle_invalid_request(Socket, Request, Headers);
-headers(Socket, Request, Headers, Body, HeaderCount) ->
-    ok = mochiweb_socket:setopts(Socket, [{active, once}]),
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
+    handle_invalid_request(Socket, Opts, Request, Headers);
+headers(Socket, Opts, Request, Headers, Body, HeaderCount) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{active, once}])),
     receive
         {Protocol, _, http_eoh} when Protocol == http orelse Protocol == ssl ->
-            Req = new_request(Socket, Request, Headers),
+            Req = new_request(Socket, Opts, Request, Headers),
             call_body(Body, Req),
             ?MODULE:after_response(Body, Req);
         {Protocol, _, {http_header, _, Name, _, Value}} when Protocol == http orelse Protocol == ssl ->
-            headers(Socket, Request, [{Name, Value} | Headers], Body,
+            headers(Socket, Opts, Request, [{Name, Value} | Headers], Body,
                     1 + HeaderCount);
         {tcp_closed, _} ->
             mochiweb_socket:close(Socket),
             exit(normal);
-        ?R15B_GEN_TCP_FIX
-        _Other ->
-            handle_invalid_request(Socket, Request, Headers)
+        {tcp_error, _, emsgsize} = Other ->
+            handle_invalid_msg_request(Other, Socket, Opts, Request, Headers)
     after ?HEADERS_RECV_TIMEOUT ->
         mochiweb_socket:close(Socket),
         exit(normal)
@@ -121,21 +141,27 @@ call_body({M, F}, Req) ->
 call_body(Body, Req) ->
     Body(Req).
 
-%% -spec handle_invalid_request(term()) -> no_return().
-handle_invalid_request(Socket) ->
-    handle_invalid_request(Socket, {'GET', {abs_path, "/"}, {0,9}}, []),
-    exit(normal).
+-spec handle_invalid_msg_request(term(), term(), term(), term(), term()) -> no_return().
+handle_invalid_msg_request(Msg, Socket, Opts, Request, RevHeaders) ->
+    case {Msg, r15b_workaround()} of
+        {{tcp_error,_,emsgsize}, true} ->
+            %% R15B02 returns this then closes the socket, so close and exit
+            mochiweb_socket:close(Socket),
+            exit(normal);
+        _ ->
+            handle_invalid_request(Socket, Opts, Request, RevHeaders)
+    end.
 
-%% -spec handle_invalid_request(term(), term(), term()) -> no_return().
-handle_invalid_request(Socket, Request, RevHeaders) ->
-    Req = new_request(Socket, Request, RevHeaders),
+-spec handle_invalid_request(term(), term(), term(), term()) -> no_return().
+handle_invalid_request(Socket, Opts, Request, RevHeaders) ->
+    Req = new_request(Socket, Opts, Request, RevHeaders),
     Req:respond({400, [], []}),
     mochiweb_socket:close(Socket),
     exit(normal).
 
-new_request(Socket, Request, RevHeaders) ->
-    ok = mochiweb_socket:setopts(Socket, [{packet, raw}]),
-    mochiweb:new_request({Socket, Request, lists:reverse(RevHeaders)}).
+new_request(Socket, Opts, Request, RevHeaders) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
+    mochiweb:new_request({Socket, Opts, Request, lists:reverse(RevHeaders)}).
 
 after_response(Body, Req) ->
     Socket = Req:get(socket),
@@ -146,15 +172,14 @@ after_response(Body, Req) ->
         false ->
             Req:cleanup(),
             erlang:garbage_collect(),
-            ?MODULE:loop(Socket, Body)
+            ?MODULE:loop(Socket, mochiweb_request:get(opts, Req), Body)
     end.
 
-parse_range_request("bytes=0-") ->
-    undefined;
 parse_range_request(RawRange) when is_list(RawRange) ->
     try
         "bytes=" ++ RangeString = RawRange,
-        Ranges = string:tokens(RangeString, ","),
+        RangeTokens = [string:strip(R) || R <- string:tokens(RangeString, ",")],
+        Ranges = [R || R <- RangeTokens, string:len(R) > 0],
         lists:map(fun ("-" ++ V)  ->
                           {none, list_to_integer(V)};
                       (R) ->
@@ -181,11 +206,9 @@ range_skip_length(Spec, Size) ->
             {R, Size - R};
         {_OutOfRange, none} ->
             invalid_range;
-        {Start, End} when 0 =< Start, Start =< End, End < Size ->
-            {Start, End - Start + 1};
-        {Start, End} when 0 =< Start, Start =< End, End >= Size ->
-            {Start, Size - Start};
-        {_OutOfRange, _End} ->
+        {Start, End} when Start >= 0, Start < Size, Start =< End ->
+            {Start, erlang:min(End + 1, Size) - Start};
+        {_InvalidStart, _InvalidEnd} ->
             invalid_range
     end.
 
@@ -202,7 +225,7 @@ range_test() ->
     ?assertEqual([{none, 20}], parse_range_request("bytes=-20")),
 
     %% trivial single range
-    ?assertEqual(undefined, parse_range_request("bytes=0-")),
+    ?assertEqual([{0, none}], parse_range_request("bytes=0-")),
 
     %% invalid, single ranges
     ?assertEqual(fail, parse_range_request("")),
@@ -217,6 +240,19 @@ range_test() ->
        [{20, none}, {50, 100}, {none, 200}],
        parse_range_request("bytes=20-,50-100,-200")),
 
+    %% valid, multiple range with whitespace
+    ?assertEqual(
+       [{20, 30}, {50, 100}, {110, 200}],
+       parse_range_request("bytes=20-30, 50-100 , 110-200")),
+
+    %% valid, multiple range with extra commas
+    ?assertEqual(
+       [{20, 30}, {50, 100}, {110, 200}],
+       parse_range_request("bytes=20-30,,50-100,110-200")),
+    ?assertEqual(
+       [{20, 30}, {50, 100}, {110, 200}],
+       parse_range_request("bytes=20-30, ,50-100,,,110-200")),
+
     %% no ranges
     ?assertEqual([], parse_range_request("bytes=")),
     ok.
@@ -236,6 +272,7 @@ range_skip_length_test() ->
     ?assertEqual({BodySize, 0}, range_skip_length({none, 0}, BodySize)),
     ?assertEqual({0, BodySize}, range_skip_length({none, BodySize}, BodySize)),
     ?assertEqual({0, BodySize}, range_skip_length({0, none}, BodySize)),
+    ?assertEqual({0, BodySize}, range_skip_length({0, BodySize + 1}, BodySize)),
     BodySizeLess1 = BodySize - 1,
     ?assertEqual({BodySizeLess1, 1},
                  range_skip_length({BodySize - 1, none}, BodySize)),
@@ -263,6 +300,8 @@ range_skip_length_test() ->
                  range_skip_length({-1, none}, BodySize)),
     ?assertEqual(invalid_range,
                  range_skip_length({BodySize, none}, BodySize)),
+    ?assertEqual(invalid_range,
+                 range_skip_length({BodySize + 1, BodySize + 5}, BodySize)),
     ok.
 
 -endif.
similarity index 50%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_io.erl
rename to deps/mochiweb/src/mochiweb_io.erl
index 8454b43dabe8ae9aad72352f906a0a47e94b0044..15b6b3ad90c4db707a57eef72854c0ed6839f9c2 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Utilities for dealing with IO devices (open files).
 
similarity index 90%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_mime.erl
rename to deps/mochiweb/src/mochiweb_mime.erl
index 7d9f2493c174b51656b0ec93e3b509b35124eaef..949d957520b020338ebf385419909fcef6b32fe1 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Gives a good MIME type guess based on file extension.
 
similarity index 96%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_multipart.erl
rename to deps/mochiweb/src/mochiweb_multipart.erl
index a4857d6e44c6e91f3082a9699327a1d7e6fb8f9b..1d18ae214f35903e1c15918e35a1815c95842406 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Utilities for parsing multipart/form-data.
 
@@ -38,7 +56,7 @@ parts_to_body([{Start, End, Body}], ContentType, Size) ->
     {HeaderList, Body};
 parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
     parts_to_multipart_body(BodyList, ContentType, Size,
-                            mochihex:to_hex(mochiweb_util:rand_bytes(8))).
+                            mochihex:to_hex(crypto:rand_bytes(8))).
 
 %% @spec parts_to_multipart_body([bodypart()], ContentType::string(),
 %%                               Size::integer(), Boundary::string()) ->
@@ -374,7 +392,7 @@ parse3(Transport) ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -410,7 +428,7 @@ parse2(Transport) ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -447,7 +465,7 @@ do_parse_form(Transport) ->
                  "--AaB03x--",
                  ""], "\r\n"),
     BinContent = iolist_to_binary(Content),
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -500,7 +518,7 @@ do_parse(Transport) ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -552,7 +570,7 @@ parse_partial_body_boundary(Transport) ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -605,7 +623,7 @@ parse_large_header(Transport) ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -681,7 +699,7 @@ flash_parse(Transport) ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -729,7 +747,7 @@ flash_parse2(Transport) ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
@@ -856,7 +874,7 @@ multipart_parsing_benchmark() ->
               body_end,
               eof],
     TestCallback = fun (Next) -> test_callback(Next, Expect) end,
-    ServerFun = fun (Socket) ->
+    ServerFun = fun (Socket, _Opts) ->
                         ok = mochiweb_socket:send(Socket, BinContent),
                         exit(normal)
                 end,
similarity index 76%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request.erl
rename to deps/mochiweb/src/mochiweb_request.erl
index d967bdb4f5e6dce5f41902222bf1a152dfa98205..39890ce0468184350710086a06d351cbf8bef041 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc MochiWeb HTTP Request abstraction.
 
@@ -11,9 +29,9 @@
 
 -define(QUIP, "Any of you quaids got a smint?").
 
--export([new/5]).
+-export([new/5, new/6]).
 -export([get_header_value/2, get_primary_header_value/2, get_combined_header_value/2, get/2, dump/1]).
--export([send/2, recv/2, recv/3, recv_body/1, recv_body/2, stream_body/4]).
+-export([send/2, recv/2, recv/3, recv_body/1, recv_body/2, stream_body/4, stream_body/5]).
 -export([start_response/2, start_response_length/2, start_raw_response/2]).
 -export([respond/2, ok/2]).
 -export([not_found/1, not_found/2]).
 -define(IDLE_TIMEOUT, 300000).
 
 % Maximum recv_body() length of 1MB
--define(MAX_RECV_BODY, 104857600).
+-define(MAX_RECV_BODY, (1024*1024)).
 
 %% @spec new(Socket, Method, RawPath, Version, headers()) -> request()
 %% @doc Create a new request instance.
 new(Socket, Method, RawPath, Version, Headers) ->
-    {?MODULE, [Socket, Method, RawPath, Version, Headers]}.
+    new(Socket, [], Method, RawPath, Version, Headers).
+
+%% @spec new(Socket, Opts, Method, RawPath, Version, headers()) -> request()
+%% @doc Create a new request instance.
+new(Socket, Opts, Method, RawPath, Version, Headers) ->
+    {?MODULE, [Socket, Opts, Method, RawPath, Version, Headers]}.
 
 %% @spec get_header_value(K, request()) -> undefined | Value
 %% @doc Get the value of a given request header.
-get_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) ->
+get_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
     mochiweb_headers:get_value(K, Headers).
 
-get_primary_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) ->
+get_primary_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
     mochiweb_headers:get_primary_value(K, Headers).
 
-get_combined_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) ->
+get_combined_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
     mochiweb_headers:get_combined_value(K, Headers).
 
 %% @type field() = socket | scheme | method | raw_path | version | headers | peer | path | body_length | range
@@ -70,24 +93,24 @@ get_combined_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, He
 %%      an ssl socket will be returned as <code>{ssl, SslSocket}</code>.
 %%      You can use <code>SslSocket</code> with the <code>ssl</code>
 %%      application, eg: <code>ssl:peercert(SslSocket)</code>.
-get(socket, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
+get(socket, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
     Socket;
-get(scheme, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
+get(scheme, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
     case mochiweb_socket:type(Socket) of
         plain ->
             http;
         ssl ->
             https
     end;
-get(method, {?MODULE, [_Socket, Method, _RawPath, _Version, _Headers]}) ->
+get(method, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}) ->
     Method;
-get(raw_path, {?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) ->
+get(raw_path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
     RawPath;
-get(version, {?MODULE, [_Socket, _Method, _RawPath, Version, _Headers]}) ->
+get(version, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}) ->
     Version;
-get(headers, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) ->
+get(headers, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
     Headers;
-get(peer, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+get(peer, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case mochiweb_socket:peername(Socket) of
         {ok, {Addr={10, _, _, _}, _Port}} ->
             case get_header_value("x-forwarded-for", THIS) of
@@ -108,7 +131,7 @@ get(peer, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
         {error, enotconn} ->
             exit(normal)
     end;
-get(path, {?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) ->
+get(path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
     case erlang:get(?SAVE_PATH) of
         undefined ->
             {Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
@@ -118,7 +141,7 @@ get(path, {?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) ->
         Cached ->
             Cached
     end;
-get(body_length, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+get(body_length, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case erlang:get(?SAVE_BODY_LENGTH) of
         undefined ->
             BodyLength = body_length(THIS),
@@ -127,26 +150,29 @@ get(body_length, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THI
         {cached, Cached} ->
             Cached
     end;
-get(range, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+get(range, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case get_header_value(range, THIS) of
         undefined ->
             undefined;
         RawRange ->
             mochiweb_http:parse_range_request(RawRange)
-    end.
+    end;
+get(opts, {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}) ->
+    Opts.
 
 %% @spec dump(request()) -> {mochiweb_request, [{atom(), term()}]}
 %% @doc Dump the internal representation to a "human readable" set of terms
 %%      for debugging/inspection purposes.
-dump({?MODULE, [_Socket, Method, RawPath, Version, Headers]}) ->
+dump({?MODULE, [_Socket, Opts, Method, RawPath, Version, Headers]}) ->
     {?MODULE, [{method, Method},
                {version, Version},
                {raw_path, RawPath},
+               {opts, Opts},
                {headers, mochiweb_headers:to_list(Headers)}]}.
 
 %% @spec send(iodata(), request()) -> ok
 %% @doc Send data over the socket.
-send(Data, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
+send(Data, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
     case mochiweb_socket:send(Socket, Data) of
         ok ->
             ok;
@@ -157,13 +183,13 @@ send(Data, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
 %% @spec recv(integer(), request()) -> binary()
 %% @doc Receive Length bytes from the client as a binary, with the default
 %%      idle timeout.
-recv(Length, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+recv(Length, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     recv(Length, ?IDLE_TIMEOUT, THIS).
 
 %% @spec recv(integer(), integer(), request()) -> binary()
 %% @doc Receive Length bytes from the client as a binary, with the given
 %%      Timeout in msec.
-recv(Length, Timeout, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
+recv(Length, Timeout, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
     case mochiweb_socket:recv(Socket, Length, Timeout) of
         {ok, Data} ->
             put(?SAVE_RECV, true),
@@ -174,7 +200,7 @@ recv(Length, Timeout, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}
 
 %% @spec body_length(request()) -> undefined | chunked | unknown_transfer_encoding | integer()
 %% @doc  Infer body length from transfer-encoding and content-length headers.
-body_length({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+body_length({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case get_header_value("transfer-encoding", THIS) of
         undefined ->
             case get_combined_header_value("content-length", THIS) of
@@ -193,13 +219,13 @@ body_length({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
 %% @spec recv_body(request()) -> binary()
 %% @doc Receive the body of the HTTP request (defined by Content-Length).
 %%      Will only receive up to the default max-body length of 1MB.
-recv_body({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+recv_body({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     recv_body(?MAX_RECV_BODY, THIS).
 
 %% @spec recv_body(integer(), request()) -> binary()
 %% @doc Receive the body of the HTTP request (defined by Content-Length).
 %%      Will receive up to MaxBody bytes.
-recv_body(MaxBody, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+recv_body(MaxBody, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case erlang:get(?SAVE_BODY) of
         undefined ->
             % we could use a sane constant for max chunk size
@@ -219,11 +245,11 @@ recv_body(MaxBody, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=T
         Cached -> Cached
     end.
 
-stream_body(MaxChunkSize, ChunkFun, FunState, {?MODULE,[_Socket,_Method,_RawPath,_Version,_Headers]}=THIS) ->
+stream_body(MaxChunkSize, ChunkFun, FunState, {?MODULE,[_Socket,_Opts,_Method,_RawPath,_Version,_Headers]}=THIS) ->
     stream_body(MaxChunkSize, ChunkFun, FunState, undefined, THIS).
 
 stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength,
-            {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+            {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     Expect = case get_header_value("expect", THIS) of
                  undefined ->
                      undefined;
@@ -254,7 +280,7 @@ stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength,
             MaxBodyLength when is_integer(MaxBodyLength), MaxBodyLength < Length ->
                 exit({body_too_large, content_length});
             _ ->
-                stream_unchunked_body(Length, ChunkFun, FunState, THIS)
+                stream_unchunked_body(MaxChunkSize,Length, ChunkFun, FunState, THIS)
             end
     end.
 
@@ -263,23 +289,16 @@ stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength,
 %% @doc Start the HTTP response by sending the Code HTTP response and
 %%      ResponseHeaders. The server will set header defaults such as Server
 %%      and Date if not present in ResponseHeaders.
-start_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
-    HResponse = mochiweb_headers:make(ResponseHeaders),
-    HResponse1 = mochiweb_headers:default_from_list(server_headers(),
-                                                    HResponse),
-    start_raw_response({Code, HResponse1}, THIS).
+start_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+    start_raw_response({Code, ResponseHeaders}, THIS).
 
 %% @spec start_raw_response({integer(), headers()}, request()) -> response()
 %% @doc Start the HTTP response by sending the Code HTTP response and
 %%      ResponseHeaders.
-start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Method, _RawPath, Version, _Headers]}=THIS) ->
-    F = fun ({K, V}, Acc) ->
-                [mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
-        end,
-    End = lists:foldl(F, [<<"\r\n">>],
-                      mochiweb_headers:to_list(ResponseHeaders)),
-    send([make_version(Version), make_code(Code), <<"\r\n">> | End], THIS),
-    mochiweb:new_response({THIS, Code, ResponseHeaders}).
+start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+    {Header, Response} = format_response_header({Code, ResponseHeaders}, THIS),
+    send(Header, THIS),
+    Response.
 
 
 %% @spec start_response_length({integer(), ioheaders(), integer()}, request()) -> response()
@@ -288,18 +307,42 @@ start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Method, _RawPat
 %%      will set header defaults such as Server
 %%      and Date if not present in ResponseHeaders.
 start_response_length({Code, ResponseHeaders, Length},
-                      {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+                      {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     HResponse = mochiweb_headers:make(ResponseHeaders),
     HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
     start_response({Code, HResponse1}, THIS).
 
+%% @spec format_response_header({integer(), ioheaders()} | {integer(), ioheaders(), integer()}, request()) -> iolist()
+%% @doc Format the HTTP response header, including the Code HTTP response and
+%%      ResponseHeaders including an optional Content-Length of Length. The server
+%%      will set header defaults such as Server
+%%      and Date if not present in ResponseHeaders.
+format_response_header({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) ->
+    HResponse = mochiweb_headers:make(ResponseHeaders),
+    HResponse1 = mochiweb_headers:default_from_list(server_headers(), HResponse),
+    HResponse2 = case should_close(THIS) of
+                     true ->
+                         mochiweb_headers:enter("Connection", "close", HResponse1);
+                     false ->
+                         HResponse1
+                 end,
+    End = [[mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">>]
+           || {K, V} <- mochiweb_headers:to_list(HResponse2)],
+    Response = mochiweb:new_response({THIS, Code, HResponse2}),
+    {[make_version(Version), make_code(Code), <<"\r\n">> | [End, <<"\r\n">>]], Response};
+format_response_header({Code, ResponseHeaders, Length},
+                       {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+    HResponse = mochiweb_headers:make(ResponseHeaders),
+    HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
+    format_response_header({Code, HResponse1}, THIS).
+
 %% @spec respond({integer(), ioheaders(), iodata() | chunked | {file, IoDevice}}, request()) -> response()
 %% @doc Start the HTTP response with start_response, and send Body to the
 %%      client (if the get(method) /= 'HEAD'). The Content-Length header
 %%      will be set by the Body length, and the server will insert header
 %%      defaults.
 respond({Code, ResponseHeaders, {file, IoDevice}},
-        {?MODULE, [_Socket, Method, _RawPath, _Version, _Headers]}=THIS) ->
+        {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) ->
     Length = mochiweb_io:iodevice_size(IoDevice),
     Response = start_response_length({Code, ResponseHeaders, Length}, THIS),
     case Method of
@@ -311,7 +354,7 @@ respond({Code, ResponseHeaders, {file, IoDevice}},
               IoDevice)
     end,
     Response;
-respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, Method, _RawPath, Version, _Headers]}=THIS) ->
+respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, _Opts, Method, _RawPath, Version, _Headers]}=THIS) ->
     HResponse = mochiweb_headers:make(ResponseHeaders),
     HResponse1 = case Method of
                      'HEAD' ->
@@ -333,34 +376,32 @@ respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, Method, _RawPath,
                          HResponse
                  end,
     start_response({Code, HResponse1}, THIS);
-respond({Code, ResponseHeaders, Body}, {?MODULE, [_Socket, Method, _RawPath, _Version, _Headers]}=THIS) ->
-    Response = start_response_length({Code, ResponseHeaders, iolist_size(Body)}, THIS),
+respond({Code, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) ->
+    {Header, Response} = format_response_header({Code, ResponseHeaders, iolist_size(Body)}, THIS),
     case Method of
-        'HEAD' ->
-            ok;
-        _ ->
-            send(Body, THIS)
+        'HEAD' -> send(Header, THIS);
+        _      -> send([Header, Body], THIS)
     end,
     Response.
 
 %% @spec not_found(request()) -> response()
 %% @doc Alias for <code>not_found([])</code>.
-not_found({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+not_found({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     not_found([], THIS).
 
 %% @spec not_found(ExtraHeaders, request()) -> response()
 %% @doc Alias for <code>respond({404, [{"Content-Type", "text/plain"}
 %% | ExtraHeaders], &lt;&lt;"Not found."&gt;&gt;})</code>.
-not_found(ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+not_found(ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
              <<"Not found.">>}, THIS).
 
 %% @spec ok({value(), iodata()} | {value(), ioheaders(), iodata() | {file, IoDevice}}, request()) ->
 %%           response()
 %% @doc respond({200, [{"Content-Type", ContentType} | Headers], Body}).
-ok({ContentType, Body}, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+ok({ContentType, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     ok({ContentType, [], Body}, THIS);
-ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     HResponse = mochiweb_headers:make(ResponseHeaders),
     case THIS:get(range) of
         X when (X =:= undefined orelse X =:= fail) orelse Body =:= chunked ->
@@ -393,7 +434,7 @@ ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Method, _RawPath,
 %% @spec should_close(request()) -> bool()
 %% @doc Return true if the connection must be closed. If false, using
 %%      Keep-Alive should be safe.
-should_close({?MODULE, [_Socket, _Method, _RawPath, Version, _Headers]}=THIS) ->
+should_close({?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) ->
     ForceClose = erlang:get(?SAVE_FORCE_CLOSE) =/= undefined,
     DidNotRecv = erlang:get(?SAVE_RECV) =:= undefined,
     ForceClose orelse Version < {1, 0}
@@ -419,7 +460,7 @@ is_close(_) ->
 %% @spec cleanup(request()) -> ok
 %% @doc Clean up any junk in the process dictionary, required before continuing
 %%      a Keep-Alive request.
-cleanup({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}) ->
+cleanup({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
     L = [?SAVE_QS, ?SAVE_PATH, ?SAVE_RECV, ?SAVE_BODY, ?SAVE_BODY_LENGTH,
          ?SAVE_POST, ?SAVE_COOKIE, ?SAVE_FORCE_CLOSE],
     lists:foreach(fun(K) ->
@@ -429,7 +470,7 @@ cleanup({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}) ->
 
 %% @spec parse_qs(request()) -> [{Key::string(), Value::string()}]
 %% @doc Parse the query string of the URL.
-parse_qs({?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) ->
+parse_qs({?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
     case erlang:get(?SAVE_QS) of
         undefined ->
             {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
@@ -442,12 +483,12 @@ parse_qs({?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) ->
 
 %% @spec get_cookie_value(Key::string, request()) -> string() | undefined
 %% @doc Get the value of the given cookie.
-get_cookie_value(Key, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+get_cookie_value(Key, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     proplists:get_value(Key, parse_cookie(THIS)).
 
 %% @spec parse_cookie(request()) -> [{Key::string(), Value::string()}]
 %% @doc Parse the cookie header.
-parse_cookie({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+parse_cookie({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case erlang:get(?SAVE_COOKIE) of
         undefined ->
             Cookies = case get_header_value("cookie", THIS) of
@@ -465,7 +506,7 @@ parse_cookie({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -
 %% @spec parse_post(request()) -> [{Key::string(), Value::string()}]
 %% @doc Parse an application/x-www-form-urlencoded form POST. This
 %%      has the side-effect of calling recv_body().
-parse_post({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+parse_post({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case erlang:get(?SAVE_POST) of
         undefined ->
             Parsed = case recv_body(THIS) of
@@ -489,7 +530,7 @@ parse_post({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
 %% @doc The function is called for each chunk.
 %%      Used internally by read_chunked_body.
 stream_chunked_body(MaxChunkSize, Fun, FunState,
-                    {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+                    {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case read_chunk_length(THIS) of
         0 ->
             Fun({0, read_chunk(0, THIS)}, FunState);
@@ -501,27 +542,28 @@ stream_chunked_body(MaxChunkSize, Fun, FunState,
             stream_chunked_body(MaxChunkSize, Fun, NewState, THIS)
     end.
 
-stream_unchunked_body(0, Fun, FunState, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}) ->
+stream_unchunked_body(_MaxChunkSize, 0, Fun, FunState, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
     Fun({0, <<>>}, FunState);
-stream_unchunked_body(Length, Fun, FunState,
-                      {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > 0 ->
-    PktSize = case Length > ?RECBUF_SIZE of
-        true ->
-            ?RECBUF_SIZE;
-        false ->
-            Length
+stream_unchunked_body(MaxChunkSize, Length, Fun, FunState,
+                      {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > 0 ->
+    RecBuf = case mochilists:get_value(recbuf, Opts, ?RECBUF_SIZE) of
+        undefined -> %os controlled buffer size
+            MaxChunkSize;
+        Val  ->
+            Val
     end,
+    PktSize=min(Length,RecBuf),
     Bin = recv(PktSize, THIS),
     NewState = Fun({PktSize, Bin}, FunState),
-    stream_unchunked_body(Length - PktSize, Fun, NewState, THIS).
+    stream_unchunked_body(MaxChunkSize, Length - PktSize, Fun, NewState, THIS).
 
 %% @spec read_chunk_length(request()) -> integer()
 %% @doc Read the length of the next HTTP chunk.
-read_chunk_length({?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
-    ok = mochiweb_socket:setopts(Socket, [{packet, line}]),
+read_chunk_length({?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])),
     case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
         {ok, Header} ->
-            ok = mochiweb_socket:setopts(Socket, [{packet, raw}]),
+            ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
             Splitter = fun (C) ->
                                C =/= $\r andalso C =/= $\n andalso C =/= $
                        end,
@@ -534,8 +576,8 @@ read_chunk_length({?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
 %% @spec read_chunk(integer(), request()) -> Chunk::binary() | [Footer::binary()]
 %% @doc Read in a HTTP chunk of the given length. If Length is 0, then read the
 %%      HTTP footers (as a list of binaries, since they're nominal).
-read_chunk(0, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
-    ok = mochiweb_socket:setopts(Socket, [{packet, line}]),
+read_chunk(0, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])),
     F = fun (F1, Acc) ->
                 case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
                     {ok, <<"\r\n">>} ->
@@ -547,10 +589,10 @@ read_chunk(0, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
                 end
         end,
     Footers = F(F, []),
-    ok = mochiweb_socket:setopts(Socket, [{packet, raw}]),
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
     put(?SAVE_RECV, true),
     Footers;
-read_chunk(Length, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) ->
+read_chunk(Length, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
     case mochiweb_socket:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
         {ok, <<Chunk:Length/binary, "\r\n">>} ->
             Chunk;
@@ -559,23 +601,23 @@ read_chunk(Length, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -
     end.
 
 read_sub_chunks(Length, MaxChunkSize, Fun, FunState,
-                {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > MaxChunkSize ->
+                {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > MaxChunkSize ->
     Bin = recv(MaxChunkSize, THIS),
     NewState = Fun({size(Bin), Bin}, FunState),
     read_sub_chunks(Length - MaxChunkSize, MaxChunkSize, Fun, NewState, THIS);
 
 read_sub_chunks(Length, _MaxChunkSize, Fun, FunState,
-                {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+                {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     Fun({Length, read_chunk(Length, THIS)}, FunState).
 
 %% @spec serve_file(Path, DocRoot, request()) -> Response
 %% @doc Serve a file relative to DocRoot.
-serve_file(Path, DocRoot, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+serve_file(Path, DocRoot, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     serve_file(Path, DocRoot, [], THIS).
 
 %% @spec serve_file(Path, DocRoot, ExtraHeaders, request()) -> Response
 %% @doc Serve a file relative to DocRoot.
-serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case mochiweb_util:safe_relative_path(Path) of
         undefined ->
             not_found(ExtraHeaders, THIS);
@@ -595,11 +637,11 @@ serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _
 directory_index(FullPath) ->
     filename:join([FullPath, "index.html"]).
 
-maybe_redirect([], FullPath, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+maybe_redirect([], FullPath, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS);
 
 maybe_redirect(RelPath, FullPath, ExtraHeaders,
-               {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}=THIS) ->
+               {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}=THIS) ->
     case string:right(RelPath, 1) of
         "/" ->
             maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS);
@@ -620,7 +662,7 @@ maybe_redirect(RelPath, FullPath, ExtraHeaders,
             respond({301, MoreHeaders, Body}, THIS)
     end.
 
-maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case file:read_file_info(File) of
         {ok, FileInfo} ->
             LastModified = httpd_util:rfc1123_date(FileInfo#file_info.mtime),
@@ -647,7 +689,7 @@ maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Ver
 
 server_headers() ->
     [{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
-     {"Date", httpd_util:rfc1123_date()}].
+     {"Date", mochiweb_clock:rfc1123()}].
 
 make_code(X) when is_integer(X) ->
     [integer_to_list(X), [" " | httpd_util:reason_phrase(X)]];
@@ -672,7 +714,12 @@ range_parts({file, IoDevice}, Ranges) ->
     LocNums = lists:foldr(F, [], Ranges),
     {ok, Data} = file:pread(IoDevice, LocNums),
     Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
-                                   {Skip, Skip + Length - 1, PartialBody}
+                                   case Length of
+                                       0 ->
+                                           {Skip, Skip, <<>>};
+                                       _ ->
+                                           {Skip, Skip + Length - 1, PartialBody}
+                                   end
                            end,
                            LocNums, Data),
     {Bodies, Size};
@@ -714,7 +761,7 @@ range_parts(Body0, Ranges) ->
 %%         accepted_encodings(["gzip", "deflate", "identity"]) ->
 %%            ["deflate", "gzip", "identity"]
 %%
-accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     AcceptEncodingHeader = case get_header_value("Accept-Encoding", THIS) of
         undefined ->
             "";
@@ -752,7 +799,7 @@ accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Method, _RawPath, _V
 %%      5) For an "Accept" header with value "text/*; q=0.0, */*":
 %%         accepts_content_type("text/plain") -> false
 %%
-accepts_content_type(ContentType1, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+accepts_content_type(ContentType1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     ContentType = re:replace(ContentType1, "\\s", "", [global, {return, list}]),
     AcceptHeader = accept_header(THIS),
     case mochiweb_util:parse_qvalues(AcceptHeader) of
@@ -801,7 +848,7 @@ accepts_content_type(ContentType1, {?MODULE, [_Socket, _Method, _RawPath, _Versi
 %%         accepts_content_types(["application/json", "text/html"]) ->
 %%             ["text/html", "application/json"]
 %%
-accepted_content_types(Types1, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+accepted_content_types(Types1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     Types = lists:map(
         fun(T) -> re:replace(T, "\\s", "", [global, {return, list}]) end,
         Types1),
@@ -841,7 +888,7 @@ accepted_content_types(Types1, {?MODULE, [_Socket, _Method, _RawPath, _Version,
             [Type || {_Q, Type} <- lists:sort(SortFun, TypesQ)]
     end.
 
-accept_header({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) ->
+accept_header({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
     case get_header_value("Accept", THIS) of
         undefined ->
             "*/*";
similarity index 68%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_response.erl
rename to deps/mochiweb/src/mochiweb_response.erl
index 308a26bb52aef00577c2c1bdfd1790216bc815bd..195e652e197fb6682dc2bcbcce141f505f4926cf 100644 (file)
@@ -1,5 +1,23 @@
 %% @author Bob Ippolito <bob@mochimedia.com>
 %% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc Response abstraction.
 
similarity index 69%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_session.erl
rename to deps/mochiweb/src/mochiweb_session.erl
index ddf7c46441a4c001ec340c3fbd535cabc8e134f1..c9f88e20322e406e7ef98c080b538b465228dd38 100644 (file)
@@ -1,4 +1,23 @@
 %% @author Asier Azkuenaga Batiz <asier@zebixe.com>
+%% @copyright 2013 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
+%%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
 
 %% @doc HTTP Cookie session. Note that the expiration time travels unencrypted
 %% as far as this module is concerned. In order to achieve more security,
 
 %% @doc Generates a secure encrypted binary convining all the parameters. The
 %% expiration time must be a 32-bit integer.
-%% -spec generate_session_data(
-%%        ExpirationTime :: expiration_time(),
-%%        Data :: iolist(),
-%%        FSessionKey :: key_fun(),
-%%        ServerKey :: iolist()) -> binary().
+-spec generate_session_data(
+        ExpirationTime :: expiration_time(),
+        Data :: iolist(),
+        FSessionKey :: key_fun(),
+        ServerKey :: iolist()) -> binary().
 generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey)
   when is_integer(ExpirationTime), is_function(FSessionKey)->
     BData = ensure_binary(Data),
@@ -39,11 +58,11 @@ generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey)
 %% @doc Convenience wrapper for generate_session_data that returns a
 %% mochiweb cookie with "id" as the key, a max_age of 20000 seconds,
 %% and the current local time as local time.
-%% -spec generate_session_cookie(
-%%        ExpirationTime :: expiration_time(),
-%%        Data :: iolist(),
-%%        FSessionKey :: key_fun(),
-%%        ServerKey :: iolist()) -> header().
+-spec generate_session_cookie(
+        ExpirationTime :: expiration_time(),
+        Data :: iolist(),
+        FSessionKey :: key_fun(),
+        ServerKey :: iolist()) -> header().
 generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey)
   when is_integer(ExpirationTime), is_function(FSessionKey)->
     CookieData = generate_session_data(ExpirationTime, Data,
@@ -55,13 +74,13 @@ generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey)
                                 calendar:universal_time())}]).
 
 %% TODO: This return type is messy to express in the type system.
-%% -spec check_session_cookie(
-    %%     ECookie :: binary(),
-    %%     ExpirationTime :: string(),
-    %%     FSessionKey :: key_fun(),
-    %%     ServerKey :: iolist()) ->
-    %% {Success :: boolean(),
-    %%  ExpTimeAndData :: [integer() | binary()]}.
+-spec check_session_cookie(
+        ECookie :: binary(),
+        ExpirationTime :: string(),
+        FSessionKey :: key_fun(),
+        ServerKey :: iolist()) ->
+    {Success :: boolean(),
+     ExpTimeAndData :: [integer() | binary()]}.
 check_session_cookie(ECookie, ExpirationTime, FSessionKey, ServerKey)
   when is_binary(ECookie), is_integer(ExpirationTime),
        is_function(FSessionKey) ->
@@ -83,7 +102,7 @@ check_session_cookie(_ECookie, _ExpirationTime, _FSessionKey, _ServerKey) ->
     {false, []}.
 
 %% 'Constant' time =:= operator for binary, to mitigate timing attacks.
-%% -spec eq(binary(), binary()) -> boolean().
+-spec eq(binary(), binary()) -> boolean().
 eq(A, B) when is_binary(A) andalso is_binary(B) ->
     eq(A, B, 0).
 
@@ -94,30 +113,51 @@ eq(<<>>, <<>>, 0) ->
 eq(_As, _Bs, _Acc) ->
     false.
 
-%% -spec ensure_binary(iolist()) -> binary().
+-spec ensure_binary(iolist()) -> binary().
 ensure_binary(B) when is_binary(B) ->
     B;
 ensure_binary(L) when is_list(L) ->
     iolist_to_binary(L).
 
-%% -spec encrypt_data(binary(), binary()) -> binary().
+-ifdef(crypto_compatibility).
+-spec encrypt_data(binary(), binary()) -> binary().
 encrypt_data(Data, Key) ->
     IV = crypto:rand_bytes(16),
     Crypt = crypto:aes_cfb_128_encrypt(Key, IV, Data),
     <<IV/binary, Crypt/binary>>.
 
-%% -spec decrypt_data(binary(), binary()) -> binary().
+-spec decrypt_data(binary(), binary()) -> binary().
 decrypt_data(<<IV:16/binary, Crypt/binary>>, Key) ->
     crypto:aes_cfb_128_decrypt(Key, IV, Crypt).
 
-%% -spec gen_key(iolist(), iolist()) -> binary().
+-spec gen_key(iolist(), iolist()) -> binary().
 gen_key(ExpirationTime, ServerKey)->
     crypto:md5_mac(ServerKey, [ExpirationTime]).
 
-%% -spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary().
+-spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary().
 gen_hmac(ExpirationTime, Data, SessionKey, Key) ->
     crypto:sha_mac(Key, [ExpirationTime, Data, SessionKey]).
 
+-else.
+-spec encrypt_data(binary(), binary()) -> binary().
+encrypt_data(Data, Key) ->
+    IV = crypto:rand_bytes(16),
+    Crypt = crypto:block_encrypt(aes_cfb128, Key, IV, Data),
+    <<IV/binary, Crypt/binary>>.
+
+-spec decrypt_data(binary(), binary()) -> binary().
+decrypt_data(<<IV:16/binary, Crypt/binary>>, Key) ->
+    crypto:block_decrypt(aes_cfb128, Key, IV, Crypt).
+
+-spec gen_key(iolist(), iolist()) -> binary().
+gen_key(ExpirationTime, ServerKey)->
+    crypto:hmac(md5, ServerKey, [ExpirationTime]).
+
+-spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary().
+gen_hmac(ExpirationTime, Data, SessionKey, Key) ->
+    crypto:hmac(sha, Key, [ExpirationTime, Data, SessionKey]).
+
+-endif.
 
 -ifdef(TEST).
 -include_lib("eunit/include/eunit.hrl").
diff --git a/deps/mochiweb/src/mochiweb_socket.erl b/deps/mochiweb/src/mochiweb_socket.erl
new file mode 100644 (file)
index 0000000..1756b8e
--- /dev/null
@@ -0,0 +1,148 @@
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc MochiWeb socket - wrapper for plain and ssl sockets.
+
+-module(mochiweb_socket).
+
+-export([listen/4,
+         accept/1, transport_accept/1, finish_accept/1,
+         recv/3, send/2, close/1, port/1, peername/1,
+         setopts/2, getopts/2, type/1, exit_if_closed/1]).
+
+-define(ACCEPT_TIMEOUT, 2000).
+-define(SSL_TIMEOUT, 10000).
+-define(SSL_HANDSHAKE_TIMEOUT, 20000).
+
+
+listen(Ssl, Port, Opts, SslOpts) ->
+    case Ssl of
+        true ->
+            Opts1 = add_unbroken_ciphers_default(Opts ++ SslOpts),
+            Opts2 = add_safe_protocol_versions(Opts1),
+            case ssl:listen(Port, Opts2) of
+                {ok, ListenSocket} ->
+                    {ok, {ssl, ListenSocket}};
+                {error, _} = Err ->
+                    Err
+            end;
+        false ->
+            gen_tcp:listen(Port, Opts)
+    end.
+
+add_unbroken_ciphers_default(Opts) ->
+    Default = filter_unsecure_cipher_suites(ssl:cipher_suites()),
+    Ciphers = filter_broken_cipher_suites(proplists:get_value(ciphers, Opts, Default)),
+    [{ciphers, Ciphers} | proplists:delete(ciphers, Opts)].
+
+filter_broken_cipher_suites(Ciphers) ->
+       case proplists:get_value(ssl_app, ssl:versions()) of
+               "5.3" ++ _ ->
+            lists:filter(fun(Suite) ->
+                                 string:left(atom_to_list(element(1, Suite)), 4) =/= "ecdh"
+                         end, Ciphers);
+        _ ->
+            Ciphers
+    end.
+
+filter_unsecure_cipher_suites(Ciphers) ->
+    lists:filter(fun
+                    ({_,des_cbc,_}) -> false;
+                    ({_,_,md5}) -> false;
+                    (_) -> true
+                 end,
+                 Ciphers).
+
+add_safe_protocol_versions(Opts) ->
+    case proplists:is_defined(versions, Opts) of
+        true ->
+            Opts;
+        false ->
+            Versions = filter_unsafe_protcol_versions(proplists:get_value(available, ssl:versions())),
+            [{versions, Versions} | Opts]
+    end.
+
+filter_unsafe_protcol_versions(Versions) ->
+    lists:filter(fun
+                    (sslv3) -> false;
+                    (_) -> true
+                 end,
+                 Versions).
+
+%% Provided for backwards compatibility only
+accept(ListenSocket) ->
+    case transport_accept(ListenSocket) of
+        {ok, Socket} ->
+            finish_accept(Socket);
+        {error, _} = Err ->
+            Err
+    end.
+
+transport_accept({ssl, ListenSocket}) ->
+    case ssl:transport_accept(ListenSocket, ?SSL_TIMEOUT) of
+        {ok, Socket} ->
+            {ok, {ssl, Socket}};
+        {error, _} = Err ->
+            Err
+    end;
+transport_accept(ListenSocket) ->
+    gen_tcp:accept(ListenSocket, ?ACCEPT_TIMEOUT).
+
+finish_accept({ssl, Socket}) ->
+    case ssl:ssl_accept(Socket, ?SSL_HANDSHAKE_TIMEOUT) of
+        ok ->
+            {ok, {ssl, Socket}};
+        {error, _} = Err ->
+            Err
+    end;
+finish_accept(Socket) ->
+    {ok, Socket}.
+
+recv({ssl, Socket}, Length, Timeout) ->
+    ssl:recv(Socket, Length, Timeout);
+recv(Socket, Length, Timeout) ->
+    gen_tcp:recv(Socket, Length, Timeout).
+
+send({ssl, Socket}, Data) ->
+    ssl:send(Socket, Data);
+send(Socket, Data) ->
+    gen_tcp:send(Socket, Data).
+
+close({ssl, Socket}) ->
+    ssl:close(Socket);
+close(Socket) ->
+    gen_tcp:close(Socket).
+
+port({ssl, Socket}) ->
+    case ssl:sockname(Socket) of
+        {ok, {_, Port}} ->
+            {ok, Port};
+        {error, _} = Err ->
+            Err
+    end;
+port(Socket) ->
+    inet:port(Socket).
+
+peername({ssl, Socket}) ->
+    ssl:peername(Socket);
+peername(Socket) ->
+    inet:peername(Socket).
+
+setopts({ssl, Socket}, Opts) ->
+    ssl:setopts(Socket, Opts);
+setopts(Socket, Opts) ->
+    inet:setopts(Socket, Opts).
+
+getopts({ssl, Socket}, Opts) ->
+    ssl:getopts(Socket, Opts);
+getopts(Socket, Opts) ->
+    inet:getopts(Socket, Opts).
+
+type({ssl, _}) ->
+    ssl;
+type(_) ->
+    plain.
+
+exit_if_closed({error, closed}) ->
+    exit(normal);
+exit_if_closed(Res) ->
+    Res.
similarity index 76%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket_server.erl
rename to deps/mochiweb/src/mochiweb_socket_server.erl
index a3d4da370777f9c5cb54a590be02848c6f6d0bcf..fd5e3824cee6bb51d13b460520369011f7833b9b 100644 (file)
         {port,
          loop,
          name=undefined,
-         %% NOTE: This is currently ignored.
          max=2048,
          ip=any,
          listen=null,
          nodelay=false,
+         recbuf=?RECBUF_SIZE,
          backlog=128,
          active_sockets=0,
          acceptor_pool_size=16,
@@ -74,7 +74,16 @@ parse_options(State=#mochiweb_socket_server{}) ->
 parse_options(Options) ->
     parse_options(Options, #mochiweb_socket_server{}).
 
-parse_options([], State) ->
+parse_options([], State=#mochiweb_socket_server{acceptor_pool_size=PoolSize,
+                                                max=Max}) ->
+    case Max < PoolSize of
+        true ->
+            error_logger:info_report([{warning, "max is set lower than acceptor_pool_size"},
+                                      {max, Max},
+                                      {acceptor_pool_size, PoolSize}]);
+        false ->
+            ok
+    end,
     State;
 parse_options([{name, L} | Rest], State) when is_list(L) ->
     Name = {local, list_to_atom(L)},
@@ -108,13 +117,26 @@ parse_options([{backlog, Backlog} | Rest], State) ->
     parse_options(Rest, State#mochiweb_socket_server{backlog=Backlog});
 parse_options([{nodelay, NoDelay} | Rest], State) ->
     parse_options(Rest, State#mochiweb_socket_server{nodelay=NoDelay});
+parse_options([{recbuf, RecBuf} | Rest], State) when is_integer(RecBuf) orelse
+                                                RecBuf == undefined ->
+    %% XXX: `recbuf' value which is passed to `gen_tcp'
+    %% and value reported by `inet:getopts(P, [recbuf])' may
+    %% differ. They depends on underlying OS. From linux mans:
+    %%
+    %% The kernel doubles this value (to allow space for
+    %% bookkeeping overhead) when it is set using setsockopt(2),
+    %% and this doubled value is returned by getsockopt(2).
+    %%
+    %% See: man 7 socket | grep SO_RCVBUF
+    %% 
+    %% In case undefined is passed instead of the default buffer
+    %% size ?RECBUF_SIZE, no size is set and the OS can control it dynamically
+    parse_options(Rest, State#mochiweb_socket_server{recbuf=RecBuf});
 parse_options([{acceptor_pool_size, Max} | Rest], State) ->
     MaxInt = ensure_int(Max),
     parse_options(Rest,
                   State#mochiweb_socket_server{acceptor_pool_size=MaxInt});
 parse_options([{max, Max} | Rest], State) ->
-    error_logger:info_report([{warning, "TODO: max is currently unsupported"},
-                              {max, Max}]),
     MaxInt = ensure_int(Max),
     parse_options(Rest, State#mochiweb_socket_server{max=MaxInt});
 parse_options([{ssl, Ssl} | Rest], State) when is_boolean(Ssl) ->
@@ -156,13 +178,14 @@ ipv6_supported() ->
             false
     end.
 
-init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=NoDelay}) ->
+init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog,
+                                   nodelay=NoDelay, recbuf=RecBuf}) ->
     process_flag(trap_exit, true),
+
     BaseOpts = [binary,
                 {reuseaddr, true},
                 {packet, 0},
                 {backlog, Backlog},
-                {recbuf, ?RECBUF_SIZE},
                 {exit_on_close, false},
                 {active, false},
                 {nodelay, NoDelay}],
@@ -177,33 +200,41 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=No
         {_, _, _, _, _, _, _, _} -> % IPv6
             [inet6, {ip, Ip} | BaseOpts]
     end,
-    listen(Port, Opts, State).
-
-new_acceptor_pool(Listen,
-                  State=#mochiweb_socket_server{acceptor_pool=Pool,
-                                                acceptor_pool_size=Size,
-                                                loop=Loop}) ->
-    F = fun (_, S) ->
-                Pid = mochiweb_acceptor:start_link(self(), Listen, Loop),
-                sets:add_element(Pid, S)
-        end,
-    Pool1 = lists:foldl(F, Pool, lists:seq(1, Size)),
-    State#mochiweb_socket_server{acceptor_pool=Pool1}.
+    OptsBuf=case RecBuf of 
+        undefined ->
+            Opts;
+        _ ->
+            [{recbuf, RecBuf}|Opts]
+    end,
+    listen(Port, OptsBuf, State).
+
+new_acceptor_pool(State=#mochiweb_socket_server{acceptor_pool_size=Size}) ->
+    lists:foldl(fun (_, S) -> new_acceptor(S) end, State, lists:seq(1, Size)).
+
+new_acceptor(State=#mochiweb_socket_server{acceptor_pool=Pool,
+                                           recbuf=RecBuf,
+                                           loop=Loop,
+                                           listen=Listen}) ->
+    LoopOpts = [{recbuf, RecBuf}],
+    Pid = mochiweb_acceptor:start_link(self(), Listen, Loop, LoopOpts),
+    State#mochiweb_socket_server{
+      acceptor_pool=sets:add_element(Pid, Pool)}.
 
 listen(Port, Opts, State=#mochiweb_socket_server{ssl=Ssl, ssl_opts=SslOpts}) ->
     case mochiweb_socket:listen(Ssl, Port, Opts, SslOpts) of
         {ok, Listen} ->
             {ok, ListenPort} = mochiweb_socket:port(Listen),
-            {ok, new_acceptor_pool(
-                   Listen,
-                   State#mochiweb_socket_server{listen=Listen,
-                                                port=ListenPort})};
+            {ok, new_acceptor_pool(State#mochiweb_socket_server{
+                                     listen=Listen,
+                                     port=ListenPort})};
         {error, Reason} ->
             {stop, Reason}
     end.
 
 do_get(port, #mochiweb_socket_server{port=Port}) ->
     Port;
+do_get(waiting_acceptors, #mochiweb_socket_server{acceptor_pool=Pool}) ->
+    sets:size(Pool);
 do_get(active_sockets, #mochiweb_socket_server{active_sockets=ActiveSockets}) ->
     ActiveSockets.
 
@@ -271,16 +302,31 @@ code_change(_OldVsn, State, _Extra) ->
 
 recycle_acceptor(Pid, State=#mochiweb_socket_server{
                         acceptor_pool=Pool,
-                        listen=Listen,
-                        loop=Loop,
+                        acceptor_pool_size=PoolSize,
+                        max=Max,
                         active_sockets=ActiveSockets}) ->
-    case sets:is_element(Pid, Pool) of
-        true ->
-            Acceptor = mochiweb_acceptor:start_link(self(), Listen, Loop),
-            Pool1 = sets:add_element(Acceptor, sets:del_element(Pid, Pool)),
-            State#mochiweb_socket_server{acceptor_pool=Pool1};
-        false ->
-            State#mochiweb_socket_server{active_sockets=ActiveSockets - 1}
+    %% A socket is considered to be active from immediately after it
+    %% has been accepted (see the {accepted, Pid, Timing} cast above).
+    %% This function will be called when an acceptor is transitioning
+    %% to an active socket, or when either type of Pid dies. An acceptor
+    %% Pid will always be in the acceptor_pool set, and an active socket
+    %% will be in that set during the transition but not afterwards.
+    Pool1 = sets:del_element(Pid, Pool),
+    NewSize = sets:size(Pool1),
+    ActiveSockets1 = case NewSize =:= sets:size(Pool) of
+                         %% Pid has died and it is not in the acceptor set,
+                         %% it must be an active socket.
+                         true -> max(0, ActiveSockets - 1);
+                         false -> ActiveSockets
+                     end,
+    State1 = State#mochiweb_socket_server{
+               acceptor_pool=Pool1,
+               active_sockets=ActiveSockets1},
+    %% Spawn a new acceptor only if it will not overrun the maximum socket
+    %% count or the maximum pool size.
+    case NewSize + ActiveSockets1 < Max andalso NewSize < PoolSize of
+        true -> new_acceptor(State1);
+        false -> State1
     end.
 
 handle_info(Msg, State) when ?is_old_state(State) ->
similarity index 97%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_util.erl
rename to deps/mochiweb/src/mochiweb_util.erl
index a0bc2bc9a5973825751b963da51ff345567fa93f..c6067674cca5ea59488bc70bbfd3c57a04aff3c4 100644 (file)
@@ -13,7 +13,7 @@
 -export([record_to_proplist/2, record_to_proplist/3]).
 -export([safe_relative_path/1, partition/2]).
 -export([parse_qvalues/1, pick_accepted_encodings/3]).
--export([make_io/1, rand_bytes/1, rand_uniform/2]).
+-export([make_io/1]).
 
 -define(PERCENT, 37).  % $\%
 -define(FULLSTOP, 46). % $\.
@@ -357,11 +357,16 @@ urlsplit_query([C | Rest], Acc) ->
 %% @spec guess_mime(string()) -> string()
 %% @doc  Guess the mime type of a file by the extension of its filename.
 guess_mime(File) ->
-    case mochiweb_mime:from_extension(filename:extension(File)) of
-        undefined ->
-            "text/plain";
-        Mime ->
-            Mime
+    case filename:basename(File) of
+        "crossdomain.xml" ->
+            "text/x-cross-domain-policy";
+        Name ->
+            case mochiweb_mime:from_extension(filename:extension(Name)) of
+                undefined ->
+                    "text/plain";
+                Mime ->
+                    Mime
+            end
     end.
 
 %% @spec parse_header(string()) -> {Type, [{K, V}]}
@@ -581,12 +586,6 @@ make_io(Integer) when is_integer(Integer) ->
 make_io(Io) when is_list(Io); is_binary(Io) ->
     Io.
 
-rand_bytes(Count) ->
-    list_to_binary([rand_uniform(0, 16#FF + 1) || _ <- lists:seq(1, Count)]).
-
-rand_uniform(Lo, Hi) ->
-    random:uniform(Hi - Lo) + Lo - 1.
-
 %%
 %% Tests
 %%
@@ -692,12 +691,14 @@ parse_header_test() ->
     ok.
 
 guess_mime_test() ->
-    "text/plain" = guess_mime(""),
-    "text/plain" = guess_mime(".text"),
-    "application/zip" = guess_mime(".zip"),
-    "application/zip" = guess_mime("x.zip"),
-    "text/html" = guess_mime("x.html"),
-    "application/xhtml+xml" = guess_mime("x.xhtml"),
+    ?assertEqual("text/plain", guess_mime("")),
+    ?assertEqual("text/plain", guess_mime(".text")),
+    ?assertEqual("application/zip", guess_mime(".zip")),
+    ?assertEqual("application/zip", guess_mime("x.zip")),
+    ?assertEqual("text/html", guess_mime("x.html")),
+    ?assertEqual("application/xhtml+xml", guess_mime("x.xhtml")),
+    ?assertEqual("text/x-cross-domain-policy", guess_mime("crossdomain.xml")),
+    ?assertEqual("text/x-cross-domain-policy", guess_mime("www/crossdomain.xml")),
     ok.
 
 path_split_test() ->
diff --git a/deps/mochiweb/src/mochiweb_websocket.erl b/deps/mochiweb/src/mochiweb_websocket.erl
new file mode 100644 (file)
index 0000000..ceb6bd6
--- /dev/null
@@ -0,0 +1,281 @@
+-module(mochiweb_websocket).
+-author('lukasz.lalik@zadane.pl').
+
+%% The MIT License (MIT)
+
+%% Copyright (c) 2012 Zadane.pl sp. z o.o.
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+%% @doc Websockets module for Mochiweb. Based on Misultin websockets module.
+
+-export([loop/5, upgrade_connection/2, request/5]).
+-export([send/3]).
+-ifdef(TEST).
+-compile(export_all).
+-endif.
+
+loop(Socket, Body, State, WsVersion, ReplyChannel) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, 0}, {active, once}])),
+    proc_lib:hibernate(?MODULE, request,
+                       [Socket, Body, State, WsVersion, ReplyChannel]).
+
+request(Socket, Body, State, WsVersion, ReplyChannel) ->
+    receive
+        {tcp_closed, _} ->
+            mochiweb_socket:close(Socket),
+            exit(normal);
+        {ssl_closed, _} ->
+            mochiweb_socket:close(Socket),
+            exit(normal);
+        {tcp_error, _, _} ->
+            mochiweb_socket:close(Socket),
+            exit(normal);
+        {Proto, _, WsFrames} when Proto =:= tcp orelse Proto =:= ssl ->
+            case parse_frames(WsVersion, WsFrames, Socket) of
+                close ->
+                    mochiweb_socket:close(Socket),
+                    exit(normal);
+                error ->
+                    mochiweb_socket:close(Socket),
+                    exit(normal);
+                Payload ->
+                    NewState = call_body(Body, Payload, State, ReplyChannel),
+                    loop(Socket, Body, NewState, WsVersion, ReplyChannel)
+            end;
+        _ ->
+            mochiweb_socket:close(Socket),
+            exit(normal)
+    end.
+
+call_body({M, F, A}, Payload, State, ReplyChannel) ->
+    erlang:apply(M, F, [Payload, State, ReplyChannel | A]);
+call_body({M, F}, Payload, State, ReplyChannel) ->
+    M:F(Payload, State, ReplyChannel);
+call_body(Body, Payload, State, ReplyChannel) ->
+    Body(Payload, State, ReplyChannel).
+
+send(Socket, Payload, hybi) ->
+    Prefix = <<1:1, 0:3, 1:4, (payload_length(iolist_size(Payload)))/binary>>,
+    mochiweb_socket:send(Socket, [Prefix, Payload]);
+send(Socket, Payload, hixie) ->
+    mochiweb_socket:send(Socket, [0, Payload, 255]).
+
+upgrade_connection(Req, Body) ->
+    case make_handshake(Req) of
+        {Version, Response} ->
+            Req:respond(Response),
+            Socket = Req:get(socket),
+            ReplyChannel = fun (Payload) ->
+                ?MODULE:send(Socket, Payload, Version)
+            end,
+            Reentry = fun (State) ->
+                ?MODULE:loop(Socket, Body, State, Version, ReplyChannel)
+            end,
+            {Reentry, ReplyChannel};
+        _ ->
+            mochiweb_socket:close(Req:get(socket)),
+            exit(normal)
+    end.
+
+make_handshake(Req) ->
+    SecKey  = Req:get_header_value("sec-websocket-key"),
+    Sec1Key = Req:get_header_value("Sec-WebSocket-Key1"),
+    Sec2Key = Req:get_header_value("Sec-WebSocket-Key2"),
+    Origin = Req:get_header_value(origin),
+    if SecKey =/= undefined ->
+            hybi_handshake(SecKey);
+       Sec1Key =/= undefined andalso Sec2Key =/= undefined ->
+            Host = Req:get_header_value("Host"),
+            Path = Req:get(path),
+            Body = Req:recv(8),
+            Scheme = scheme(Req),
+            hixie_handshake(Scheme, Host, Path, Sec1Key, Sec2Key, Body, Origin);
+       true ->
+          error
+    end.
+
+hybi_handshake(SecKey) ->
+    BinKey = list_to_binary(SecKey),
+    Bin = <<BinKey/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11">>,
+    Challenge = base64:encode(crypto:hash(sha, Bin)),
+    Response = {101, [{"Connection", "Upgrade"},
+                      {"Upgrade", "websocket"},
+                      {"Sec-Websocket-Accept", Challenge}], ""},
+    {hybi, Response}.
+
+scheme(Req) ->
+    case mochiweb_request:get(scheme, Req) of
+        http ->
+            "ws://";
+        https ->
+            "wss://"
+    end.
+
+hixie_handshake(Scheme, Host, Path, Key1, Key2, Body, Origin) ->
+    Ikey1 = [D || D <- Key1, $0 =< D, D =< $9],
+    Ikey2 = [D || D <- Key2, $0 =< D, D =< $9],
+    Blank1 = length([D || D <- Key1, D =:= 32]),
+    Blank2 = length([D || D <- Key2, D =:= 32]),
+    Part1 = erlang:list_to_integer(Ikey1) div Blank1,
+    Part2 = erlang:list_to_integer(Ikey2) div Blank2,
+    Ckey = <<Part1:4/big-unsigned-integer-unit:8,
+            Part2:4/big-unsigned-integer-unit:8,
+            Body/binary>>,
+    Challenge = erlang:md5(Ckey),
+    Location = lists:concat([Scheme, Host, Path]),
+    Response = {101, [{"Upgrade", "WebSocket"},
+                      {"Connection", "Upgrade"},
+                      {"Sec-WebSocket-Origin", Origin},
+                      {"Sec-WebSocket-Location", Location}],
+                Challenge},
+    {hixie, Response}.
+
+parse_frames(hybi, Frames, Socket) ->
+    try parse_hybi_frames(Socket, Frames, []) of
+        Parsed -> process_frames(Parsed, [])
+    catch
+        _:_ -> error
+    end;
+parse_frames(hixie, Frames, _Socket) ->
+    try parse_hixie_frames(Frames, []) of
+        Payload -> Payload
+    catch
+        _:_ -> error
+    end.
+
+%%
+%% Websockets internal functions for RFC6455 and hybi draft
+%%
+process_frames([], Acc) ->
+    lists:reverse(Acc);
+process_frames([{Opcode, Payload} | Rest], Acc) ->
+    case Opcode of
+        8 -> close;
+        _ ->
+            process_frames(Rest, [Payload | Acc])
+    end.
+
+parse_hybi_frames(_, <<>>, Acc) ->
+    lists:reverse(Acc);
+parse_hybi_frames(S, <<_Fin:1,
+                      _Rsv:3,
+                      Opcode:4,
+                      _Mask:1,
+                      PayloadLen:7,
+                      MaskKey:4/binary,
+                      Payload:PayloadLen/binary-unit:8,
+                      Rest/binary>>,
+                  Acc) when PayloadLen < 126 ->
+    Payload2 = hybi_unmask(Payload, MaskKey, <<>>),
+    parse_hybi_frames(S, Rest, [{Opcode, Payload2} | Acc]);
+parse_hybi_frames(S, <<_Fin:1,
+                      _Rsv:3,
+                      Opcode:4,
+                      _Mask:1,
+                      126:7,
+                      PayloadLen:16,
+                      MaskKey:4/binary,
+                      Payload:PayloadLen/binary-unit:8,
+                      Rest/binary>>,
+                  Acc) ->
+    Payload2 = hybi_unmask(Payload, MaskKey, <<>>),
+    parse_hybi_frames(S, Rest, [{Opcode, Payload2} | Acc]);
+parse_hybi_frames(Socket, <<_Fin:1,
+                           _Rsv:3,
+                           _Opcode:4,
+                           _Mask:1,
+                           126:7,
+                           _PayloadLen:16,
+                           _MaskKey:4/binary,
+                           _/binary-unit:8>> = PartFrame,
+                  Acc) ->
+    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, 0}, {active, once}])),
+    receive
+        {tcp_closed, _} ->
+            mochiweb_socket:close(Socket),
+            exit(normal);
+        {ssl_closed, _} ->
+            mochiweb_socket:close(Socket),
+            exit(normal);
+        {tcp_error, _, _} ->
+            mochiweb_socket:close(Socket),
+            exit(normal);
+        {Proto, _, Continuation} when Proto =:= tcp orelse Proto =:= ssl ->
+            parse_hybi_frames(Socket, <<PartFrame/binary, Continuation/binary>>,
+                              Acc);
+        _ ->
+            mochiweb_socket:close(Socket),
+            exit(normal)
+    after
+        5000 ->
+            mochiweb_socket:close(Socket),
+            exit(normal)
+    end;
+parse_hybi_frames(S, <<_Fin:1,
+                      _Rsv:3,
+                      Opcode:4,
+                      _Mask:1,
+                      127:7,
+                      0:1,
+                      PayloadLen:63,
+                      MaskKey:4/binary,
+                      Payload:PayloadLen/binary-unit:8,
+                      Rest/binary>>,
+                  Acc) ->
+    Payload2 = hybi_unmask(Payload, MaskKey, <<>>),
+    parse_hybi_frames(S, Rest, [{Opcode, Payload2} | Acc]).
+
+%% Unmasks RFC 6455 message
+hybi_unmask(<<O:32, Rest/bits>>, MaskKey, Acc) ->
+    <<MaskKey2:32>> = MaskKey,
+    hybi_unmask(Rest, MaskKey, <<Acc/binary, (O bxor MaskKey2):32>>);
+hybi_unmask(<<O:24>>, MaskKey, Acc) ->
+    <<MaskKey2:24, _:8>> = MaskKey,
+    <<Acc/binary, (O bxor MaskKey2):24>>;
+hybi_unmask(<<O:16>>, MaskKey, Acc) ->
+    <<MaskKey2:16, _:16>> = MaskKey,
+    <<Acc/binary, (O bxor MaskKey2):16>>;
+hybi_unmask(<<O:8>>, MaskKey, Acc) ->
+    <<MaskKey2:8, _:24>> = MaskKey,
+    <<Acc/binary, (O bxor MaskKey2):8>>;
+hybi_unmask(<<>>, _MaskKey, Acc) ->
+    Acc.
+
+payload_length(N) ->
+    case N of
+        N when N =< 125 -> << N >>;
+        N when N =< 16#ffff -> << 126, N:16 >>;
+        N when N =< 16#7fffffffffffffff -> << 127, N:64 >>
+    end.
+
+
+%%
+%% Websockets internal functions for hixie-76 websocket version
+%%
+parse_hixie_frames(<<>>, Frames) ->
+  lists:reverse(Frames);
+parse_hixie_frames(<<0, T/binary>>, Frames) ->
+  {Frame, Rest} = parse_hixie(T, <<>>),
+  parse_hixie_frames(Rest, [Frame | Frames]).
+
+parse_hixie(<<255, Rest/binary>>, Buffer) ->
+  {Buffer, Rest};
+parse_hixie(<<H, T/binary>>, Buffer) ->
+  parse_hixie(T, <<Buffer/binary, H>>).
similarity index 81%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/reloader.erl
rename to deps/mochiweb/src/reloader.erl
index 8266b338bc6f9fc1acf47dc4def1901fa0b053ec..8130f45f393c4110727f52e5aab3469aca9cd4c1 100644 (file)
@@ -1,6 +1,24 @@
-%% @copyright 2007 Mochi Media, Inc.
 %% @author Matthew Dempsky <matthew@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+%%
+%% Permission is hereby granted, free of charge, to any person obtaining a
+%% copy of this software and associated documentation files (the "Software"),
+%% to deal in the Software without restriction, including without limitation
+%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
+%% and/or sell copies of the Software, and to permit persons to whom the
+%% Software is furnished to do so, subject to the following conditions:
 %%
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+%% DEALINGS IN THE SOFTWARE.
+
 %% @doc Erlang module for automatically reloading modified modules
 %% during development.
 
similarity index 92%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp.template
rename to deps/mochiweb/support/templates/mochiwebapp.template
index 494260938f0d30db858ec5d8fcc24526abca4d83..c56314c39375411cc88305a0722af00dc3a28eb4 100644 (file)
@@ -13,6 +13,7 @@
 {template, "mochiwebapp_skel/src/mochiapp_sup.erl", "{{dest}}/src/{{appid}}_sup.erl"}.
 {template, "mochiwebapp_skel/src/mochiapp_web.erl", "{{dest}}/src/{{appid}}_web.erl"}.
 {template, "mochiwebapp_skel/start-dev.sh", "{{dest}}/start-dev.sh"}.
+{template, "mochiwebapp_skel/bench.sh", "{{dest}}/bench.sh"}.
 {template, "mochiwebapp_skel/priv/www/index.html", "{{dest}}/priv/www/index.html"}.
 {file, "../../.gitignore", "{{dest}}/.gitignore"}.
 {file, "../../Makefile", "{{dest}}/Makefile"}.
@@ -20,3 +21,4 @@
 {file, "../../rebar", "{{dest}}/rebar"}.
 {chmod, 8#755, "{{dest}}/rebar"}.
 {chmod, 8#755, "{{dest}}/start-dev.sh"}.
+{chmod, 8#755, "{{dest}}/bench.sh"}.
diff --git a/deps/mochiweb/support/templates/mochiwebapp_skel/bench.sh b/deps/mochiweb/support/templates/mochiwebapp_skel/bench.sh
new file mode 100755 (executable)
index 0000000..eb6e9c9
--- /dev/null
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# workaround for rebar mustache template bug
+DEFAULT_PORT={{port}}
+HOST=${HOST:-127.0.0.1}
+PORT=${PORT:-${DEFAULT_PORT}}
+
+BENCH_RUN="siege -q -c400 -r100 -b http://$HOST:$PORT/hello_world"
+
+sleep 120
+
+echo ""
+echo ""
+for i in `seq 1 10`;
+do
+    echo "Running test #$i:"
+    $BENCH_RUN
+    sleep 90
+done
similarity index 89%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_web.erl
rename to deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_web.erl
index 8976265311b4ba09393800ac242236c13c98eea8..8429a880f617cde3376b8faa3c119968b35daa20 100644 (file)
@@ -26,6 +26,9 @@ loop(Req, DocRoot) ->
         case Req:get(method) of
             Method when Method =:= 'GET'; Method =:= 'HEAD' ->
                 case Path of
+                  "hello_world" ->
+                    Req:respond({200, [{"Content-Type", "text/plain"}],
+                    "Hello world!\n"});
                     _ ->
                         Req:serve_file(Path, DocRoot)
                 end;
@@ -44,9 +47,8 @@ loop(Req, DocRoot) ->
                       {type, Type}, {what, What},
                       {trace, erlang:get_stacktrace()}],
             error_logger:error_report(Report),
-            %% NOTE: mustache templates need \\ because they are not awesome.
             Req:respond({500, [{"Content-Type", "text/plain"}],
-                         "request failed, sorry\\n"})
+                         "request failed, sorry\n"})
     end.
 
 %% Internal API
diff --git a/deps/mochiweb/support/templates/mochiwebapp_skel/start-dev.sh b/deps/mochiweb/support/templates/mochiwebapp_skel/start-dev.sh
new file mode 100755 (executable)
index 0000000..65c1692
--- /dev/null
@@ -0,0 +1,7 @@
+#!/bin/sh
+exec erl \
+    -pa ebin deps/*/ebin \
+    -boot start_sasl \
+    -sname {{appid}}_dev \
+    -s {{appid}} \
+    -s reloader
diff --git a/deps/rabbit/CODE_OF_CONDUCT.md b/deps/rabbit/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
similarity index 72%
rename from rabbitmq-server/plugins-src/mochiweb-wrapper/CONTRIBUTING.md
rename to deps/rabbit/CONTRIBUTING.md
index 69a4b4a437fdf25c45c200610d780c7a009146be..45bbcbe62e74c1a8682d2097db8eec955d177b9c 100644 (file)
@@ -20,22 +20,9 @@ If what you are going to work on is a substantial change, please first ask the c
 of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
 
 
-## (Brief) Code of Conduct
+## Code of Conduct
 
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
 
 
 ## Contributor Agreement
similarity index 100%
rename from rabbitmq-server/INSTALL
rename to deps/rabbit/INSTALL
diff --git a/deps/rabbit/LICENSE b/deps/rabbit/LICENSE
new file mode 100644 (file)
index 0000000..9feecea
--- /dev/null
@@ -0,0 +1,8 @@
+This package, the RabbitMQ server is licensed under the MPL. For the
+MPL, please see LICENSE-MPL-RabbitMQ.
+
+The files `mochijson2.erl' and `mochinum.erl' are (c) 2007 Mochi Media, Inc and
+licensed under a MIT license, see LICENSE-MIT-Mochi.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbit/LICENSE-MPL-RabbitMQ b/deps/rabbit/LICENSE-MPL-RabbitMQ
new file mode 100644 (file)
index 0000000..82c7cf5
--- /dev/null
@@ -0,0 +1,455 @@
+                          MOZILLA PUBLIC LICENSE
+                                Version 1.1
+
+                              ---------------
+
+1. Definitions.
+
+     1.0.1. "Commercial Use" means distribution or otherwise making the
+     Covered Code available to a third party.
+
+     1.1. "Contributor" means each entity that creates or contributes to
+     the creation of Modifications.
+
+     1.2. "Contributor Version" means the combination of the Original
+     Code, prior Modifications used by a Contributor, and the Modifications
+     made by that particular Contributor.
+
+     1.3. "Covered Code" means the Original Code or Modifications or the
+     combination of the Original Code and Modifications, in each case
+     including portions thereof.
+
+     1.4. "Electronic Distribution Mechanism" means a mechanism generally
+     accepted in the software development community for the electronic
+     transfer of data.
+
+     1.5. "Executable" means Covered Code in any form other than Source
+     Code.
+
+     1.6. "Initial Developer" means the individual or entity identified
+     as the Initial Developer in the Source Code notice required by Exhibit
+     A.
+
+     1.7. "Larger Work" means a work which combines Covered Code or
+     portions thereof with code not governed by the terms of this License.
+
+     1.8. "License" means this document.
+
+     1.8.1. "Licensable" means having the right to grant, to the maximum
+     extent possible, whether at the time of the initial grant or
+     subsequently acquired, any and all of the rights conveyed herein.
+
+     1.9. "Modifications" means any addition to or deletion from the
+     substance or structure of either the Original Code or any previous
+     Modifications. When Covered Code is released as a series of files, a
+     Modification is:
+          A. Any addition to or deletion from the contents of a file
+          containing Original Code or previous Modifications.
+
+          B. Any new file that contains any part of the Original Code or
+          previous Modifications.
+
+     1.10. "Original Code" means Source Code of computer software code
+     which is described in the Source Code notice required by Exhibit A as
+     Original Code, and which, at the time of its release under this
+     License is not already Covered Code governed by this License.
+
+     1.10.1. "Patent Claims" means any patent claim(s), now owned or
+     hereafter acquired, including without limitation,  method, process,
+     and apparatus claims, in any patent Licensable by grantor.
+
+     1.11. "Source Code" means the preferred form of the Covered Code for
+     making modifications to it, including all modules it contains, plus
+     any associated interface definition files, scripts used to control
+     compilation and installation of an Executable, or source code
+     differential comparisons against either the Original Code or another
+     well known, available Covered Code of the Contributor's choice. The
+     Source Code can be in a compressed or archival form, provided the
+     appropriate decompression or de-archiving software is widely available
+     for no charge.
+
+     1.12. "You" (or "Your")  means an individual or a legal entity
+     exercising rights under, and complying with all of the terms of, this
+     License or a future version of this License issued under Section 6.1.
+     For legal entities, "You" includes any entity which controls, is
+     controlled by, or is under common control with You. For purposes of
+     this definition, "control" means (a) the power, direct or indirect,
+     to cause the direction or management of such entity, whether by
+     contract or otherwise, or (b) ownership of more than fifty percent
+     (50%) of the outstanding shares or beneficial ownership of such
+     entity.
+
+2. Source Code License.
+
+     2.1. The Initial Developer Grant.
+     The Initial Developer hereby grants You a world-wide, royalty-free,
+     non-exclusive license, subject to third party intellectual property
+     claims:
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Initial Developer to use, reproduce,
+          modify, display, perform, sublicense and distribute the Original
+          Code (or portions thereof) with or without Modifications, and/or
+          as part of a Larger Work; and
+
+          (b) under Patents Claims infringed by the making, using or
+          selling of Original Code, to make, have made, use, practice,
+          sell, and offer for sale, and/or otherwise dispose of the
+          Original Code (or portions thereof).
+
+          (c) the licenses granted in this Section 2.1(a) and (b) are
+          effective on the date Initial Developer first distributes
+          Original Code under the terms of this License.
+
+          (d) Notwithstanding Section 2.1(b) above, no patent license is
+          granted: 1) for code that You delete from the Original Code; 2)
+          separate from the Original Code;  or 3) for infringements caused
+          by: i) the modification of the Original Code or ii) the
+          combination of the Original Code with other software or devices.
+
+     2.2. Contributor Grant.
+     Subject to third party intellectual property claims, each Contributor
+     hereby grants You a world-wide, royalty-free, non-exclusive license
+
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Contributor, to use, reproduce, modify,
+          display, perform, sublicense and distribute the Modifications
+          created by such Contributor (or portions thereof) either on an
+          unmodified basis, with other Modifications, as Covered Code
+          and/or as part of a Larger Work; and
+
+          (b) under Patent Claims infringed by the making, using, or
+          selling of  Modifications made by that Contributor either alone
+          and/or in combination with its Contributor Version (or portions
+          of such combination), to make, use, sell, offer for sale, have
+          made, and/or otherwise dispose of: 1) Modifications made by that
+          Contributor (or portions thereof); and 2) the combination of
+          Modifications made by that Contributor with its Contributor
+          Version (or portions of such combination).
+
+          (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
+          effective on the date Contributor first makes Commercial Use of
+          the Covered Code.
+
+          (d)    Notwithstanding Section 2.2(b) above, no patent license is
+          granted: 1) for any code that Contributor has deleted from the
+          Contributor Version; 2)  separate from the Contributor Version;
+          3)  for infringements caused by: i) third party modifications of
+          Contributor Version or ii)  the combination of Modifications made
+          by that Contributor with other software  (except as part of the
+          Contributor Version) or other devices; or 4) under Patent Claims
+          infringed by Covered Code in the absence of Modifications made by
+          that Contributor.
+
+3. Distribution Obligations.
+
+     3.1. Application of License.
+     The Modifications which You create or to which You contribute are
+     governed by the terms of this License, including without limitation
+     Section 2.2. The Source Code version of Covered Code may be
+     distributed only under the terms of this License or a future version
+     of this License released under Section 6.1, and You must include a
+     copy of this License with every copy of the Source Code You
+     distribute. You may not offer or impose any terms on any Source Code
+     version that alters or restricts the applicable version of this
+     License or the recipients' rights hereunder. However, You may include
+     an additional document offering the additional rights described in
+     Section 3.5.
+
+     3.2. Availability of Source Code.
+     Any Modification which You create or to which You contribute must be
+     made available in Source Code form under the terms of this License
+     either on the same media as an Executable version or via an accepted
+     Electronic Distribution Mechanism to anyone to whom you made an
+     Executable version available; and if made available via Electronic
+     Distribution Mechanism, must remain available for at least twelve (12)
+     months after the date it initially became available, or at least six
+     (6) months after a subsequent version of that particular Modification
+     has been made available to such recipients. You are responsible for
+     ensuring that the Source Code version remains available even if the
+     Electronic Distribution Mechanism is maintained by a third party.
+
+     3.3. Description of Modifications.
+     You must cause all Covered Code to which You contribute to contain a
+     file documenting the changes You made to create that Covered Code and
+     the date of any change. You must include a prominent statement that
+     the Modification is derived, directly or indirectly, from Original
+     Code provided by the Initial Developer and including the name of the
+     Initial Developer in (a) the Source Code, and (b) in any notice in an
+     Executable version or related documentation in which You describe the
+     origin or ownership of the Covered Code.
+
+     3.4. Intellectual Property Matters
+          (a) Third Party Claims.
+          If Contributor has knowledge that a license under a third party's
+          intellectual property rights is required to exercise the rights
+          granted by such Contributor under Sections 2.1 or 2.2,
+          Contributor must include a text file with the Source Code
+          distribution titled "LEGAL" which describes the claim and the
+          party making the claim in sufficient detail that a recipient will
+          know whom to contact. If Contributor obtains such knowledge after
+          the Modification is made available as described in Section 3.2,
+          Contributor shall promptly modify the LEGAL file in all copies
+          Contributor makes available thereafter and shall take other steps
+          (such as notifying appropriate mailing lists or newsgroups)
+          reasonably calculated to inform those who received the Covered
+          Code that new knowledge has been obtained.
+
+          (b) Contributor APIs.
+          If Contributor's Modifications include an application programming
+          interface and Contributor has knowledge of patent licenses which
+          are reasonably necessary to implement that API, Contributor must
+          also include this information in the LEGAL file.
+
+               (c)    Representations.
+          Contributor represents that, except as disclosed pursuant to
+          Section 3.4(a) above, Contributor believes that Contributor's
+          Modifications are Contributor's original creation(s) and/or
+          Contributor has sufficient rights to grant the rights conveyed by
+          this License.
+
+     3.5. Required Notices.
+     You must duplicate the notice in Exhibit A in each file of the Source
+     Code.  If it is not possible to put such notice in a particular Source
+     Code file due to its structure, then You must include such notice in a
+     location (such as a relevant directory) where a user would be likely
+     to look for such a notice.  If You created one or more Modification(s)
+     You may add your name as a Contributor to the notice described in
+     Exhibit A.  You must also duplicate this License in any documentation
+     for the Source Code where You describe recipients' rights or ownership
+     rights relating to Covered Code.  You may choose to offer, and to
+     charge a fee for, warranty, support, indemnity or liability
+     obligations to one or more recipients of Covered Code. However, You
+     may do so only on Your own behalf, and not on behalf of the Initial
+     Developer or any Contributor. You must make it absolutely clear than
+     any such warranty, support, indemnity or liability obligation is
+     offered by You alone, and You hereby agree to indemnify the Initial
+     Developer and every Contributor for any liability incurred by the
+     Initial Developer or such Contributor as a result of warranty,
+     support, indemnity or liability terms You offer.
+
+     3.6. Distribution of Executable Versions.
+     You may distribute Covered Code in Executable form only if the
+     requirements of Section 3.1-3.5 have been met for that Covered Code,
+     and if You include a notice stating that the Source Code version of
+     the Covered Code is available under the terms of this License,
+     including a description of how and where You have fulfilled the
+     obligations of Section 3.2. The notice must be conspicuously included
+     in any notice in an Executable version, related documentation or
+     collateral in which You describe recipients' rights relating to the
+     Covered Code. You may distribute the Executable version of Covered
+     Code or ownership rights under a license of Your choice, which may
+     contain terms different from this License, provided that You are in
+     compliance with the terms of this License and that the license for the
+     Executable version does not attempt to limit or alter the recipient's
+     rights in the Source Code version from the rights set forth in this
+     License. If You distribute the Executable version under a different
+     license You must make it absolutely clear that any terms which differ
+     from this License are offered by You alone, not by the Initial
+     Developer or any Contributor. You hereby agree to indemnify the
+     Initial Developer and every Contributor for any liability incurred by
+     the Initial Developer or such Contributor as a result of any such
+     terms You offer.
+
+     3.7. Larger Works.
+     You may create a Larger Work by combining Covered Code with other code
+     not governed by the terms of this License and distribute the Larger
+     Work as a single product. In such a case, You must make sure the
+     requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+
+     If it is impossible for You to comply with any of the terms of this
+     License with respect to some or all of the Covered Code due to
+     statute, judicial order, or regulation then You must: (a) comply with
+     the terms of this License to the maximum extent possible; and (b)
+     describe the limitations and the code they affect. Such description
+     must be included in the LEGAL file described in Section 3.4 and must
+     be included with all distributions of the Source Code. Except to the
+     extent prohibited by statute or regulation, such description must be
+     sufficiently detailed for a recipient of ordinary skill to be able to
+     understand it.
+
+5. Application of this License.
+
+     This License applies to code to which the Initial Developer has
+     attached the notice in Exhibit A and to related Covered Code.
+
+6. Versions of the License.
+
+     6.1. New Versions.
+     Netscape Communications Corporation ("Netscape") may publish revised
+     and/or new versions of the License from time to time. Each version
+     will be given a distinguishing version number.
+
+     6.2. Effect of New Versions.
+     Once Covered Code has been published under a particular version of the
+     License, You may always continue to use it under the terms of that
+     version. You may also choose to use such Covered Code under the terms
+     of any subsequent version of the License published by Netscape. No one
+     other than Netscape has the right to modify the terms applicable to
+     Covered Code created under this License.
+
+     6.3. Derivative Works.
+     If You create or use a modified version of this License (which you may
+     only do in order to apply it to code which is not already Covered Code
+     governed by this License), You must (a) rename Your license so that
+     the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
+     "MPL", "NPL" or any confusingly similar phrase do not appear in your
+     license (except to note that your license differs from this License)
+     and (b) otherwise make it clear that Your version of the license
+     contains terms which differ from the Mozilla Public License and
+     Netscape Public License. (Filling in the name of the Initial
+     Developer, Original Code or Contributor in the notice described in
+     Exhibit A shall not of themselves be deemed to be modifications of
+     this License.)
+
+7. DISCLAIMER OF WARRANTY.
+
+     COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+     WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+     DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
+     THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
+     IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
+     YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
+     COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
+     OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
+     ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+
+     8.1.  This License and the rights granted hereunder will terminate
+     automatically if You fail to comply with terms herein and fail to cure
+     such breach within 30 days of becoming aware of the breach. All
+     sublicenses to the Covered Code which are properly granted shall
+     survive any termination of this License. Provisions which, by their
+     nature, must remain in effect beyond the termination of this License
+     shall survive.
+
+     8.2.  If You initiate litigation by asserting a patent infringement
+     claim (excluding declatory judgment actions) against Initial Developer
+     or a Contributor (the Initial Developer or Contributor against whom
+     You file such action is referred to as "Participant")  alleging that:
+
+     (a)  such Participant's Contributor Version directly or indirectly
+     infringes any patent, then any and all rights granted by such
+     Participant to You under Sections 2.1 and/or 2.2 of this License
+     shall, upon 60 days notice from Participant terminate prospectively,
+     unless if within 60 days after receipt of notice You either: (i)
+     agree in writing to pay Participant a mutually agreeable reasonable
+     royalty for Your past and future use of Modifications made by such
+     Participant, or (ii) withdraw Your litigation claim with respect to
+     the Contributor Version against such Participant.  If within 60 days
+     of notice, a reasonable royalty and payment arrangement are not
+     mutually agreed upon in writing by the parties or the litigation claim
+     is not withdrawn, the rights granted by Participant to You under
+     Sections 2.1 and/or 2.2 automatically terminate at the expiration of
+     the 60 day notice period specified above.
+
+     (b)  any software, hardware, or device, other than such Participant's
+     Contributor Version, directly or indirectly infringes any patent, then
+     any rights granted to You by such Participant under Sections 2.1(b)
+     and 2.2(b) are revoked effective as of the date You first made, used,
+     sold, distributed, or had made, Modifications made by that
+     Participant.
+
+     8.3.  If You assert a patent infringement claim against Participant
+     alleging that such Participant's Contributor Version directly or
+     indirectly infringes any patent where such claim is resolved (such as
+     by license or settlement) prior to the initiation of patent
+     infringement litigation, then the reasonable value of the licenses
+     granted by such Participant under Sections 2.1 or 2.2 shall be taken
+     into account in determining the amount or value of any payment or
+     license.
+
+     8.4.  In the event of termination under Sections 8.1 or 8.2 above,
+     all end user license agreements (excluding distributors and resellers)
+     which have been validly granted by You or any distributor hereunder
+     prior to termination shall survive termination.
+
+9. LIMITATION OF LIABILITY.
+
+     UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
+     (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
+     DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
+     OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
+     ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
+     CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
+     WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
+     COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
+     INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
+     LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
+     RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+     PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
+     EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
+     THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
+
+10. U.S. GOVERNMENT END USERS.
+
+     The Covered Code is a "commercial item," as that term is defined in
+     48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
+     software" and "commercial computer software documentation," as such
+     terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
+     C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
+     all U.S. Government End Users acquire Covered Code with only those
+     rights set forth herein.
+
+11. MISCELLANEOUS.
+
+     This License represents the complete agreement concerning subject
+     matter hereof. If any provision of this License is held to be
+     unenforceable, such provision shall be reformed only to the extent
+     necessary to make it enforceable. This License shall be governed by
+     California law provisions (except to the extent applicable law, if
+     any, provides otherwise), excluding its conflict-of-law provisions.
+     With respect to disputes in which at least one party is a citizen of,
+     or an entity chartered or registered to do business in the United
+     States of America, any litigation relating to this License shall be
+     subject to the jurisdiction of the Federal Courts of the Northern
+     District of California, with venue lying in Santa Clara County,
+     California, with the losing party responsible for costs, including
+     without limitation, court costs and reasonable attorneys' fees and
+     expenses. The application of the United Nations Convention on
+     Contracts for the International Sale of Goods is expressly excluded.
+     Any law or regulation which provides that the language of a contract
+     shall be construed against the drafter shall not apply to this
+     License.
+
+12. RESPONSIBILITY FOR CLAIMS.
+
+     As between Initial Developer and the Contributors, each party is
+     responsible for claims and damages arising, directly or indirectly,
+     out of its utilization of rights under this License and You agree to
+     work with Initial Developer and Contributors to distribute such
+     responsibility on an equitable basis. Nothing herein is intended or
+     shall be deemed to constitute any admission of liability.
+
+13. MULTIPLE-LICENSED CODE.
+
+     Initial Developer may designate portions of the Covered Code as
+     "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
+     Developer permits you to utilize portions of the Covered Code under
+     Your choice of the NPL or the alternative licenses, if any, specified
+     by the Initial Developer in the file described in Exhibit A.
+
+EXHIBIT A -Mozilla Public License.
+
+     ``The contents of this file are subject to the Mozilla Public License
+     Version 1.1 (the "License"); you may not use this file except in
+     compliance with the License. You may obtain a copy of the License at
+     http://www.mozilla.org/MPL/
+
+     Software distributed under the License is distributed on an "AS IS"
+     basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+     License for the specific language governing rights and limitations
+     under the License.
+
+     The Original Code is RabbitMQ.
+
+     The Initial Developer of the Original Code is Pivotal Software, Inc.
+     Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.''
+
+     [NOTE: The text of this Exhibit A may differ slightly from the text of
+     the notices in the Source Code files of the Original Code. You should
+     use the text of this Exhibit A rather than the text found in the
+     Original Code Source Code for Your Modifications.]
diff --git a/deps/rabbit/Makefile b/deps/rabbit/Makefile
new file mode 100644 (file)
index 0000000..d1ffa56
--- /dev/null
@@ -0,0 +1,118 @@
+PROJECT = rabbit
+VERSION ?= $(call get_app_version,src/$(PROJECT).app.src)
+
+DEPS = ranch rabbit_common
+TEST_DEPS = rabbitmq_ct_helpers amqp_client meck proper
+
+define usage_xml_to_erl
+$(subst __,_,$(patsubst $(DOCS_DIR)/rabbitmq%.1.xml, src/rabbit_%_usage.erl, $(subst -,_,$(1))))
+endef
+
+DOCS_DIR     = docs
+MANPAGES     = $(patsubst %.xml, %, $(wildcard $(DOCS_DIR)/*.[0-9].xml))
+WEB_MANPAGES = $(patsubst %.xml, %.man.xml, $(wildcard $(DOCS_DIR)/*.[0-9].xml) $(DOCS_DIR)/rabbitmq-service.xml $(DOCS_DIR)/rabbitmq-echopid.xml)
+USAGES_XML   = $(DOCS_DIR)/rabbitmqctl.1.xml $(DOCS_DIR)/rabbitmq-plugins.1.xml
+USAGES_ERL   = $(foreach XML, $(USAGES_XML), $(call usage_xml_to_erl, $(XML)))
+
+EXTRA_SOURCES += $(USAGES_ERL)
+
+.DEFAULT_GOAL = all
+$(PROJECT).d:: $(EXTRA_SOURCES)
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-build.mk \
+             rabbit_common/mk/rabbitmq-run.mk \
+             rabbit_common/mk/rabbitmq-dist.mk \
+             rabbit_common/mk/rabbitmq-tools.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
+
+# --------------------------------------------------------------------
+# Compilation.
+# --------------------------------------------------------------------
+
+RMQ_ERLC_OPTS += -I $(DEPS_DIR)/rabbit_common/include
+
+ifdef INSTRUMENT_FOR_QC
+RMQ_ERLC_OPTS += -DINSTR_MOD=gm_qc
+else
+RMQ_ERLC_OPTS += -DINSTR_MOD=gm
+endif
+
+ifdef CREDIT_FLOW_TRACING
+RMQ_ERLC_OPTS += -DCREDIT_FLOW_TRACING=true
+endif
+
+ifndef USE_PROPER_QC
+# PropEr needs to be installed for property checking
+# http://proper.softlab.ntua.gr/
+USE_PROPER_QC := $(shell $(ERL) -eval 'io:format({module, proper} =:= code:ensure_loaded(proper)), halt().')
+RMQ_ERLC_OPTS += $(if $(filter true,$(USE_PROPER_QC)),-Duse_proper_qc)
+endif
+
+clean:: clean-extra-sources
+
+clean-extra-sources:
+       $(gen_verbose) rm -f $(EXTRA_SOURCES)
+
+# --------------------------------------------------------------------
+# Documentation.
+# --------------------------------------------------------------------
+
+# xmlto can not read from standard input, so we mess with a tmp file.
+%: %.xml $(DOCS_DIR)/examples-to-end.xsl
+       $(gen_verbose) xmlto --version | \
+           grep -E '^xmlto version 0\.0\.([0-9]|1[1-8])$$' >/dev/null || \
+           opt='--stringparam man.indent.verbatims=0' ; \
+       xsltproc --novalid $(DOCS_DIR)/examples-to-end.xsl $< > $<.tmp && \
+       xmlto -vv -o $(DOCS_DIR) $$opt man $< 2>&1 | (grep -v '^Note: Writing' || :) && \
+       test -f $@ && \
+       rm $<.tmp
+
+# Use tmp files rather than a pipeline so that we get meaningful errors
+# Do not fold the cp into previous line, it's there to stop the file being
+# generated but empty if we fail
+define usage_dep
+$(call usage_xml_to_erl, $(1)):: $(1) $(DOCS_DIR)/usage.xsl
+       $$(gen_verbose) xsltproc --novalid --stringparam modulename "`basename $$@ .erl`" \
+           $(DOCS_DIR)/usage.xsl $$< > $$@.tmp && \
+       sed -e 's/"/\\"/g' -e 's/%QUOTE%/"/g' $$@.tmp > $$@.tmp2 && \
+       fold -s $$@.tmp2 > $$@.tmp3 && \
+       mv $$@.tmp3 $$@ && \
+       rm $$@.tmp $$@.tmp2
+endef
+
+$(foreach XML,$(USAGES_XML),$(eval $(call usage_dep, $(XML))))
+
+# We rename the file before xmlto sees it since xmlto will use the name of
+# the file to make internal links.
+%.man.xml: %.xml $(DOCS_DIR)/html-to-website-xml.xsl
+       $(gen_verbose) cp $< `basename $< .xml`.xml && \
+           xmlto xhtml-nochunks `basename $< .xml`.xml ; \
+       rm `basename $< .xml`.xml && \
+       cat `basename $< .xml`.html | \
+           xsltproc --novalid $(DOCS_DIR)/remove-namespaces.xsl - | \
+             xsltproc --novalid --stringparam original `basename $<` $(DOCS_DIR)/html-to-website-xml.xsl - | \
+             xmllint --format - > $@ && \
+       rm `basename $< .xml`.html
+
+.PHONY: manpages web-manpages distclean-manpages
+
+docs:: manpages web-manpages
+
+manpages: $(MANPAGES)
+       @:
+
+web-manpages: $(WEB_MANPAGES)
+       @:
+
+distclean:: distclean-manpages
+
+distclean-manpages::
+       $(gen_verbose) rm -f $(MANPAGES) $(WEB_MANPAGES)
diff --git a/deps/rabbit/README b/deps/rabbit/README
new file mode 100644 (file)
index 0000000..43bfe00
--- /dev/null
@@ -0,0 +1 @@
+See http://rabbitmq.com and https://github.com/rabbitmq/rabbitmq-server.
diff --git a/deps/rabbit/README.md b/deps/rabbit/README.md
new file mode 100644 (file)
index 0000000..ae8fd80
--- /dev/null
@@ -0,0 +1,49 @@
+# RabbitMQ Server
+
+[RabbitMQ](http://rabbitmq.com) is a [feature rich](http://www.rabbitmq.com/features.html), multi-protocol messaging broker. It supports:
+
+ * AMQP 0-9-1
+ * STOMP 1.0 through 1.2
+ * MQTT 3.1.1
+ * AMQP 1.0
+
+
+## Installation
+
+ * [Installation guides](http://www.rabbitmq.com/download.html) for various platforms
+
+
+## Tutorials & Documentation
+
+ * [RabbitMQ tutorials](http://www.rabbitmq.com/getstarted.html)
+ * [Documentation guides](http://www.rabbitmq.com/documentation.html)
+ * [Documentation Source Code](https://github.com/rabbitmq/rabbitmq-website/)
+ * [Client libraries and tools](http://www.rabbitmq.com/devtools.html)
+ * [Tutorials Source Code](https://github.com/rabbitmq/rabbitmq-tutorials/)
+
+## Getting Help
+
+ * [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+ * `#rabbitmq` on Freenode
+ * [Commercial RabbitMQ support](http://www.rabbitmq.com/services.html) from [Pivotal](http://pivotal.io)
+
+
+## Contributing
+
+See [CONTRIBUTING.md](./CONTRIBUTING.md) and our [development process overview](http://www.rabbitmq.com/github.html).
+
+
+## License
+
+RabbitMQ server is [licensed under the MPL](LICENSE-MPL-RabbitMQ).
+
+
+## Building From Source and Packaging
+
+ * [Building RabbitMQ Server From Source](http://www.rabbitmq.com/build-server.html)
+ * [Building RabbitMQ Server Packages](http://www.rabbitmq.com/build-server.html)
+
+
+## Copyright
+
+(c) Pivotal Software Inc., 2007-2016.
diff --git a/deps/rabbit/check_xref b/deps/rabbit/check_xref
new file mode 100755 (executable)
index 0000000..78f932d
--- /dev/null
@@ -0,0 +1,291 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+-mode(compile).
+
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is Pivotal Software, Inc.
+%% Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+main(["-h"]) ->
+    io:format("usage: check_xref PluginDirectory (options)~n"
+              "options:~n"
+              "      -q - quiet mode (only prints errors)~n"
+              "      -X - disables all filters~n");
+main([PluginsDir|Argv]) ->
+    put({?MODULE, quiet}, lists:member("-q", Argv)),
+    put({?MODULE, no_filters}, lists:member("-X", Argv)),
+
+    {ok, Cwd} = file:get_cwd(),
+    code:add_pathz(filename:join(Cwd, "ebin")),
+    LibDir = filename:join(Cwd, "lib"),
+    case filelib:is_dir(LibDir) of
+        false -> ok;
+        true  -> os:cmd("rm -rf " ++ LibDir)
+    end,
+    Rc = try
+             check(Cwd, PluginsDir, LibDir, checks())
+         catch
+             _:Err ->
+                 io:format(user, "failed: ~p~n", [Err]),
+                 1
+         end,
+    shutdown(Rc, LibDir).
+
+shutdown(Rc, LibDir) ->
+    os:cmd("rm -rf " ++ LibDir),
+    erlang:halt(Rc).
+
+check(Cwd, PluginsDir, LibDir, Checks) ->
+    {ok, Plugins} = file:list_dir(PluginsDir),
+    ok = file:make_dir(LibDir),
+    put({?MODULE, third_party}, []),
+    [begin
+        Source = filename:join(PluginsDir, Plugin),
+        Target = filename:join(LibDir, Plugin),
+        IsExternal = external_dependency(Plugin),
+        AppN = case IsExternal of
+                   true  -> filename:join(LibDir, unmangle_name(Plugin));
+                   false -> filename:join(
+                              LibDir, filename:basename(Plugin, ".ez"))
+               end,
+
+        report(info, "mkdir -p ~s~n", [Target]),
+        filelib:ensure_dir(Target),
+
+        report(info, "cp ~s ~s~n", [Source, Target]),
+        {ok, _} = file:copy(Source, Target),
+
+        report(info, "unzip -d ~s ~s~n", [LibDir, Target]),
+        {ok, _} = zip:unzip(Target, [{cwd, LibDir}]),
+
+        UnpackDir = filename:join(LibDir, filename:basename(Target, ".ez")),
+        report(info, "mv ~s ~s~n", [UnpackDir, AppN]),
+        ok = file:rename(UnpackDir, AppN),
+
+        code:add_patha(filename:join(AppN, "ebin")),
+        case IsExternal of
+            true -> App = list_to_atom(hd(string:tokens(filename:basename(AppN),
+                                                        "-"))),
+                    report(info, "loading ~p~n", [App]),
+                    application:load(App),
+                    store_third_party(App);
+            _    -> ok
+        end
+     end || Plugin <- Plugins,
+            lists:suffix(".ez", Plugin)],
+
+    RabbitAppEbin = filename:join([LibDir, "rabbit", "ebin"]),
+    filelib:ensure_dir(filename:join(RabbitAppEbin, "foo")),
+    {ok, Beams} = file:list_dir("ebin"),
+    [{ok, _} = file:copy(filename:join("ebin", Beam),
+                         filename:join(RabbitAppEbin, Beam)) || Beam <- Beams],
+    xref:start(?MODULE),
+    xref:set_default(?MODULE, [{verbose, false}, {warnings, false}]),
+    xref:set_library_path(?MODULE, code:get_path()),
+    xref:add_release(?MODULE, Cwd, {name, rabbit}),
+    store_unresolved_calls(),
+    Results = lists:flatten([perform_analysis(Q) || Q <- Checks]),
+    report(Results).
+
+%%
+%% Analysis
+%%
+
+perform_analysis({Query, Description, Severity}) ->
+    perform_analysis({Query, Description, Severity, fun(_) -> false end});
+perform_analysis({Query, Description, Severity, Filter}) ->
+    report_progress("Checking whether any code ~s "
+                    "(~s)~n", [Description, Query]),
+    case analyse(Query) of
+        {ok, Analysis} ->
+            [filter(Result, Filter) ||
+                Result <- process_analysis(Query, Description,
+                                           Severity, Analysis)];
+        {error, Module, Reason} ->
+            {analysis_error, {Module, Reason}}
+    end.
+
+partition(Results) ->
+    lists:partition(fun({{_, L}, _}) -> L =:= error end, Results).
+
+analyse(Query) when is_atom(Query) ->
+    xref:analyse(?MODULE, Query, [{verbose, false}]);
+analyse(Query) when is_list(Query) ->
+    xref:q(?MODULE, Query).
+
+process_analysis(Query, Tag, Severity, Analysis) when is_atom(Query) ->
+    [{{Tag, Severity}, MFA} || MFA <- Analysis];
+process_analysis(Query, Tag, Severity, Analysis) when is_list(Query) ->
+    [{{Tag, Severity}, Result} || Result <- Analysis].
+
+checks() ->
+   [{"(XXL)(Lin) ((XC - UC) || (XU - X - B))",
+     "has call to undefined function(s)",
+     error, filters()},
+    {"(Lin) (L - LU)",
+     "has unused local function(s)",
+     error, filters()},
+    {"(E | \"(rabbit|amqp).*\":_/_ || \"gen_server2?\":call/2)",
+     "has 5 sec timeout in",
+     error, filters()},
+    {"(Lin) (LU * (X - XU))",
+     "has exported function(s) only used locally",
+     warning, filters()},
+    {"(Lin) (DF * (XU + LU))", "used deprecated function(s)",
+     warning, filters()}].
+%%    {"(Lin) (X - XU)", "possibly unused export",
+%%     warning, fun filter_unused/1}].
+
+%%
+%% noise filters (can be disabled with -X) - strip uninteresting analyses
+%%
+
+filter(Result, Filter) ->
+    case Filter(Result) of
+        false -> Result;
+        true  -> []  %% NB: this gets flattened out later on....
+    end.
+
+filters() ->
+    case get({?MODULE, no_filters}) of
+        true  -> fun(_) -> false end;
+        _     -> filter_chain([fun is_unresolved_call/1, fun is_callback/1,
+                               fun is_unused/1, fun is_irrelevant/1])
+    end.
+
+filter_chain(FnChain) ->
+    fun(AnalysisResult) ->
+        Result = cleanup(AnalysisResult),
+        lists:foldl(fun(F, false) -> F(Result);
+                       (_F, true) -> true
+                    end, false, FnChain)
+    end.
+
+cleanup({{_, _},{{{{_,_,_}=MFA1,_},{{_,_,_}=MFA2,_}},_}}) -> {MFA1, MFA2};
+cleanup({{_, _},{{{_,_,_}=MFA1,_},{{_,_,_}=MFA2,_}}})     -> {MFA1, MFA2};
+cleanup({{_, _},{{_,_,_}=MFA1,{_,_,_}=MFA2},_})           -> {MFA1, MFA2};
+cleanup({{_, _},{{_,_,_}=MFA1,{_,_,_}=MFA2}})             -> {MFA1, MFA2};
+cleanup({{_, _}, {_,_,_}=MFA})                            -> MFA;
+cleanup({{_, _}, {{_,_,_}=MFA,_}})                        -> MFA;
+cleanup({{_,_,_}=MFA, {_,_,_}})                           -> MFA;
+cleanup({{_,_,_}=MFA, {_,_,_},_})                         -> MFA;
+cleanup(Other)                                            -> Other.
+
+is_irrelevant({{M,_,_}, {_,_,_}}) ->
+    is_irrelevant(M);
+is_irrelevant({M,_,_}) ->
+    is_irrelevant(M);
+is_irrelevant(Mod) when is_atom(Mod) ->
+    lists:member(Mod, get({?MODULE, third_party})).
+
+is_unused({{_,_,_}=MFA, {_,_,_}}) ->
+    is_unused(MFA);
+is_unused({M,_F,_A}) ->
+    lists:suffix("_tests", atom_to_list(M));
+is_unused(_) ->
+    false.
+
+is_unresolved_call({_, F, A}) ->
+    UC = get({?MODULE, unresolved_calls}),
+    sets:is_element({'$M_EXPR', F, A}, UC);
+is_unresolved_call(_) ->
+    false.
+
+%% TODO: cache this....
+is_callback({M,_,_}=MFA) ->
+    Attributes = M:module_info(attributes),
+    Behaviours = proplists:append_values(behaviour, Attributes),
+    {_, Callbacks} = lists:foldl(fun acc_behaviours/2, {M, []}, Behaviours),
+    lists:member(MFA, Callbacks);
+is_callback(_) ->
+    false.
+
+acc_behaviours(B, {M, CB}=Acc) ->
+    case catch(B:behaviour_info(callbacks)) of
+        [{_,_} | _] = Callbacks ->
+            {M, CB ++ [{M, F, A} || {F,A} <- Callbacks]};
+        _ ->
+            Acc
+    end.
+
+%%
+%% reporting/output
+%%
+
+report(Results) ->
+    [report_failures(F) || F <- Results],
+    {Errors, Warnings} = partition(Results),
+    report(info, "Completed: ~p errors, ~p warnings~n",
+                 [length(Errors), length(Warnings)]),
+    case length(Errors) > 0 of
+        true  -> 1;
+        false -> 0
+    end.
+
+report_failures({analysis_error, {Mod, Reason}}) ->
+    report(error, "~s:0 Analysis Error: ~p~n", [source_file(Mod), Reason]);
+report_failures({{Tag, Level}, {{{{M,_,_},L},{{M2,F2,A2},_}},_}}) ->
+    report(Level, "~s:~w ~s ~p:~p/~p~n",
+           [source_file(M), L, Tag, M2, F2, A2]);
+report_failures({{Tag, Level}, {{M,F,A},L}}) ->
+    report(Level, "~s:~w ~s ~p:~p/~p~n", [source_file(M), L, Tag, M, F, A]);
+report_failures({{Tag, Level}, {M,F,A}}) ->
+    report(Level, "~s:unknown ~s ~p:~p/~p~n", [source_file(M), Tag, M, F, A]);
+report_failures(Term) ->
+    report(error, "Ignoring ~p~n", [Term]),
+    ok.
+
+report_progress(Fmt, Args) ->
+    report(info, Fmt, Args).
+
+report(Level, Fmt, Args) ->
+    case {get({?MODULE, quiet}), Level} of
+        {true,  error} -> do_report(lookup_prefix(Level), Fmt, Args);
+        {false, _}     -> do_report(lookup_prefix(Level), Fmt, Args);
+        _              -> ok
+    end.
+
+do_report(Prefix, Fmt, Args) ->
+    io:format(Prefix ++ Fmt, Args).
+
+lookup_prefix(error)   -> "ERROR: ";
+lookup_prefix(warning) -> "WARNING: ";
+lookup_prefix(info)    -> "INFO: ".
+
+source_file(M) ->
+    proplists:get_value(source, M:module_info(compile)).
+
+%%
+%% setup/code-path/file-system ops
+%%
+
+store_third_party(App) ->
+    {ok, AppConfig} = application:get_all_key(App),
+    AppModules = proplists:get_value(modules, AppConfig),
+    put({?MODULE, third_party}, AppModules ++ get({?MODULE, third_party})).
+
+%% TODO: this ought not to be maintained in such a fashion
+external_dependency(Path) ->
+    lists:any(fun(P) -> lists:prefix(P, Path) end,
+              ["mochiweb", "webmachine", "rfc4627", "eldap"]).
+
+unmangle_name(Path) ->
+    [Name, Vsn | _] = re:split(Path, "-", [{return, list}]),
+    string:join([Name, Vsn], "-").
+
+store_unresolved_calls() ->
+    {ok, UCFull} = analyse("UC"),
+    UC = [MFA || {_, {_,_,_} = MFA} <- UCFull],
+    put({?MODULE, unresolved_calls}, sets:from_list(UC)).
diff --git a/deps/rabbit/docs/README-for-packages b/deps/rabbit/docs/README-for-packages
new file mode 100644 (file)
index 0000000..f26889b
--- /dev/null
@@ -0,0 +1,24 @@
+This is rabbitmq-server, a message broker implementing AMQP, STOMP and MQTT.
+
+Most of the documentation for RabbitMQ is provided on the RabbitMQ web
+site. You can see documentation for the current version at:
+
+http://www.rabbitmq.com/documentation.html
+
+and for previous versions at:
+
+http://www.rabbitmq.com/previous.html
+
+Man pages are installed with this package. Of particular interest are
+rabbitmqctl(1), to interact with a running RabbitMQ server, and
+rabbitmq-plugins(1), to enable and disable plugins. These should be
+run as the superuser.
+
+An example configuration file is provided in the same directory as
+this README. Copy it to /etc/rabbitmq/rabbitmq.config to use it. The
+RabbitMQ server must be restarted after changing the configuration
+file.
+
+An example policy file for HA queues is provided in the same directory
+as this README. Copy and chmod +x it to
+/usr/local/sbin/set_rabbitmq_policy to use it with the Pacemaker OCF RA.
diff --git a/deps/rabbit/docs/rabbitmq-server.service.example b/deps/rabbit/docs/rabbitmq-server.service.example
new file mode 100644 (file)
index 0000000..1aa6549
--- /dev/null
@@ -0,0 +1,18 @@
+# systemd unit example
+[Unit]
+Description=RabbitMQ broker
+After=network.target epmd@0.0.0.0.socket
+Wants=network.target epmd@0.0.0.0.socket
+
+[Service]
+Type=notify
+User=rabbitmq
+Group=rabbitmq
+NotifyAccess=all
+TimeoutStartSec=3600
+WorkingDirectory=/var/lib/rabbitmq
+ExecStart=/usr/lib/rabbitmq/bin/rabbitmq-server
+ExecStop=/usr/lib/rabbitmq/bin/rabbitmqctl stop
+
+[Install]
+WantedBy=multi-user.target
similarity index 87%
rename from rabbitmq-server/docs/rabbitmq.config.example
rename to deps/rabbit/docs/rabbitmq.config.example
index 9b3855cd86930ffa8e870c548db9db90402c0881..f425726721cc11ee0a254dc02cdfe933b74bc5fd 100644 (file)
    %%
    %% {ssl_listeners, [5671]},
 
+   %% Number of Erlang processes that will accept connections for the TCP
+   %% and SSL listeners.
+   %%
+   %% {num_tcp_acceptors, 10},
+   %% {num_ssl_acceptors, 1},
+
    %% Maximum time for AMQP 0-8/0-9/0-9-1 handshake (after socket connection
    %% and SSL handshake), in milliseconds.
    %%
    %%
    %% {ssl_handshake_timeout, 5000},
 
+   %% Password hashing implementation. Will only affect newly
+   %% created users. To recalculate the hash for an existing user
+   %% it's necessary to update that user's password.
+   %%
+   %% {password_hashing_module, rabbit_password_hashing_sha256},
+
+   %% Configuration entry encryption.
+   %% See http://www.rabbitmq.com/configure.html#configuration-encryption
+   %%
+   %% To specify the passphrase in the configuration file:
+   %%
+   %% {config_entry_decoder, [{passphrase, <<"mypassphrase">>}]}
+   %%
+   %% To specify the passphrase in an external file:
+   %%
+   %% {config_entry_decoder, [{passphrase, {file, "/path/to/passphrase/file"}}]}
+   %%
+   %% To make the broker request the passphrase when it starts:
+   %%
+   %% {config_entry_decoder, [{passphrase, prompt}]}
+   %%
+   %% To change encryption settings:
+   %%
+   %% {config_entry_decoder, [{cipher,     aes_cbc256},
+   %%                         {hash,       sha512},
+   %%                         {iterations, 1000}]}
+
    %%
    %% Default User / VHost
    %% ====================
 
    %% Set the default AMQP heartbeat delay (in seconds).
    %%
-   %% {heartbeat, 600},
+   %% {heartbeat, 60},
 
    %% Set the max permissible size of an AMQP frame (in bytes).
    %%
    %% {frame_max, 131072},
 
+   %% Set the max frame size the server will accept before connection
+   %% tuning occurs
+   %%
+   %% {initial_frame_max, 4096},
+
    %% Set the max permissible number of channels per connection.
    %% 0 means "no limit".
    %%
    %% See (http://www.erlang.org/doc/man/inet.html#setopts-2) for
    %% further documentation.
    %%
-   %% {tcp_listen_options, [binary,
-   %%                       {packet,        raw},
-   %%                       {reuseaddr,     true},
-   %%                       {backlog,       128},
+   %% {tcp_listen_options, [{backlog,       128},
    %%                       {nodelay,       true},
    %%                       {exit_on_close, false}]},
 
    %%
    %% {vm_memory_high_watermark, 0.4},
 
+   %% Alternatively, we can set a limit (in bytes) of RAM used by the node.
+   %%
+   %% {vm_memory_high_watermark, {absolute, 1073741824}},
+   %%
+   %% Or you can set an absolute value using memory units.
+   %%
+   %% {vm_memory_high_watermark, {absolute, "1024M"}},
+   %%
+   %% Supported units suffixes:
+   %%
+   %% k, kiB: kibibytes (2^10 bytes)
+   %% M, MiB: mebibytes (2^20)
+   %% G, GiB: gibibytes (2^30)
+   %% kB: kilobytes (10^3)
+   %% MB: megabytes (10^6)
+   %% GB: gigabytes (10^9)
+
    %% Fraction of the high watermark limit at which queues start to
    %% page message out to disc in order to free up memory.
    %%
+   %% Values greater than 0.9 can be dangerous and should be used carefully.
+   %%
    %% {vm_memory_high_watermark_paging_ratio, 0.5},
 
+   %% Interval (in milliseconds) at which we perform the check of the memory
+   %% levels against the watermarks.
+   %%
+   %% {memory_monitor_interval, 2500},
+
    %% Set disk free limit (in bytes). Once free disk space reaches this
    %% lower bound, a disk alarm will be set - see the documentation
    %% listed above for more details.
    %%
    %% {disk_free_limit, 50000000},
+   %%
+   %% Or you can set it using memory units (same as in vm_memory_high_watermark)
+   %% {disk_free_limit, "50MB"},
+   %% {disk_free_limit, "50000kB"},
+   %% {disk_free_limit, "2GB"},
 
    %% Alternatively, we can set a limit relative to total available RAM.
    %%
-   %% {disk_free_limit, {mem_relative, 1.0}},
+   %% Values lower than 1.0 can be dangerous and should be used carefully.
+   %% {disk_free_limit, {mem_relative, 2.0}},
 
    %%
    %% Misc/Advanced Options
    %% Listen for SSL connections on a specific port.
    %% {ssl_listeners, [61614]},
 
+   %% Number of Erlang processes that will accept connections for the TCP
+   %% and SSL listeners.
+   %%
+   %% {num_tcp_acceptors, 10},
+   %% {num_ssl_acceptors, 1},
+
    %% Additional SSL options
 
    %% Extract a name from the client's certificate when using SSL.
    %% {tcp_listeners, [1883]},
    %% {ssl_listeners, []},
 
+   %% Number of Erlang processes that will accept connections for the TCP
+   %% and SSL listeners.
+   %%
+   %% {num_tcp_acceptors, 10},
+   %% {num_ssl_acceptors, 1},
+
    %% TCP/Socket options (as per the broker configuration).
    %%
-   %% {tcp_listen_options, [binary,
-   %%                       {packet,    raw},
-   %%                       {reuseaddr, true},
-   %%                       {backlog,   128},
+   %% {tcp_listen_options, [{backlog,   128},
    %%                       {nodelay,   true}]}
   ]},
 
similarity index 85%
rename from rabbitmq-server/docs/rabbitmqctl.1.xml
rename to deps/rabbit/docs/rabbitmqctl.1.xml
index 92d48466383d6c0135a8a39b8847c665b6b46453..217d2d93ca1fe064deeee0a9b47ba4787c6c3cff 100644 (file)
               suffixed files.
             </para>
             <para>
-              When the target files do not exist they are created.
-              When no <option>suffix</option> is specified, the empty
-              log files are simply created at the original location;
-              no rotation takes place.
+              When the target files do not exist they are created. When
+              no <option>suffix</option> is specified, no rotation takes
+              place - log files are just re-opened.
             </para>
             <para role="example-prefix">For example:</para>
             <screen role="example">rabbitmqctl rotate_logs .1</screen>
             </para>
           </listitem>
         </varlistentry>
+
+        <varlistentry>
+          <term><cmdsynopsis><command>hipe_compile</command> <arg choice="req"><replaceable>directory</replaceable></arg></cmdsynopsis></term>
+          <listitem>
+            <para>
+              Performs HiPE-compilation and caches resulting
+              .beam-files in the given directory.
+            </para>
+            <para>
+              Parent directories are created if necessary. Any
+              existing <command>.beam</command> files from the
+              directory are automatically deleted prior to
+              compilation.
+            </para>
+            <para>
+              To use these precompiled files, you should set the
+              <command>RABBITMQ_SERVER_CODE_PATH</command> environment
+              variable to the directory specified in the
+              <command>hipe_compile</command> invocation.
+            </para>
+            <para role="example-prefix">For example:</para>
+            <screen role="example">rabbitmqctl hipe_compile /tmp/rabbit-hipe/ebin</screen>
+            <para role="example">
+              HiPE-compiles modules and stores them to /tmp/rabbit-hipe/ebin directory.
+            </para>
+          </listitem>
+        </varlistentry>
       </variablelist>
     </refsect2>
 
           </listitem>
         </varlistentry>
         <varlistentry>
-          <term><cmdsynopsis><command>sync_queue</command> <arg choice="req">queue</arg></cmdsynopsis>
+          <term><cmdsynopsis><command>sync_queue</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req">queue</arg></cmdsynopsis>
           </term>
           <listitem>
             <variablelist>
           </listitem>
         </varlistentry>
         <varlistentry>
-          <term><cmdsynopsis><command>cancel_sync_queue</command> <arg choice="req">queue</arg></cmdsynopsis>
+          <term><cmdsynopsis><command>cancel_sync_queue</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req">queue</arg></cmdsynopsis>
           </term>
           <listitem>
             <variablelist>
           </listitem>
         </varlistentry>
         <varlistentry>
-          <term><cmdsynopsis><command>purge_queue</command> <arg choice="req">queue</arg></cmdsynopsis>
+          <term><cmdsynopsis><command>purge_queue</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req">queue</arg></cmdsynopsis>
           </term>
           <listitem>
             <variablelist>
           </listitem>
         </varlistentry>
 
+        <varlistentry>
+          <term>
+          <cmdsynopsis><command>authenticate_user</command> <arg choice="req"><replaceable>username</replaceable></arg> <arg choice="req"><replaceable>password</replaceable></arg></cmdsynopsis>
+          </term>
+          <listitem>
+            <variablelist>
+              <varlistentry>
+                <term>username</term>
+                <listitem><para>The name of the user.</para></listitem>
+              </varlistentry>
+              <varlistentry>
+                <term>password</term>
+                <listitem><para>The password of the user.</para></listitem>
+              </varlistentry>
+            </variablelist>
+            <para role="example-prefix">For example:</para>
+            <screen role="example">rabbitmqctl authenticate_user tonyg verifyit</screen>
+            <para role="example">
+              This command instructs the RabbitMQ broker to authenticate the
+              user named <command>tonyg</command> with password
+              <command>verifyit</command>.
+            </para>
+          </listitem>
+        </varlistentry>
+
         <varlistentry>
           <term><cmdsynopsis><command>set_user_tags</command> <arg choice="req"><replaceable>username</replaceable></arg> <arg choice="req"><replaceable>tag</replaceable> ...</arg></cmdsynopsis></term>
           <listitem>
       </para>
       <variablelist>
         <varlistentry>
-          <term><cmdsynopsis><command>add_vhost</command> <arg choice="req"><replaceable>vhostpath</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>add_vhost</command> <arg choice="req"><replaceable>vhost</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <variablelist>
               <varlistentry>
-                <term>vhostpath</term>
+                <term>vhost</term>
                 <listitem><para>The name of the virtual host entry to create.</para></listitem>
               </varlistentry>
             </variablelist>
         </varlistentry>
 
         <varlistentry>
-          <term><cmdsynopsis><command>delete_vhost</command> <arg choice="req"><replaceable>vhostpath</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>delete_vhost</command> <arg choice="req"><replaceable>vhost</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <variablelist>
               <varlistentry>
-                <term>vhostpath</term>
+                <term>vhost</term>
                 <listitem><para>The name of the virtual host entry to delete.</para></listitem>
               </varlistentry>
             </variablelist>
         </varlistentry>
 
         <varlistentry>
-          <term><cmdsynopsis><command>set_permissions</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="req"><replaceable>user</replaceable></arg> <arg choice="req"><replaceable>conf</replaceable></arg> <arg choice="req"><replaceable>write</replaceable></arg> <arg choice="req"><replaceable>read</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>set_permissions</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req"><replaceable>user</replaceable></arg> <arg choice="req"><replaceable>conf</replaceable></arg> <arg choice="req"><replaceable>write</replaceable></arg> <arg choice="req"><replaceable>read</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <variablelist>
               <varlistentry>
-                <term>vhostpath</term>
+                <term>vhost</term>
                 <listitem><para>The name of the virtual host to which to grant the user access, defaulting to <command>/</command>.</para></listitem>
               </varlistentry>
               <varlistentry>
         </varlistentry>
 
         <varlistentry>
-          <term><cmdsynopsis><command>clear_permissions</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="req"><replaceable>username</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>clear_permissions</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req"><replaceable>username</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <variablelist>
               <varlistentry>
-                <term>vhostpath</term>
+                <term>vhost</term>
                 <listitem><para>The name of the virtual host to which to deny the user access, defaulting to <command>/</command>.</para></listitem>
               </varlistentry>
               <varlistentry>
         </varlistentry>
 
         <varlistentry>
-          <term><cmdsynopsis><command>list_permissions</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>list_permissions</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <variablelist>
               <varlistentry>
-                <term>vhostpath</term>
+                <term>vhost</term>
                 <listitem><para>The name of the virtual host for which to list the users that have been granted access to it, and their permissions. Defaults to <command>/</command>.</para></listitem>
               </varlistentry>
             </variablelist>
       </para>
       <variablelist>
         <varlistentry>
-          <term><cmdsynopsis><command>set_parameter</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="req"><replaceable>component_name</replaceable></arg> <arg choice="req"><replaceable>name</replaceable></arg> <arg choice="req"><replaceable>value</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>set_parameter</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req"><replaceable>component_name</replaceable></arg> <arg choice="req"><replaceable>name</replaceable></arg> <arg choice="req"><replaceable>value</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <para>
               Sets a parameter.
           </listitem>
         </varlistentry>
         <varlistentry>
-          <term><cmdsynopsis><command>clear_parameter</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="req"><replaceable>component_name</replaceable></arg> <arg choice="req"><replaceable>key</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>clear_parameter</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req"><replaceable>component_name</replaceable></arg> <arg choice="req"><replaceable>key</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <para>
               Clears a parameter.
           </listitem>
         </varlistentry>
         <varlistentry>
-          <term><cmdsynopsis><command>list_parameters</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>list_parameters</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <para>
               Lists all parameters for a virtual host.
       </para>
       <variablelist>
         <varlistentry>
-          <term><cmdsynopsis><command>set_policy</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="opt">--priority <replaceable>priority</replaceable></arg> <arg choice="opt">--apply-to <replaceable>apply-to</replaceable></arg> <arg choice="req"><replaceable>name</replaceable></arg> <arg choice="req"><replaceable>pattern</replaceable></arg>  <arg choice="req"><replaceable>definition</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>set_policy</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="opt">--priority <replaceable>priority</replaceable></arg> <arg choice="opt">--apply-to <replaceable>apply-to</replaceable></arg> <arg choice="req"><replaceable>name</replaceable></arg> <arg choice="req"><replaceable>pattern</replaceable></arg>  <arg choice="req"><replaceable>definition</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <para>
               Sets a policy.
           </listitem>
         </varlistentry>
         <varlistentry>
-          <term><cmdsynopsis><command>clear_policy</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="req"><replaceable>name</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>clear_policy</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="req"><replaceable>name</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <para>
               Clears a policy.
           </listitem>
         </varlistentry>
         <varlistentry>
-          <term><cmdsynopsis><command>list_policies</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>list_policies</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <para>
               Lists all policies for a virtual host.
 
       <variablelist>
         <varlistentry role="usage-has-option-list">
-          <term><cmdsynopsis><command>list_queues</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="opt" role="usage-option-list"><replaceable>queueinfoitem</replaceable> ...</arg></cmdsynopsis></term>
+          <term>
+            <cmdsynopsis><command>list_queues</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <group choice="opt"><arg>--offline</arg><arg>--online</arg><arg>--local</arg></group> <arg choice="opt" role="usage-option-list"><replaceable>queueinfoitem</replaceable> ...</arg></cmdsynopsis>
+          </term>
           <listitem>
             <para>
               Returns queue details. Queue details of the <command>/</command> virtual host
               are returned if the "-p" flag is absent. The "-p" flag can be used to
               override this default.
             </para>
+            <para>
+              Displayed queues can be filtered by their status or
+              location using one of the following mutually exclusive
+              options:
+              <variablelist>
+                <varlistentry>
+                  <term><cmdsynopsis><arg choice="opt">--offline</arg></cmdsynopsis></term>
+                  <listitem>
+                    <para>
+                      List only those durable queues that are not
+                      currently available (more specifically, their master node isn't).
+                    </para>
+                  </listitem>
+                </varlistentry>
+                <varlistentry>
+                  <term><cmdsynopsis><arg choice="opt">--online</arg></cmdsynopsis></term>
+                  <listitem>
+                    <para>
+                      List queues that are currently available (their master node is).
+                    </para>
+                  </listitem>
+                </varlistentry>
+                <varlistentry>
+                  <term><cmdsynopsis><arg choice="opt">--local</arg></cmdsynopsis></term>
+                  <listitem>
+                    <para>
+                      List only those queues whose master process is
+                      located on the current node.
+                    </para>
+                  </listitem>
+                </varlistentry>
+              </variablelist>
+            </para>
             <para>
               The <command>queueinfoitem</command> parameter is used to indicate which queue
               information items to include in the results. The column order in the
                   which is the exclusive owner of the queue. Empty if the
                   queue is non-exclusive.</para></listitem>
               </varlistentry>
+              <varlistentry>
+                <term>exclusive</term>
+                <listitem><para>True if queue is exclusive (i.e. has
+                  owner_pid), false otherwise</para></listitem>
+              </varlistentry>
               <varlistentry>
                 <term>exclusive_consumer_pid</term>
                 <listitem><para>Id of the Erlang process representing the channel of the
                 <term>message_bytes_persistent</term>
                 <listitem><para>Like <command>message_bytes</command> but counting only those messages which are persistent.</para></listitem>
               </varlistentry>
+              <varlistentry>
+                <term>head_message_timestamp</term>
+                <listitem><para>The timestamp property of the first message in the queue, if present. Timestamps of messages only appear when they are in the paged-in state.</para></listitem>
+              </varlistentry>
               <varlistentry>
                 <term>disk_reads</term>
                 <listitem><para>Total number of times messages have been read from disk by this queue since it started.</para></listitem>
         </varlistentry>
 
         <varlistentry role="usage-has-option-list">
-          <term><cmdsynopsis><command>list_exchanges</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="opt" role="usage-option-list"><replaceable>exchangeinfoitem</replaceable> ...</arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>list_exchanges</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="opt" role="usage-option-list"><replaceable>exchangeinfoitem</replaceable> ...</arg></cmdsynopsis></term>
           <listitem>
             <para>
               Returns exchange details. Exchange details of the <command>/</command> virtual host
         </varlistentry>
 
         <varlistentry role="usage-has-option-list">
-          <term><cmdsynopsis><command>list_bindings</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg> <arg choice="opt" role="usage-option-list"><replaceable>bindinginfoitem</replaceable> ...</arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>list_bindings</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg> <arg choice="opt" role="usage-option-list"><replaceable>bindinginfoitem</replaceable> ...</arg></cmdsynopsis></term>
           <listitem>
             <para>
               Returns binding details. By default the bindings for
         </varlistentry>
 
         <varlistentry>
-          <term><cmdsynopsis><command>list_consumers</command> <arg choice="opt">-p <replaceable>vhostpath</replaceable></arg></cmdsynopsis></term>
+          <term><cmdsynopsis><command>list_consumers</command> <arg choice="opt">-p <replaceable>vhost</replaceable></arg></cmdsynopsis></term>
           <listitem>
             <para>
               List consumers, i.e. subscriptions to a queue's message
           </listitem>
         </varlistentry>
 
+        <varlistentry>
+          <term><cmdsynopsis><command>node_health_check</command></cmdsynopsis></term>
+          <listitem>
+            <para>
+              Health check of the RabbitMQ node. Verifies the rabbit application is
+              running, list_queues and list_channels return, and alarms are not set.
+            </para>
+            <para role="example-prefix">For example:</para>
+            <screen role="example">rabbitmqctl node_health_check -n rabbit@stringer</screen>
+            <para role="example">
+              This command performs a health check on the RabbitMQ node.
+            </para>
+          </listitem>
+        </varlistentry>
+
         <varlistentry>
           <term><cmdsynopsis><command>environment</command></cmdsynopsis></term>
           <listitem>
             </variablelist>
           </listitem>
         </varlistentry>
+        <varlistentry>
+          <term><cmdsynopsis><command>set_vm_memory_high_watermark absolute</command> <arg choice="req"><replaceable>memory_limit</replaceable></arg></cmdsynopsis></term>
+          <listitem>
+            <variablelist>
+              <varlistentry>
+                <term>memory_limit</term>
+                <listitem><para>
+                    The new memory limit at which flow control is
+                    triggered, expressed in bytes as an integer number
+                    greater than or equal to 0 or as a string with memory units
+                    (e.g. 512M or 1G). Available units are:
+                    k, kiB: kibibytes (2^10 bytes)
+                    M, MiB: mebibytes (2^20)
+                    G, GiB: gibibytes (2^30)
+                    kB: kilobytes (10^3)
+                    MB: megabytes (10^6)
+                    GB: gigabytes (10^9)
+                </para></listitem>
+              </varlistentry>
+            </variablelist>
+          </listitem>
+        </varlistentry>
+        <varlistentry>
+          <term><cmdsynopsis><command>set_disk_free_limit</command> <arg choice="req"><replaceable>disk_limit</replaceable></arg></cmdsynopsis></term>
+          <listitem>
+            <variablelist>
+              <varlistentry>
+                <term>disk_limit</term>
+                <listitem><para>
+                    Lower bound limit as an integer in bytes or a string with memory units (see vm_memory_high_watermark),
+                    e.g. 512M or 1G. Once free disk space reaches the limit, a disk alarm will be set.
+                </para></listitem>
+              </varlistentry>
+            </variablelist>
+          </listitem>
+        </varlistentry>
+        <varlistentry>
+          <term><cmdsynopsis><command>set_disk_free_limit mem_relative</command> <arg choice="req"><replaceable>fraction</replaceable></arg></cmdsynopsis></term>
+          <listitem>
+            <variablelist>
+              <varlistentry>
+                <term>fraction</term>
+                <listitem><para>
+                    Limit relative to the total amount of available RAM
+                    as a non-negative floating point number.
+                    Values lower than 1.0 can be dangerous and
+                    should be used carefully.
+                </para></listitem>
+              </varlistentry>
+            </variablelist>
+          </listitem>
+        </varlistentry>
+        <varlistentry>
+          <!-- one-line formatting matters for rabbit_ctl_usage.erl code generation -->
+          <term><cmdsynopsis><command>encode</command> <arg choice="opt">--decode</arg> <arg choice="opt"><replaceable>value</replaceable></arg> <arg choice="opt"><replaceable>passphrase</replaceable></arg> <arg choice="opt">--list-ciphers</arg> <arg choice="opt">--list-hashes</arg> <arg choice="opt">--cipher <replaceable>cipher</replaceable></arg> <arg choice="opt">--hash <replaceable>hash</replaceable></arg> <arg choice="opt">--iterations <replaceable>iterations</replaceable></arg></cmdsynopsis>
+          </term>
+          <listitem>
+            <variablelist>
+              <varlistentry>
+                <term><cmdsynopsis><arg choice="opt">--decode</arg></cmdsynopsis></term>
+                <listitem>
+                  <para>
+                  Flag to decrypt the input value.
+                  </para>
+                  <para role="example-prefix">For example:</para>
+                  <screen role="example">rabbitmqctl encode --decode '{encrypted,'&lt;&lt;"..."&gt;&gt;}' mypassphrase</screen>
+                </listitem>
+              </varlistentry>
+              <varlistentry>
+                <term>
+                  <cmdsynopsis>
+                    <arg choice="opt"><replaceable>value</replaceable></arg>
+                    <arg choice="opt"><replaceable>passphrase</replaceable></arg>
+                  </cmdsynopsis>
+                </term>
+                <listitem>
+                  <para>
+                    Value to encrypt/decrypt and passphrase.
+                  </para>
+                  <para role="example-prefix">For example:</para>
+                  <screen role="example">rabbitmqctl encode '&lt;&lt;"guest"&gt;&gt;' mypassphrase</screen>
+                  <screen role="example">rabbitmqctl encode --decode '{encrypted,'&lt;&lt;"..."&gt;&gt;}' mypassphrase</screen>
+                </listitem>
+              </varlistentry>
+              <varlistentry>
+                <term><cmdsynopsis><arg choice="opt">--list-ciphers</arg></cmdsynopsis></term>
+                <listitem>
+                  <para>
+                    Flag to list the supported ciphers.
+                  </para>
+                  <para role="example-prefix">For example:</para>
+                  <screen role="example">rabbitmqctl encode --list-ciphers</screen>
+                </listitem>
+              </varlistentry>
+              <varlistentry>
+                <term><cmdsynopsis><arg choice="opt">--list-hashes</arg></cmdsynopsis></term>
+                <listitem>
+                  <para>
+                    Flag to list the supported hash algorithms.
+                  </para>
+                  <para role="example-prefix">For example:</para>
+                  <screen role="example">rabbitmqctl encode --list-hashes</screen>
+                </listitem>
+              </varlistentry>
+              <varlistentry>
+                <term>
+                  <cmdsynopsis>
+                    <arg choice="opt">--cipher <replaceable>cipher</replaceable></arg>
+                    <arg choice="opt">--hash <replaceable>hash</replaceable></arg>
+                    <arg choice="opt">--iterations <replaceable>iterations</replaceable></arg>
+                  </cmdsynopsis>
+                </term>
+                <listitem>
+                  <para>
+                    Options to specify the encryption settings. They can be used independently.
+                  </para>
+                  <para role="example-prefix">For example:</para>
+                  <screen role="example">
+rabbitmqctl encode --cipher blowfish_cfb64 --hash sha256 --iterations 10000 \
+                   '&lt;&lt;"guest"&gt;&gt;' mypassphrase</screen>
+                </listitem>
+              </varlistentry>
+            </variablelist>
+          </listitem>
+        </varlistentry>
       </variablelist>
     </refsect2>
   </refsect1>
diff --git a/deps/rabbit/docs/set_rabbitmq_policy.sh.example b/deps/rabbit/docs/set_rabbitmq_policy.sh.example
new file mode 100644 (file)
index 0000000..f46e901
--- /dev/null
@@ -0,0 +1,4 @@
+# This script is called by rabbitmq-server-ha.ocf during RabbitMQ
+# cluster start-up. It is a convenient place to set your cluster
+# policy, for example:
+# ${OCF_RESKEY_ctl} set_policy ha-all "." '{"ha-mode":"all", "ha-sync-mode":"automatic"}' --apply-to all --priority 0
similarity index 90%
rename from rabbitmq-server/docs/usage.xsl
rename to deps/rabbit/docs/usage.xsl
index 586f830327e2b46f4558e64d1802b9805850805a..0d3cb6f235c4ffaedbc79455b139cc7190a3c62f 100644 (file)
@@ -8,7 +8,7 @@
               encoding="UTF-8"
               indent="no"/>
 <xsl:strip-space elements="*"/>
-<xsl:preserve-space elements="cmdsynopsis arg" />
+<xsl:preserve-space elements="cmdsynopsis arg group" />
 
 <xsl:template match="/">
 <!-- Pull out cmdsynopsis to show the command usage line. -->%% Generated, do not edit!
@@ -68,6 +68,14 @@ usage() -> %QUOTE%Usage:
 <!-- Don't show anything else in command usage -->
 <xsl:template match="text()" mode="command-usage"/>
 
+<xsl:template match="group[@choice='opt']">
+  <xsl:text>[</xsl:text>
+  <xsl:for-each select="arg">
+    <xsl:apply-templates/>
+    <xsl:if test="not(position() = last())"><xsl:text>|</xsl:text></xsl:if>
+  </xsl:for-each>
+  <xsl:text>]</xsl:text>
+</xsl:template>
 <xsl:template match="arg[@choice='opt']">[<xsl:apply-templates/>]</xsl:template>
 <xsl:template match="replaceable">&lt;<xsl:value-of select="."/>&gt;</xsl:template>
 
diff --git a/deps/rabbit/erlang.mk b/deps/rabbit/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards and for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab protocol implemented in erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print(1: package index name)
+# Pretty-print one package's metadata fields from the pkg_* index above.
+# The "Pkg name" line is emitted only when the index name differs from the
+# app name (core_eq is a helper defined elsewhere in erlang.mk).
+# The trailing blank "" and the empty line before endef separate entries
+# when this is expanded repeatedly by the search target.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# `make search q=<term>`: case-insensitive substring search over package
+# names and descriptions (core_lc lower-cases; defined elsewhere in
+# erlang.mk). Without q, every package in the index is printed.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies listed in IGNORE_DEPS are filtered out of ALL_DEPS_DIRS below
+# and therefore never fetched or built. Exported so child makes agree.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Make rebar-built dependencies share our deps directory.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name(1): canonical app name for a dependency — a user-supplied dep_*
+# override keeps the given name, otherwise the pkg_*_name from the index,
+# otherwise $(1) unchanged.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+# dep_repo(1): repository URL — word 2 of a dep_* override, else the index
+# value; insecure git://github.com URLs are rewritten to https://.
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+# dep_commit(1): ref to check out — dep_<name>_commit wins, then word 3 of a
+# dep_* override, then the index default (usually "master").
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# Immediate subdirectories of APPS_DIR (excluding APPS_DIR itself), and the
+# resolved directories of all non-ignored build/runtime dependencies.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Put APPS_DIR and DEPS_DIR on ERL_LIBS exactly once (skip if either is
+# already present in the colon-separated list).
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# Recipe prefix selected by $(V): V=0 prints a terse " DEP <name>" line,
+# V=2 turns on shell tracing, any other value expands to nothing.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# Build every application under APPS_DIR. When we are ourselves being built
+# as an app (IS_APP=1, set by the recursive $(MAKE) below) the target is a
+# no-op to stop infinite recursion. apps.log records which app directories
+# have already been built during this top-level invocation so each is made
+# only once; the top-level make (neither IS_APP nor IS_DEP set) resets it.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# Build all fetched dependencies (after apps). SKIP_DEPS short-circuits the
+# whole target. deps.log plays the same dedup role as apps.log above: each
+# dependency directory is built at most once per top-level invocation, and
+# the log is reset only by the top-level make. A dependency without any
+# makefile is a hard error (exit 2) — dep_autopatch below normally ensures
+# one exists before we get here.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch(1: dep name)
+# Decide how to adapt a freshly fetched dependency so erlang.mk can build it:
+# - already erlang.mk-based: regenerate its .app.src and replace its
+#   bundled erlang.mk with an include of ours;
+# - has a Makefile that includes ../*.mk or mentions rebar (in the Makefile
+#   itself or any non-erlang.mk *.mk file): full autopatch (dep_autopatch2);
+# - has some other Makefile: only regenerate the .app file;
+# - no Makefile at all: full autopatch if it has src/, else a noop Makefile.
+# NOTE(review): erlang/core_* helpers used via $(call ...) are defined
+# elsewhere in erlang.mk, outside this chunk.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2(1: dep name)
+# Full autopatch: evaluate a .app.src.script if present, normalize the
+# .app.src, then either convert a rebar project (fetching rebar on demand)
+# or generate a minimal erlang.mk Makefile for it.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop(1: dep name)
+# Write a do-nothing Makefile so the deps target's "No Makefile" check
+# passes for dependencies that have nothing to build (no src/ directory).
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# The dependency's bundled erlang.mk is replaced by a one-line include of
+# our own copy (path made relative via core_relpath, defined elsewhere).
+# Set NO_AUTOPATCH_ERLANG_MK to keep the dependency's own erlang.mk.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen(1: dep name)
+# Generate a minimal erlang.mk-style Makefile for a plain Erlang dependency
+# (assumes it sits two levels below the project root, hence ../../erlang.mk).
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build a pinned copy of rebar (once, cached in ERLANG_MK_TMP);
+# it is needed by dep_autopatch_rebar.erl to interpret rebar.config files.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar(1: dep name)
+# Convert a rebar-based dependency: preserve any existing Makefile as
+# Makefile.orig.mk, generate an erlang.mk Makefile from its rebar.config
+# (via dep_autopatch_rebar.erl below), and drop any stale prebuilt .app
+# file so it gets rebuilt from the patched .app.src.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# Erlang script (run via $(call erlang,...)) that translates a dependency's
+# rebar.config/rebar.config.script into an equivalent erlang.mk Makefile:
+#   - ERLC_OPTS from erl_opts (defines, includes, parse_transforms)
+#   - DEPS/dep_* lines from the rebar deps list (git/hg/hex/etc.)
+#   - COMPILE_FIRST from erl_first_files
+#   - pre-deps/pre-app hooks from pre_hooks (make/gmake rewritten to $(MAKE))
+#   - a generated c_src/Makefile.erlang.mk from port_specs/port_env
+#   - finally compiles and runs any rebar plugins listed in the config.
+# Every "$$" escapes a "$" for Make, and "$1" is the dependency name; the
+# escaping is load-bearing — do not reformat.
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Erlang script: rewrite the dependency's ebin/$1.app so its 'modules' list
+# matches the .erl files actually present under src/ (recursively).
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+# Erlang script: evaluate src/$1.app.src.script and write the resulting
+# term out as a plain src/$1.app.src (removes the dynamic script layer).
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+# Erlang script: normalize src/$1.app.src (falling back to ebin/$1.app as
+# input if no .app.src exists): empty the modules list (erlang.mk fills it
+# at build time), turn {vsn, git} into the literal "git", and guarantee a
+# 'registered' entry.  Deletes ebin/$1.app when it served as the input.
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+# Fetch methods.  Each dep_fetch_* define is a shell snippet that places the
+# dependency's sources in $(DEPS_DIR)/<name>; the method name is selected by
+# the dep_fetch dispatcher at the bottom of this section.
+
+# Git: clone without checkout, then check out the pinned commit/tag/branch.
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Git submodule: the dep is already a registered submodule of this repo.
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Mercurial: clone without update, then update to the pinned revision.
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Subversion checkout (no revision pinning beyond the repo URL).
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Local copy: recursive copy of a directory on disk.
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Erlang script: download the $(1)-$(2) tarball from the hex.pm S3 mirror
+# and unpack its inner contents.tar.gz into $(DEPS_DIR)/$1.
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback when no fetch method matches: fail the build with status 78.
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch-method name for dep $(1): explicit dep_<name> variable
+# first (legacy format tolerated for sub-deps), then the built-in package
+# index, otherwise "fail".
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+# Template for the per-dependency fetch target ($(DEPS_DIR)/<name>):
+# refuses deps that clash with a local application, fetches via the method
+# chosen by dep_fetch, bootstraps autotools projects (autoreconf +
+# ./configure, configure failures tolerated via the leading "-"), and
+# finally autopatches — with special handling for RabbitMQ's amqp_client
+# and rabbit, which need the codegen/server repositories checked out.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+# Instantiate one fetch target per build/runtime dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into each local application (apps/*) unless we
+# are already inside such a recursion (IS_APP guards infinite nesting).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+# distclean removes the whole deps directory unless deps handling is skipped.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include a plugin makefile from a dependency; the plugin file depends on
+# the dependency having been fetched first ($(2) is the dep directory).
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# For each entry in DEP_PLUGINS: "dep/path.mk" includes that file;
+# a bare "dep" includes dep/plugins.mk.
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL plugin: compiles Django-style templates (*.dtl under DTL_PATH)
+# to ebin/<name>$(DTL_SUFFIX).beam via the erlydtl dependency.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH the module name encodes the subdirectory ("/" -> "_");
+# otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang script: compile each template file passed in $(1) with erlydtl.
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# Compile only templates newer than the .app file ($? holds the stale ones).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin: compiles src/*.proto with protobuffs_compile,
+# producing generated headers in include/ and beams in ebin/.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate .erl from .proto, compile them to ebin/, then drop the
+# intermediate generated .erl files.
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+# Erlang script: run protobuffs_compile on each .proto file in $(1).
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Core application-build configuration and per-tool verbosity helpers.
+# Each *_verbose_0 variant prints a short tag when V=0; *_verbose_2 turns
+# on shell tracing when V=2; the bare *_verbose selects by $(V).
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# If ebin/test exists, a previous test build left test-compiled beams
+# behind; force a clean first so the regular build does not mix them in.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# .app file template.  Two variants: without a project module (library
+# application, no {mod, ...}) and with one (active application that starts
+# $(PROJECT_MOD)).  $(1) is the optional dep id, $(2) the module list.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+# Generated .erl files land in src/ and are compiled like regular sources;
+# generated .hrl/.asn1db files go to include/.
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+# Two passes: compile MIBs to priv/mibs/*.bin, then generate include/*.hrl.
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+# *.xrl/*.yrl are compiled in place to src/*.erl and then built as sources.
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Documentation-only dependencies (DOC_DEPS): fetched into DEPS_DIR and built
+# only when a doc target requests them. With SKIP_DEPS set, doc-deps becomes
+# a no-op so offline/package builds do not touch the network.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Release-only dependencies (REL_DEPS), mirroring the doc-deps plugin above:
+# fetched/built only when building a release, skipped entirely under SKIP_DEPS.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Test build plugin: fetches TEST_DEPS, compiles $(TEST_DIR) sources with
+# TEST_ERLC_OPTS, and rebuilds the application itself with test options the
+# first time (tracked by the ebin/test marker file).
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+# IS_DEP=1 strips -Werror when building dependencies (see compile_erl).
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases below:
+#  1. no src/: project has only tests, just build them;
+#  2. src/ present but no ebin/test marker: clean and rebuild the whole app
+#     with test options, then drop the marker;
+#  3. marker present: app is already a test build, rebuild incrementally.
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Rebar compatibility plugin: generates a rebar.config whose deps and erl_opts
+# mirror this Makefile, so the project can be consumed by rebar-based builds.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Collapse ", " to "," so each erlc option becomes a single make word.
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Drop -Werror; strip the leading '+' from +option style flags.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Hex deps become {Name,"Version"}; everything else becomes a git dep tuple.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export through the environment to preserve newlines when echoing the file.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# AsciiDoc plugin: builds the user guide (PDF + chunked HTML) and man pages
+# from doc/src/ using a2x. Targets are no-ops when the sources are absent.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build man pages from doc/src/manual/*.asciidoc, sort them into doc/manN/
+# by section and gzip them; install-asciidoc copies the gzipped pages into
+# MAN_INSTALL_PATH. No-op when there are no manual sources.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# NOTE(review): fixed swapped id arguments — install's -g takes a *group*
+# (id -g) and -o takes an *owner* (id -u); the original passed the uid to -g
+# and the gid to -o.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove all a2x outputs (guide PDF, chunked HTML, man sections 3 and 7).
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $$XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 59%
rename from rabbitmq-server/include/gm_specs.hrl
rename to deps/rabbit/include/gm_specs.hrl
index bc20b4415d11a04c6c8a9c1d92df790f530ccf57..d03f9938e5652573bc72f021d820c2ff2cc5fa0e 100644 (file)
 %% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
 %%
 
--ifdef(use_specs).
+-type callback_result() :: 'ok' | {'stop', any()} | {'become', atom(), args()}.
+-type args() :: any().
+-type members() :: [pid()].
 
--type(callback_result() :: 'ok' | {'stop', any()} | {'become', atom(), args()}).
--type(args() :: any()).
--type(members() :: [pid()]).
-
--spec(joined/2           :: (args(), members())    -> callback_result()).
--spec(members_changed/3  :: (args(), members(),members()) -> callback_result()).
--spec(handle_msg/3       :: (args(), pid(), any()) -> callback_result()).
--spec(handle_terminate/2 :: (args(), term())       -> any()).
-
--endif.
+-spec joined(args(), members())                    -> callback_result().
+-spec members_changed(args(), members(),members()) -> callback_result().
+-spec handle_msg(args(), pid(), any())             -> callback_result().
+-spec handle_terminate(args(), term())             -> any().
similarity index 53%
rename from rabbitmq-server/include/rabbit_cli.hrl
rename to deps/rabbit/include/rabbit_cli.hrl
index 737bb4ea3dae2c3b3079a5e07ca5e849beb45c01..53be9fcda07626d009754db2d9592522d79ae8ec 100644 (file)
 -define(RAM_OPT, "--ram").
 -define(OFFLINE_OPT, "--offline").
 -define(ONLINE_OPT, "--online").
+-define(LOCAL_OPT, "--local").
 
+-define(DECODE_OPT, "--decode").
+-define(CIPHER_OPT, "--cipher").
+-define(HASH_OPT, "--hash").
+-define(ITERATIONS_OPT, "--iterations").
+-define(LIST_CIPHERS_OPT, "--list-ciphers").
+-define(LIST_HASHES_OPT, "--list-hashes").
 
 -define(NODE_DEF(Node), {?NODE_OPT, {option, Node}}).
 -define(QUIET_DEF, {?QUIET_OPT, flag}).
 -define(VHOST_DEF, {?VHOST_OPT, {option, "/"}}).
--define(TIMEOUT_DEF, {?TIMEOUT_OPT, {option, "infinity"}}).
+-define(TIMEOUT_DEF, {?TIMEOUT_OPT, {option, use_default}}).
 
 -define(VERBOSE_DEF, {?VERBOSE_OPT, flag}).
 -define(MINIMAL_DEF, {?MINIMAL_OPT, flag}).
 -define(RAM_DEF, {?RAM_OPT, flag}).
 -define(OFFLINE_DEF, {?OFFLINE_OPT, flag}).
 -define(ONLINE_DEF, {?ONLINE_OPT, flag}).
+-define(LOCAL_DEF, {?LOCAL_OPT, flag}).
+-define(DECODE_DEF, {?DECODE_OPT, flag}).
+-define(CIPHER_DEF, {?CIPHER_OPT, {option, atom_to_list(rabbit_pbe:default_cipher())}}).
+-define(HASH_DEF, {?HASH_OPT, {option, atom_to_list(rabbit_pbe:default_hash())}}).
+-define(ITERATIONS_DEF, {?ITERATIONS_OPT, {option, integer_to_list(rabbit_pbe:default_iterations())}}).
+-define(LIST_CIPHERS_DEF, {?LIST_CIPHERS_OPT, flag}).
+-define(LIST_HASHES_DEF, {?LIST_HASHES_OPT, flag}).
 
--define(RPC_TIMEOUT, infinity).
+
+%% Subset of standardized exit codes from sysexits.h, see
+%% https://github.com/rabbitmq/rabbitmq-server/issues/396 for discussion.
+-define(EX_OK         ,  0).
+-define(EX_USAGE      , 64).  % Bad command-line arguments.
+-define(EX_DATAERR    , 65).  % Wrong data in command-line arguments.
+-define(EX_NOUSER     , 67).  % The user specified does not exist.
+-define(EX_UNAVAILABLE, 69).  % Could not connect to the target node.
+-define(EX_SOFTWARE   , 70).  % Failed to execute command.
+-define(EX_TEMPFAIL   , 75).  % Temporary error (e.g. something has timed out).
+-define(EX_CONFIG     , 78).  % Misconfiguration detected
diff --git a/deps/rabbit/quickcheck b/deps/rabbit/quickcheck
new file mode 100755 (executable)
index 0000000..59da371
--- /dev/null
@@ -0,0 +1,41 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -sname quickcheck
+-mode(compile).
+
+%% A helper to test quickcheck properties on a running broker
+%% NodeStr is a local broker node name
+%% ModStr is the module containing quickcheck properties
+%% NumTestsStr is the number of test runs; MaxSizeStr is the maximal generated term size
+main([NodeStr, ModStr, NumTestsStr, MaxSizeStr]) ->
+    {ok, Hostname} = inet:gethostname(),
+    Node = list_to_atom(NodeStr ++ "@" ++ Hostname),
+    Mod  = list_to_atom(ModStr),
+    NumTests = erlang:list_to_integer(NumTestsStr),
+    MaxSize = erlang:list_to_integer(MaxSizeStr),
+    case rpc:call(Node, code, ensure_loaded, [proper]) of
+        {module, proper} ->
+            case rpc:call(Node, proper, module,
+                          [Mod] ++ [[{numtests,         NumTests},
+                                     {max_size,         MaxSize},
+                                     {constraint_tries, 200}]]) of
+                [] -> ok;
+                R  -> io:format("~p.~n", [R]),
+                      quit(1)
+            end;
+        {badrpc, Reason} ->
+            io:format("Could not contact node ~p: ~p.~n", [Node, Reason]),
+            quit(2);
+        {error,nofile} ->
+            io:format("Module PropEr was not found on node ~p~n", [Node]),
+            quit(2)
+    end;
+main([]) ->
+    io:format("This script requires a node name and a module.~n").
+
+quit(Status) ->
+    case os:type() of
+        {unix,  _} -> halt(Status);
+        {win32, _} -> init:stop(Status)
+    end.
+
diff --git a/deps/rabbit/rabbitmq-components.mk b/deps/rabbit/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 86%
rename from rabbitmq-server/scripts/rabbitmq-defaults
rename to deps/rabbit/scripts/rabbitmq-defaults
index 26f6af7cff69cee780f332b6f4cade87d4836ec7..baffce80de4930d4fbcfd29f66e05d0c045e23b3 100755 (executable)
@@ -24,6 +24,12 @@ ERL_DIR=
 CLEAN_BOOT_FILE=start_clean
 SASL_BOOT_FILE=start_sasl
 
+if [ -f "${RABBITMQ_HOME}/erlang.mk" ]; then
+    # RabbitMQ is executed from its source directory. The plugins
+    # directory and ERL_LIBS are tuned based on this.
+    RABBITMQ_DEV_ENV=1
+fi
+
 ## Set default values
 
 BOOT_MODULE="rabbit"
@@ -34,6 +40,5 @@ MNESIA_BASE=${SYS_PREFIX}/var/lib/rabbitmq/mnesia
 ENABLED_PLUGINS_FILE=${SYS_PREFIX}/etc/rabbitmq/enabled_plugins
 
 PLUGINS_DIR="${RABBITMQ_HOME}/plugins"
-IO_THREAD_POOL_SIZE=64
 
 CONF_ENV_FILE=${SYS_PREFIX}/etc/rabbitmq/rabbitmq-env.conf
old mode 100755 (executable)
new mode 100644 (file)
similarity index 58%
rename from rabbitmq-server/scripts/rabbitmq-defaults.bat
rename to deps/rabbit/scripts/rabbitmq-defaults.bat
index d1e3b41..8fff5ea
@@ -1,37 +1,49 @@
-@echo off
-
-REM ### next line potentially updated in package install steps
-REM set SYS_PREFIX=
-
-REM ### next line will be updated when generating a standalone release
-REM ERL_DIR=
-set ERL_DIR=
-
-REM These boot files don't appear to be referenced in the batch scripts
-REM set CLEAN_BOOT_FILE=start_clean
-REM set SASL_BOOT_FILE=start_sasl
-
-REM ## Set default values
-
-if "!RABBITMQ_BASE!"=="" (
-    set RABBITMQ_BASE=!APPDATA!\RabbitMQ
-)
-
-REM BOOT_MODULE="rabbit"
-REM CONFIG_FILE=${SYS_PREFIX}/etc/rabbitmq/rabbitmq
-REM LOG_BASE=${SYS_PREFIX}/var/log/rabbitmq
-REM MNESIA_BASE=${SYS_PREFIX}/var/lib/rabbitmq/mnesia
-REM ENABLED_PLUGINS_FILE=${SYS_PREFIX}/etc/rabbitmq/enabled_plugins
-set BOOT_MODULE=rabbit
-set CONFIG_FILE=!RABBITMQ_BASE!\rabbitmq
-set LOG_BASE=!RABBITMQ_BASE!\log
-set MNESIA_BASE=!RABBITMQ_BASE!\db
-set ENABLED_PLUGINS_FILE=!RABBITMQ_BASE!\enabled_plugins
-
-REM PLUGINS_DIR="${RABBITMQ_HOME}/plugins"
-set PLUGINS_DIR=!TDP0!..\plugins
-
-REM CONF_ENV_FILE=${SYS_PREFIX}/etc/rabbitmq/rabbitmq-env.conf
-if "!RABBITMQ_CONF_ENV_FILE!"=="" (
-    set RABBITMQ_CONF_ENV_FILE=!RABBITMQ_BASE!\rabbitmq-env-conf.bat
-)
+@echo off\r
+\r
+REM ### next line potentially updated in package install steps\r
+REM set SYS_PREFIX=\r
+\r
+REM ### next line will be updated when generating a standalone release\r
+REM ERL_DIR=\r
+set ERL_DIR=\r
+\r
+REM These boot files don't appear to be referenced in the batch scripts\r
+REM set CLEAN_BOOT_FILE=start_clean\r
+REM set SASL_BOOT_FILE=start_sasl\r
+\r
+if exist "%RABBITMQ_HOME%\erlang.mk" (\r
+    REM RabbitMQ is executed from its source directory. The plugins\r
+    REM directory and ERL_LIBS are tuned based on this.\r
+    set RABBITMQ_DEV_ENV=1\r
+)\r
+\r
+REM ## Set default values\r
+\r
+if "!RABBITMQ_BASE!"=="" (\r
+    set RABBITMQ_BASE=!APPDATA!\RabbitMQ\r
+)\r
+\r
+REM Make sure $RABBITMQ_BASE contains no non-ASCII characters. We create\r
+REM the directory first so we don't end up creating it later in its "short\r
+REM filename" version.\r
+if not exist "!RABBITMQ_BASE!" (\r
+    mkdir "!RABBITMQ_BASE!"\r
+)\r
+for /f "delims=" %%F in ("!RABBITMQ_BASE!") do set RABBITMQ_BASE=%%~sF\r
+\r
+REM BOOT_MODULE="rabbit"\r
+REM CONFIG_FILE=${SYS_PREFIX}/etc/rabbitmq/rabbitmq\r
+REM LOG_BASE=${SYS_PREFIX}/var/log/rabbitmq\r
+REM MNESIA_BASE=${SYS_PREFIX}/var/lib/rabbitmq/mnesia\r
+REM ENABLED_PLUGINS_FILE=${SYS_PREFIX}/etc/rabbitmq/enabled_plugins\r
+set BOOT_MODULE=rabbit\r
+set CONFIG_FILE=!RABBITMQ_BASE!\rabbitmq\r
+set LOG_BASE=!RABBITMQ_BASE!\log\r
+set MNESIA_BASE=!RABBITMQ_BASE!\db\r
+set ENABLED_PLUGINS_FILE=!RABBITMQ_BASE!\enabled_plugins\r
+\r
+REM PLUGINS_DIR="${RABBITMQ_HOME}/plugins"\r
+for /f "delims=" %%F in ("!TDP0!..\plugins") do set PLUGINS_DIR=%%~dpsF%%~nF%%~xF\r
+\r
+REM CONF_ENV_FILE=${SYS_PREFIX}/etc/rabbitmq/rabbitmq-env.conf\r
+set CONF_ENV_FILE=!RABBITMQ_BASE!\rabbitmq-env-conf.bat\r
old mode 100755 (executable)
new mode 100644 (file)
similarity index 91%
rename from rabbitmq-server/scripts/rabbitmq-echopid.bat
rename to deps/rabbit/scripts/rabbitmq-echopid.bat
index 6262a16..650fcc5
@@ -1,55 +1,55 @@
-@echo off
-
-REM Usage: rabbitmq-echopid.bat <rabbitmq_nodename>
-REM
-REM <rabbitmq_nodename> (s)name of the erlang node to connect to (required)
-
-setlocal
-
-set TDP0=%~dp0
-
-REM Get default settings with user overrides for (RABBITMQ_)<var_name>
-REM Non-empty defaults should be set in rabbitmq-env
-call "!TDP0!\rabbitmq-env.bat"
-
-if "%1"=="" goto fail
-
-:: set timeout vars ::
-set TIMEOUT=10
-set TIMER=1
-
-:: check that wmic exists ::
-set WMIC_PATH=%SYSTEMROOT%\System32\Wbem\wmic.exe
-if not exist "%WMIC_PATH%" (
-  goto fail
-)
-
-:getpid
-for /f "usebackq tokens=* skip=1" %%P IN (`%%WMIC_PATH%% process where "name='erl.exe' and commandline like '%%%RABBITMQ_NAME_TYPE% %1%%'" get processid 2^>nul`) do (
-  set PID=%%P
-  goto echopid
-)
-
-:echopid
-:: check for pid not found ::
-if "%PID%" == "" (
-  PING 127.0.0.1 -n 2 > nul
-  set /a TIMER+=1
-  if %TIMEOUT%==%TIMER% goto fail
-  goto getpid
-)
-
-:: show pid ::
-echo %PID%
-
-:: all done ::
-:ok
-endlocal
-EXIT /B 0
-
-:: something went wrong ::
-:fail
-endlocal
-EXIT /B 1
-
-
+@echo off\r
+\r
+REM Usage: rabbitmq-echopid.bat <rabbitmq_nodename>\r
+REM\r
+REM <rabbitmq_nodename> (s)name of the erlang node to connect to (required)\r
+\r
+setlocal\r
+\r
+set TDP0=%~dp0\r
+\r
+REM Get default settings with user overrides for (RABBITMQ_)<var_name>\r
+REM Non-empty defaults should be set in rabbitmq-env\r
+call "!TDP0!\rabbitmq-env.bat" %~n0\r
+\r
+if "%1"=="" goto fail\r
+\r
+:: set timeout vars ::\r
+set TIMEOUT=10\r
+set TIMER=1\r
+\r
+:: check that wmic exists ::\r
+set WMIC_PATH=%SYSTEMROOT%\System32\Wbem\wmic.exe\r
+if not exist "%WMIC_PATH%" (\r
+  goto fail\r
+)\r
+\r
+:getpid\r
+for /f "usebackq tokens=* skip=1" %%P IN (`%%WMIC_PATH%% process where "name='erl.exe' and commandline like '%%%RABBITMQ_NAME_TYPE% %1%%'" get processid 2^>nul`) do (\r
+  set PID=%%P\r
+  goto echopid\r
+)\r
+\r
+:echopid\r
+:: check for pid not found ::\r
+if "%PID%" == "" (\r
+  PING 127.0.0.1 -n 2 > nul\r
+  set /a TIMER+=1\r
+  if %TIMEOUT%==%TIMER% goto fail\r
+  goto getpid\r
+)\r
+\r
+:: show pid ::\r
+echo %PID%\r
+\r
+:: all done ::\r
+:ok\r
+endlocal\r
+EXIT /B 0\r
+\r
+:: something went wrong ::\r
+:fail\r
+endlocal\r
+EXIT /B 1\r
+\r
+\r
similarity index 52%
rename from rabbitmq-server/scripts/rabbitmq-env
rename to deps/rabbit/scripts/rabbitmq-env
index a5bf52ab6a5b95904e1b8aa5bde027bbeeaf813e..8c33e7c0b7ddfadb50d44204e8ea5ccccddf762e 100755 (executable)
 ##  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
 ##
 
-# We set +e here since since our test for "readlink -f" below needs to
-# be able to fail.
-set +e
-# Determine where this script is really located (if this script is
-# invoked from another script, this is the location of the caller)
-SCRIPT_PATH="$0"
-while [ -h "$SCRIPT_PATH" ] ; do
-    # Determine if readlink -f is supported at all. TODO clean this up.
-    FULL_PATH=`readlink -f $SCRIPT_PATH 2>/dev/null`
-    if [ "$?" != "0" ]; then
-      REL_PATH=`readlink $SCRIPT_PATH`
-      if expr "$REL_PATH" : '/.*' > /dev/null; then
-        SCRIPT_PATH="$REL_PATH"
-      else
-        SCRIPT_PATH="`dirname "$SCRIPT_PATH"`/$REL_PATH"
-      fi
+if [ "$RABBITMQ_ENV_LOADED" = 1 ]; then
+    return 0;
+fi
+
+if [ -z "$RABBITMQ_SCRIPTS_DIR" ]; then
+    # We set +e here since our test for "readlink -f" below needs to
+    # be able to fail.
+    set +e
+    # Determine where this script is really located (if this script is
+    # invoked from another script, this is the location of the caller)
+    SCRIPT_PATH="$0"
+    while [ -h "$SCRIPT_PATH" ] ; do
+        # Determine if readlink -f is supported at all. TODO clean this up.
+        FULL_PATH=`readlink -f $SCRIPT_PATH 2>/dev/null`
+        if [ "$?" != "0" ]; then
+          REL_PATH=`readlink $SCRIPT_PATH`
+          if expr "$REL_PATH" : '/.*' > /dev/null; then
+            SCRIPT_PATH="$REL_PATH"
+          else
+            SCRIPT_PATH="`dirname "$SCRIPT_PATH"`/$REL_PATH"
+          fi
+        else
+          SCRIPT_PATH=$FULL_PATH
+        fi
+    done
+    set -e
+
+    RABBITMQ_SCRIPTS_DIR=`dirname $SCRIPT_PATH`
+fi
+
+rmq_realpath() {
+    local path=$1
+
+    if [ -d "$path" ]; then
+        cd "$path" && pwd
+    elif [ -f "$path" ]; then
+        cd "$(dirname "$path")" && echo $(pwd)/$(basename "$path")
     else
-      SCRIPT_PATH=$FULL_PATH
+        echo "$path"
     fi
-done
-set -e
+}
 
-SCRIPT_DIR=`dirname $SCRIPT_PATH`
-RABBITMQ_HOME="${SCRIPT_DIR}/.."
+RABBITMQ_HOME="$(rmq_realpath "${RABBITMQ_SCRIPTS_DIR}/..")"
 
 ## Set defaults
-. ${SCRIPT_DIR}/rabbitmq-defaults
+. ${RABBITMQ_SCRIPTS_DIR}/rabbitmq-defaults
 
-## Common defaults
-SERVER_ERL_ARGS="+P 1048576"
+DEFAULT_SCHEDULER_BIND_TYPE="db"
+[ "x" = "x$RABBITMQ_SCHEDULER_BIND_TYPE" ] && RABBITMQ_SCHEDULER_BIND_TYPE=${DEFAULT_SCHEDULER_BIND_TYPE}
 
-# warn about old rabbitmq.conf file, if no new one
-if [ -f /etc/rabbitmq/rabbitmq.conf ] && \
-   [ ! -f ${CONF_ENV_FILE} ] ; then
-    echo -n "WARNING: ignoring /etc/rabbitmq/rabbitmq.conf -- "
-    echo "location has moved to ${CONF_ENV_FILE}"
-fi
+DEFAULT_DISTRIBUTION_BUFFER_SIZE=32000
+[ "x" = "x$RABBITMQ_DISTRIBUTION_BUFFER_SIZE" ] && RABBITMQ_DISTRIBUTION_BUFFER_SIZE=${DEFAULT_DISTRIBUTION_BUFFER_SIZE}
+
+## Common defaults
+SERVER_ERL_ARGS="+P 1048576 +t 5000000 +stbt $RABBITMQ_SCHEDULER_BIND_TYPE +zdbbl $RABBITMQ_DISTRIBUTION_BUFFER_SIZE"
 
 # We save the current value of $RABBITMQ_PID_FILE in case it was set by
 # an init script. If $CONF_ENV_FILE overrides it again, we must ignore
@@ -59,7 +77,9 @@ fi
 saved_RABBITMQ_PID_FILE=$RABBITMQ_PID_FILE
 
 ## Get configuration variables from the configure environment file
-[ -f ${CONF_ENV_FILE} ] && . ${CONF_ENV_FILE} || true
+[ "x" = "x$RABBITMQ_CONF_ENV_FILE" ] && RABBITMQ_CONF_ENV_FILE=${CONF_ENV_FILE}
+
+[ -f ${RABBITMQ_CONF_ENV_FILE} ] && . ${RABBITMQ_CONF_ENV_FILE} || true
 
 if [ "$saved_RABBITMQ_PID_FILE" -a \
      "$saved_RABBITMQ_PID_FILE" != "$RABBITMQ_PID_FILE" ]; then
@@ -84,20 +104,37 @@ fi
 
 ##--- Set environment vars RABBITMQ_<var_name> to defaults if not set
 
-rmq_realpath() {
+rmq_normalize_path() {
     local path=$1
 
-    if [ -d "$path" ]; then
-        cd "$path" && pwd
-    elif [ -f "$path" ]; then
-        cd "$(dirname "$path")" && echo $(pwd)/$(basename "$path")
-    else
-        echo "$path"
-    fi
+    # Remove redundant slashes and strip a trailing slash
+    echo "$path" | sed -e 's#/\{2,\}#/#g' -e 's#/$##'
+}
+
+rmq_normalize_path_var() {
+    local var warning
+
+    local prefix="WARNING:"
+
+    for var in "$@"; do
+        local path=$(eval "echo \"\$$var\"")
+        case "$path" in
+        */)
+            warning=1
+            echo "$prefix Removing trailing slash from $var" 1>&2
+            ;;
+        esac
+
+        eval "$var=$(rmq_normalize_path "$path")"
+
+        if [ "x$warning" = "x1" ]; then
+            prefix="        "
+        fi
+    done
 }
 
 rmq_check_if_shared_with_mnesia() {
-    local var
+    local var warning
 
     local mnesia_dir=$(rmq_realpath "${RABBITMQ_MNESIA_DIR}")
     local prefix="WARNING:"
@@ -146,20 +183,35 @@ DEFAULT_NODE_PORT=5672
 [ "x" = "x$RABBITMQ_MNESIA_BASE" ] && RABBITMQ_MNESIA_BASE=${MNESIA_BASE}
 [ "x" = "x$RABBITMQ_SERVER_START_ARGS" ] && RABBITMQ_SERVER_START_ARGS=${SERVER_START_ARGS}
 [ "x" = "x$RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS" ] && RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS=${SERVER_ADDITIONAL_ERL_ARGS}
+[ "x" = "x$RABBITMQ_SERVER_CODE_PATH" ] && RABBITMQ_SERVER_CODE_PATH=${SERVER_CODE_PATH}
 [ "x" = "x$RABBITMQ_MNESIA_DIR" ] && RABBITMQ_MNESIA_DIR=${MNESIA_DIR}
 [ "x" = "x$RABBITMQ_MNESIA_DIR" ] && RABBITMQ_MNESIA_DIR=${RABBITMQ_MNESIA_BASE}/${RABBITMQ_NODENAME}
+[ "x" = "x$RABBITMQ_IGNORE_SIGINT" ] && RABBITMQ_IGNORE_SIGINT="true"
+[ "xtrue" = "x$RABBITMQ_IGNORE_SIGINT" ] && RABBITMQ_IGNORE_SIGINT_FLAG="+B i"
+
+rmq_normalize_path_var \
+    RABBITMQ_CONFIG_FILE \
+    RABBITMQ_LOG_BASE \
+    RABBITMQ_MNESIA_BASE \
+    RABBITMQ_MNESIA_DIR
 
 [ "x" = "x$RABBITMQ_PID_FILE" ] && RABBITMQ_PID_FILE=${PID_FILE}
 [ "x" = "x$RABBITMQ_PID_FILE" ] && RABBITMQ_PID_FILE=${RABBITMQ_MNESIA_DIR}.pid
+rmq_normalize_path_var RABBITMQ_PID_FILE
 
 [ "x" = "x$RABBITMQ_BOOT_MODULE" ] && RABBITMQ_BOOT_MODULE=${BOOT_MODULE}
 
 [ "x" = "x$RABBITMQ_PLUGINS_EXPAND_DIR" ] && RABBITMQ_PLUGINS_EXPAND_DIR=${PLUGINS_EXPAND_DIR}
 [ "x" = "x$RABBITMQ_PLUGINS_EXPAND_DIR" ] && RABBITMQ_PLUGINS_EXPAND_DIR=${RABBITMQ_MNESIA_BASE}/${RABBITMQ_NODENAME}-plugins-expand
+rmq_normalize_path_var RABBITMQ_PLUGINS_EXPAND_DIR
 
+[ "x" != "x$RABBITMQ_ENABLED_PLUGINS_FILE" ] && RABBITMQ_ENABLED_PLUGINS_FILE_source=environment
 [ "x" = "x$RABBITMQ_ENABLED_PLUGINS_FILE" ] && RABBITMQ_ENABLED_PLUGINS_FILE=${ENABLED_PLUGINS_FILE}
+rmq_normalize_path_var RABBITMQ_ENABLED_PLUGINS_FILE
 
+[ "x" != "x$RABBITMQ_PLUGINS_DIR" ] && RABBITMQ_PLUGINS_DIR_source=environment
 [ "x" = "x$RABBITMQ_PLUGINS_DIR" ] && RABBITMQ_PLUGINS_DIR=${PLUGINS_DIR}
+rmq_normalize_path_var RABBITMQ_PLUGINS_DIR
 
 ## Log rotation
 [ "x" = "x$RABBITMQ_LOGS" ] && RABBITMQ_LOGS=${LOGS}
@@ -167,6 +219,10 @@ DEFAULT_NODE_PORT=5672
 [ "x" = "x$RABBITMQ_SASL_LOGS" ] && RABBITMQ_SASL_LOGS=${SASL_LOGS}
 [ "x" = "x$RABBITMQ_SASL_LOGS" ] && RABBITMQ_SASL_LOGS="${RABBITMQ_LOG_BASE}/${RABBITMQ_NODENAME}-sasl.log"
 
+rmq_normalize_path_var \
+    RABBITMQ_LOGS \
+    RABBITMQ_SASL_LOGS
+
 [ "x" = "x$RABBITMQ_CTL_ERL_ARGS" ] && RABBITMQ_CTL_ERL_ARGS=${CTL_ERL_ARGS}
 
 # Check if files and directories non-related to Mnesia are configured
@@ -186,5 +242,67 @@ rmq_check_if_shared_with_mnesia \
 
 ##--- End of overridden <var_name> variables
 
+## Development-specific environment.
+if [ "${RABBITMQ_DEV_ENV}" ]; then
+    if [ "$(basename "$0")" = 'rabbitmq-plugins' -a \( \
+         "$RABBITMQ_PLUGINS_DIR_source" != 'environment' -o \
+         "$RABBITMQ_ENABLED_PLUGINS_FILE_source" != 'environment' \) ]; then
+        # We need to query the running node for the plugins directory
+        # and the "enabled plugins" file.
+        eval $( (${RABBITMQ_SCRIPTS_DIR}/rabbitmqctl eval \
+            '{ok, P} = application:get_env(rabbit, plugins_dir),
+             {ok, E} = application:get_env(rabbit, enabled_plugins_file),
+             io:format(
+               "plugins_dir=\"~s\"~n"
+               "enabled_plugins_file=\"~s\"~n", [P, E]).' \
+            2>/dev/null | head -n 2) || :)
+        if [ "${plugins_dir}" -a \
+             "$RABBITMQ_PLUGINS_DIR_source" != 'environment' ]; then
+            RABBITMQ_PLUGINS_DIR="${plugins_dir}"
+        fi
+        if [ "${enabled_plugins_file}" -a \
+             "$RABBITMQ_ENABLED_PLUGINS_FILE_source" != 'environment' ]; then
+            RABBITMQ_ENABLED_PLUGINS_FILE="${enabled_plugins_file}"
+        fi
+    fi
+
+    if [ -d "${RABBITMQ_PLUGINS_DIR}" ]; then
+        # RabbitMQ was started with "make run-broker" from its own
+        # source tree. Take rabbit_common from the plugins directory.
+        ERL_LIBS="${RABBITMQ_PLUGINS_DIR}:${ERL_LIBS}"
+    else
+        # RabbitMQ runs from a testsuite or a plugin. The .ez files are
+        # not available under RabbitMQ source tree. We need to look at
+        # $DEPS_DIR and default locations.
+
+        if [ "${DEPS_DIR}" -a -d "${DEPS_DIR}/rabbit_common/ebin" ]; then
+            # $DEPS_DIR is set, and it contains rabbitmq-common, use
+            # this.
+            DEPS_DIR_norm="${DEPS_DIR}"
+        elif [ -f "${RABBITMQ_SCRIPTS_DIR}/../../../erlang.mk" -a \
+               -d "${RABBITMQ_SCRIPTS_DIR}/../../rabbit_common/ebin" ]; then
+            # Look at default locations: "deps" subdirectory inside a
+            # plugin or the Umbrella.
+            DEPS_DIR_norm="${RABBITMQ_SCRIPTS_DIR}/../.."
+        fi
+        DEPS_DIR_norm=$(rmq_realpath "${DEPS_DIR_norm}")
+
+        ERL_LIBS="${DEPS_DIR_norm}:${ERL_LIBS}"
+    fi
+else
+    if [ -d "${RABBITMQ_PLUGINS_DIR}" ]; then
+        # RabbitMQ was started from its install directory. Take
+        # rabbit_common from the plugins directory.
+        ERL_LIBS="${RABBITMQ_PLUGINS_DIR}:${ERL_LIBS}"
+    fi
+fi
+
+ERL_LIBS=${ERL_LIBS%:}
+if [ "$ERL_LIBS" ]; then
+    export ERL_LIBS
+fi
+
+RABBITMQ_ENV_LOADED=1
+
 # Since we source this elsewhere, don't accidentally stop execution
 true
diff --git a/deps/rabbit/scripts/rabbitmq-env.bat b/deps/rabbit/scripts/rabbitmq-env.bat
new file mode 100644 (file)
index 0000000..3c84351
--- /dev/null
@@ -0,0 +1,428 @@
+@echo off\r
+\r
+REM Scopes the variables to the current batch file\r
+REM setlocal\r
+\r
+rem Preserve values that might contain exclamation marks before\r
+rem enabling delayed expansion\r
+set TDP0=%~dp0\r
+REM setlocal enabledelayedexpansion\r
+\r
+REM SCRIPT_DIR=`dirname $SCRIPT_PATH`\r
+REM RABBITMQ_HOME="${SCRIPT_DIR}/.."\r
+set SCRIPT_DIR=%TDP0%\r
+set SCRIPT_NAME=%1\r
+for /f "delims=" %%F in ("%SCRIPT_DIR%..") do set RABBITMQ_HOME=%%~dpsF%%~nF%%~xF\r
+\r
+REM If ERLANG_HOME is not defined, check if "erl.exe" is available in\r
+REM the path and use that.\r
+if not defined ERLANG_HOME (\r
+    for /f "delims=" %%F in ('where.exe erl.exe') do @set ERL_PATH=%%F\r
+    if exist "!ERL_PATH!" (\r
+        for /f "delims=" %%F in ("!ERL_PATH!") do set ERL_DIRNAME=%%~dpF\r
+        for /f "delims=" %%F in ("!ERL_DIRNAME!\..") do @set ERLANG_HOME=%%~dpsF%%~nF%%~xF\r
+    )\r
+    set ERL_PATH=\r
+    set ERL_DIRNAME=\r
+)\r
+\r
+REM ## Set defaults\r
+REM . ${SCRIPT_DIR}/rabbitmq-defaults\r
+call "%SCRIPT_DIR%\rabbitmq-defaults.bat"\r
+\r
+set DEFAULT_SCHEDULER_BIND_TYPE=db\r
+\r
+REM [ "x" = "x$RABBITMQ_SCHEDULER_BIND_TYPE" ] && RABBITMQ_SCHEDULER_BIND_TYPE=${DEFAULT_SCHEDULER_BIND_TYPE}\r
+REM set the default scheduling bind type\r
+if "!RABBITMQ_SCHEDULER_BIND_TYPE!"=="" (\r
+    set RABBITMQ_SCHEDULER_BIND_TYPE=!DEFAULT_SCHEDULER_BIND_TYPE!\r
+)\r
+\r
+REM DEFAULT_DISTRIBUTION_BUFFER_SIZE=32000\r
+REM set the VM distribution buffer size\r
+REM [ "x" = "x$RABBITMQ_DISTRIBUTION_BUFFER_SIZE" ] && RABBITMQ_DISTRIBUTION_BUFFER_SIZE=${DEFAULT_DISTRIBUTION_BUFFER_SIZE}\r
+set DEFAULT_DISTRIBUTION_BUFFER_SIZE=32000\r
+if "!RABBITMQ_DISTRIBUTION_BUFFER_SIZE!"=="" (\r
+    set RABBITMQ_DISTRIBUTION_BUFFER_SIZE=!DEFAULT_DISTRIBUTION_BUFFER_SIZE!\r
+)\r
+\r
+REM # warn about old rabbitmq.conf file, if no new one\r
+REM if [ -f /etc/rabbitmq/rabbitmq.conf ] && \\r
+REM    [ ! -f ${CONF_ENV_FILE} ] ; then\r
+REM     echo -n "WARNING: ignoring /etc/rabbitmq/rabbitmq.conf -- "\r
+REM     echo "location has moved to ${CONF_ENV_FILE}"\r
+REM fi\r
+\r
+REM Common defaults\r
+set SERVER_ERL_ARGS=+P 1048576 +t 5000000 +stbt !RABBITMQ_SCHEDULER_BIND_TYPE! +zdbbl !RABBITMQ_DISTRIBUTION_BUFFER_SIZE!\r
+\r
+REM ## Get configuration variables from the configure environment file\r
+REM [ -f ${CONF_ENV_FILE} ] && . ${CONF_ENV_FILE} || true\r
+if "!RABBITMQ_CONF_ENV_FILE!"=="" (\r
+    set RABBITMQ_CONF_ENV_FILE=!CONF_ENV_FILE!\r
+)\r
+\r
+if exist "!RABBITMQ_CONF_ENV_FILE!" (\r
+    call "!RABBITMQ_CONF_ENV_FILE!"\r
+)\r
+\r
+REM Make sure $RABBITMQ_BASE contains no non-ASCII characters.\r
+if not exist "!RABBITMQ_BASE!" (\r
+    mkdir "!RABBITMQ_BASE!"\r
+)\r
+for /f "delims=" %%F in ("!RABBITMQ_BASE!") do set RABBITMQ_BASE=%%~sF\r
+\r
+REM Check for the short names here too\r
+if "!RABBITMQ_USE_LONGNAME!"=="" (\r
+    if "!USE_LONGNAME!"=="" (\r
+        set RABBITMQ_NAME_TYPE="-sname"\r
+        set NAMETYPE=shortnames\r
+    )\r
+)\r
+\r
+if "!RABBITMQ_USE_LONGNAME!"=="true" (\r
+    if "!USE_LONGNAME!"=="true" (\r
+        set RABBITMQ_NAME_TYPE="-name"\r
+        set NAMETYPE=longnames\r
+    )\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_NODENAME" ] && RABBITMQ_NODENAME=${NODENAME}\r
+if "!RABBITMQ_NODENAME!"=="" (\r
+    if "!NODENAME!"=="" (\r
+        REM We use Erlang to query the local hostname because\r
+        REM !COMPUTERNAME! and Erlang may return different results.\r
+       REM Start erl with -sname to make sure epmd is started.\r
+       call "%ERLANG_HOME%\bin\erl.exe" -A0 -noinput -boot start_clean -sname rabbit-prelaunch-epmd -eval "init:stop()." >nul 2>&1\r
+        for /f "delims=" %%F in ('call "%ERLANG_HOME%\bin\erl.exe" -A0 -noinput -boot start_clean -eval "net_kernel:start([list_to_atom(""rabbit-gethostname-"" ++ os:getpid()), %NAMETYPE%]), [_, H] = string:tokens(atom_to_list(node()), ""@""), io:format(""~s~n"", [H]), init:stop()."') do @set HOSTNAME=%%F\r
+        set RABBITMQ_NODENAME=rabbit@!HOSTNAME!\r
+        set HOSTNAME=\r
+    ) else (\r
+        set RABBITMQ_NODENAME=!NODENAME!\r
+    )\r
+)\r
+set NAMETYPE=\r
+\r
+REM\r
+REM ##--- Set environment vars RABBITMQ_<var_name> to defaults if not set\r
+REM\r
+REM DEFAULT_NODE_IP_ADDRESS=auto\r
+REM DEFAULT_NODE_PORT=5672\r
+REM [ "x" = "x$RABBITMQ_NODE_IP_ADDRESS" ] && RABBITMQ_NODE_IP_ADDRESS=${NODE_IP_ADDRESS}\r
+REM [ "x" = "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_NODE_PORT=${NODE_PORT}\r
+REM [ "x" = "x$RABBITMQ_NODE_IP_ADDRESS" ] && [ "x" != "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_NODE_IP_ADDRESS=${DEFAULT_NODE_IP_ADDRESS}\r
+REM [ "x" != "x$RABBITMQ_NODE_IP_ADDRESS" ] && [ "x" = "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_NODE_PORT=${DEFAULT_NODE_PORT}\r
+\r
+REM if "!RABBITMQ_NODE_IP_ADDRESS!"=="" (\r
+REM    if not "!RABBITMQ_NODE_PORT!"=="" (\r
+REM       set RABBITMQ_NODE_IP_ADDRESS=auto\r
+REM    )\r
+REM ) else (\r
+REM    if "!RABBITMQ_NODE_PORT!"=="" (\r
+REM       set RABBITMQ_NODE_PORT=5672\r
+REM    )\r
+REM )\r
+\r
+if "!RABBITMQ_NODE_IP_ADDRESS!"=="" (\r
+    if not "!NODE_IP_ADDRESS!"=="" (\r
+        set RABBITMQ_NODE_IP_ADDRESS=!NODE_IP_ADDRESS!\r
+    )\r
+)\r
+\r
+if "!RABBITMQ_NODE_PORT!"=="" (\r
+    if not "!NODE_PORT!"=="" (\r
+        set RABBITMQ_NODE_PORT=!NODE_PORT!\r
+    )\r
+)\r
+\r
+if "!RABBITMQ_NODE_IP_ADDRESS!"=="" (\r
+    if not "!RABBITMQ_NODE_PORT!"=="" (\r
+       set RABBITMQ_NODE_IP_ADDRESS=auto\r
+    )\r
+) else (\r
+    if "!RABBITMQ_NODE_PORT!"=="" (\r
+       set RABBITMQ_NODE_PORT=5672\r
+    )\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_DIST_PORT" ] && RABBITMQ_DIST_PORT=${DIST_PORT}\r
+REM [ "x" = "x$RABBITMQ_DIST_PORT" ] && [ "x" = "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_DIST_PORT=$((${DEFAULT_NODE_PORT} + 20000))\r
+REM [ "x" = "x$RABBITMQ_DIST_PORT" ] && [ "x" != "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_DIST_PORT=$((${RABBITMQ_NODE_PORT} + 20000))\r
+\r
+if "!RABBITMQ_DIST_PORT!"=="" (\r
+    if "!DIST_PORT!"=="" (\r
+        if "!RABBITMQ_NODE_PORT!"=="" (\r
+            set RABBITMQ_DIST_PORT=25672\r
+        ) else (\r
+            set /a RABBITMQ_DIST_PORT=20000+!RABBITMQ_NODE_PORT!\r
+        )\r
+    ) else (\r
+        set RABBITMQ_DIST_PORT=!DIST_PORT!\r
+    )\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_SERVER_ERL_ARGS" ] && RABBITMQ_SERVER_ERL_ARGS=${SERVER_ERL_ARGS}\r
+if "!RABBITMQ_SERVER_ERL_ARGS!"=="" (\r
+    set RABBITMQ_SERVER_ERL_ARGS=!SERVER_ERL_ARGS!\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_CONFIG_FILE" ] && RABBITMQ_CONFIG_FILE=${CONFIG_FILE}\r
+if "!RABBITMQ_CONFIG_FILE!"=="" (\r
+    if "!CONFIG_FILE!"=="" (\r
+        set RABBITMQ_CONFIG_FILE=!RABBITMQ_BASE!\rabbitmq\r
+    ) else (\r
+        set RABBITMQ_CONFIG_FILE=!CONFIG_FILE!\r
+    )\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_LOG_BASE" ] && RABBITMQ_LOG_BASE=${LOG_BASE}\r
+if "!RABBITMQ_LOG_BASE!"=="" (\r
+    if "!LOG_BASE!"=="" (\r
+        set RABBITMQ_LOG_BASE=!RABBITMQ_BASE!\log\r
+    ) else (\r
+        set RABBITMQ_LOG_BASE=!LOG_BASE!\r
+    )\r
+)\r
+if not exist "!RABBITMQ_LOG_BASE!" (\r
+    mkdir "!RABBITMQ_LOG_BASE!"\r
+)\r
+for /f "delims=" %%F in ("!RABBITMQ_LOG_BASE!") do set RABBITMQ_LOG_BASE=%%~sF\r
+\r
+REM [ "x" = "x$RABBITMQ_MNESIA_BASE" ] && RABBITMQ_MNESIA_BASE=${MNESIA_BASE}\r
+if "!RABBITMQ_MNESIA_BASE!"=="" (\r
+    if "!MNESIA_BASE!"=="" (\r
+        set RABBITMQ_MNESIA_BASE=!RABBITMQ_BASE!\db\r
+    ) else (\r
+        set RABBITMQ_MNESIA_BASE=!MNESIA_BASE!\r
+    )\r
+)\r
+if not exist "!RABBITMQ_MNESIA_BASE!" (\r
+    mkdir "!RABBITMQ_MNESIA_BASE!"\r
+)\r
+for /f "delims=" %%F in ("!RABBITMQ_MNESIA_BASE!") do set RABBITMQ_MNESIA_BASE=%%~sF\r
+\r
+REM [ "x" = "x$RABBITMQ_SERVER_START_ARGS" ] && RABBITMQ_SERVER_START_ARGS=${SERVER_START_ARGS}\r
+REM No Windows equivalent\r
+\r
+REM [ "x" = "x$RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS" ] && RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS=${SERVER_ADDITIONAL_ERL_ARGS}\r
+REM No Windows equivalent\r
+\r
+REM [ "x" = "x$RABBITMQ_MNESIA_DIR" ] && RABBITMQ_MNESIA_DIR=${MNESIA_DIR}\r
+REM [ "x" = "x$RABBITMQ_MNESIA_DIR" ] && RABBITMQ_MNESIA_DIR=${RABBITMQ_MNESIA_BASE}/${RABBITMQ_NODENAME}\r
+if "!RABBITMQ_MNESIA_DIR!"=="" (\r
+    if "!MNESIA_DIR!"=="" (\r
+        set RABBITMQ_MNESIA_DIR=!RABBITMQ_MNESIA_BASE!\!RABBITMQ_NODENAME!-mnesia\r
+    ) else (\r
+        set RABBITMQ_MNESIA_DIR=!MNESIA_DIR!\r
+    )\r
+)\r
+if not exist "!RABBITMQ_MNESIA_DIR!" (\r
+    mkdir "!RABBITMQ_MNESIA_DIR!"\r
+)\r
+for /f "delims=" %%F in ("!RABBITMQ_MNESIA_DIR!") do set RABBITMQ_MNESIA_DIR=%%~sF\r
+\r
+REM [ "x" = "x$RABBITMQ_PID_FILE" ] && RABBITMQ_PID_FILE=${PID_FILE}\r
+REM [ "x" = "x$RABBITMQ_PID_FILE" ] && RABBITMQ_PID_FILE=${RABBITMQ_MNESIA_DIR}.pid\r
+REM No Windows equivalent\r
+\r
+REM [ "x" = "x$RABBITMQ_BOOT_MODULE" ] && RABBITMQ_BOOT_MODULE=${BOOT_MODULE}\r
+if "!RABBITMQ_BOOT_MODULE!"=="" (\r
+    if "!BOOT_MODULE!"=="" (\r
+        set RABBITMQ_BOOT_MODULE=rabbit\r
+    ) else (\r
+        set RABBITMQ_BOOT_MODULE=!BOOT_MODULE!\r
+    )\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_PLUGINS_EXPAND_DIR" ] && RABBITMQ_PLUGINS_EXPAND_DIR=${PLUGINS_EXPAND_DIR}\r
+REM [ "x" = "x$RABBITMQ_PLUGINS_EXPAND_DIR" ] && RABBITMQ_PLUGINS_EXPAND_DIR=${RABBITMQ_MNESIA_BASE}/${RABBITMQ_NODENAME}-plugins-expand\r
+if "!RABBITMQ_PLUGINS_EXPAND_DIR!"=="" (\r
+    if "!PLUGINS_EXPAND_DIR!"=="" (\r
+        set RABBITMQ_PLUGINS_EXPAND_DIR=!RABBITMQ_MNESIA_BASE!\!RABBITMQ_NODENAME!-plugins-expand\r
+    ) else (\r
+        set RABBITMQ_PLUGINS_EXPAND_DIR=!PLUGINS_EXPAND_DIR!\r
+    )\r
+)\r
+REM FIXME: RabbitMQ removes and recreates RABBITMQ_PLUGINS_EXPAND_DIR\r
+REM itself. Therefore we can't create it here in advance and escape the\r
+REM directory name, and RABBITMQ_PLUGINS_EXPAND_DIR must not contain\r
+REM non-US-ASCII characters.\r
+\r
+REM [ "x" = "x$RABBITMQ_ENABLED_PLUGINS_FILE" ] && RABBITMQ_ENABLED_PLUGINS_FILE=${ENABLED_PLUGINS_FILE}\r
+if "!RABBITMQ_ENABLED_PLUGINS_FILE!"=="" (\r
+    if "!ENABLED_PLUGINS_FILE!"=="" (\r
+        set RABBITMQ_ENABLED_PLUGINS_FILE=!RABBITMQ_BASE!\enabled_plugins\r
+    ) else (\r
+        set RABBITMQ_ENABLED_PLUGINS_FILE=!ENABLED_PLUGINS_FILE!\r
+    )\r
+) else (\r
+    set RABBITMQ_ENABLED_PLUGINS_FILE_source=environment\r
+)\r
+if not exist "!RABBITMQ_ENABLED_PLUGINS_FILE!" (\r
+    for /f "delims=" %%F in ("!RABBITMQ_ENABLED_PLUGINS_FILE!") do mkdir %%~dpF 2>NUL\r
+    copy /y NUL "!RABBITMQ_ENABLED_PLUGINS_FILE!" >NUL\r
+)\r
+for /f "delims=" %%F in ("!RABBITMQ_ENABLED_PLUGINS_FILE!") do set RABBITMQ_ENABLED_PLUGINS_FILE=%%~sF\r
+\r
+REM [ "x" = "x$RABBITMQ_PLUGINS_DIR" ] && RABBITMQ_PLUGINS_DIR=${PLUGINS_DIR}\r
+if "!RABBITMQ_PLUGINS_DIR!"=="" (\r
+    if "!PLUGINS_DIR!"=="" (\r
+        set RABBITMQ_PLUGINS_DIR=!RABBITMQ_HOME!\plugins\r
+    ) else (\r
+        set RABBITMQ_PLUGINS_DIR=!PLUGINS_DIR!\r
+    )\r
+) else (\r
+    set RABBITMQ_PLUGINS_DIR_source=environment\r
+)\r
+if not exist "!RABBITMQ_PLUGINS_DIR!" (\r
+    mkdir "!RABBITMQ_PLUGINS_DIR!"\r
+)\r
+for /f "delims=" %%F in ("!RABBITMQ_PLUGINS_DIR!") do set RABBITMQ_PLUGINS_DIR=%%~sF\r
+\r
+REM ## Log rotation\r
+REM [ "x" = "x$RABBITMQ_LOGS" ] && RABBITMQ_LOGS=${LOGS}\r
+REM [ "x" = "x$RABBITMQ_LOGS" ] && RABBITMQ_LOGS="${RABBITMQ_LOG_BASE}/${RABBITMQ_NODENAME}.log"\r
+if "!RABBITMQ_LOGS!"=="" (\r
+    if "!LOGS!"=="" (\r
+        set RABBITMQ_LOGS=!RABBITMQ_LOG_BASE!\!RABBITMQ_NODENAME!.log\r
+    ) else (\r
+        set RABBITMQ_LOGS=!LOGS!\r
+    )\r
+)\r
+if not "!RABBITMQ_LOGS!" == "-" (\r
+    if not exist "!RABBITMQ_LOGS!" (\r
+        for /f "delims=" %%F in ("!RABBITMQ_LOGS!") do mkdir %%~dpF 2>NUL\r
+        copy /y NUL "!RABBITMQ_LOGS!" >NUL\r
+    )\r
+    for /f "delims=" %%F in ("!RABBITMQ_LOGS!") do set RABBITMQ_LOGS=%%~sF\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_SASL_LOGS" ] && RABBITMQ_SASL_LOGS=${SASL_LOGS}\r
+REM [ "x" = "x$RABBITMQ_SASL_LOGS" ] && RABBITMQ_SASL_LOGS="${RABBITMQ_LOG_BASE}/${RABBITMQ_NODENAME}-sasl.log"\r
+if "!RABBITMQ_SASL_LOGS!"=="" (\r
+    if "!SASL_LOGS!"=="" (\r
+        set RABBITMQ_SASL_LOGS=!RABBITMQ_LOG_BASE!\!RABBITMQ_NODENAME!-sasl.log\r
+    ) else (\r
+        set RABBITMQ_SASL_LOGS=!SASL_LOGS!\r
+    )\r
+)\r
+if not "!RABBITMQ_SASL_LOGS!" == "-" (\r
+    if not exist "!RABBITMQ_SASL_LOGS!" (\r
+        for /f "delims=" %%F in ("!RABBITMQ_SASL_LOGS!") do mkdir %%~dpF 2>NUL\r
+        copy /y NUL "!RABBITMQ_SASL_LOGS!" >NUL\r
+    )\r
+    for /f "delims=" %%F in ("!RABBITMQ_SASL_LOGS!") do set RABBITMQ_SASL_LOGS=%%~sF\r
+)\r
+\r
+REM [ "x" = "x$RABBITMQ_CTL_ERL_ARGS" ] && RABBITMQ_CTL_ERL_ARGS=${CTL_ERL_ARGS}\r
+if "!RABBITMQ_CTL_ERL_ARGS!"=="" (\r
+    if not "!CTL_ERL_ARGS!"=="" (\r
+        set RABBITMQ_CTL_ERL_ARGS=!CTL_ERL_ARGS!\r
+    )\r
+)\r
+\r
+REM ADDITIONAL WINDOWS ONLY CONFIG ITEMS\r
+REM rabbitmq-plugins.bat\r
+REM if "!RABBITMQ_SERVICENAME!"=="" (\r
+REM     set RABBITMQ_SERVICENAME=RabbitMQ\r
+REM )\r
+\r
+if "!RABBITMQ_SERVICENAME!"=="" (\r
+    if "!SERVICENAME!"=="" (\r
+        set RABBITMQ_SERVICENAME=RabbitMQ\r
+    ) else (\r
+        set RABBITMQ_SERVICENAME=!SERVICENAME!\r
+    )\r
+)\r
+\r
+REM Development-specific environment.\r
+if defined RABBITMQ_DEV_ENV (\r
+    if "!SCRIPT_NAME!" == "rabbitmq-plugins" (\r
+        REM We may need to query the running node for the plugins directory\r
+        REM and the "enabled plugins" file.\r
+        if not "%RABBITMQ_PLUGINS_DIR_source%" == "environment" (\r
+            for /f "delims=" %%F in ('!SCRIPT_DIR!\rabbitmqctl eval "{ok, P} = application:get_env(rabbit, plugins_dir), io:format(""~s~n"", [P])."') do @set plugins_dir=%%F\r
+            if exist "!plugins_dir!" (\r
+                set RABBITMQ_PLUGINS_DIR=!plugins_dir!\r
+            )\r
+            REM set plugins_dir=\r
+        )\r
+        if not "%RABBITMQ_ENABLED_PLUGINS_FILE_source%" == "environment" (\r
+            for /f "delims=" %%F in ('!SCRIPT_DIR!\rabbitmqctl eval "{ok, P} = application:get_env(rabbit, enabled_plugins_file), io:format(""~s~n"", [P])."') do @set enabled_plugins_file=%%F\r
+            if exist "!enabled_plugins_file!" (\r
+                set RABBITMQ_ENABLED_PLUGINS_FILE=!enabled_plugins_file!\r
+            )\r
+            REM set enabled_plugins_file=\r
+        )\r
+    )\r
+\r
+    if exist "!RABBITMQ_PLUGINS_DIR!" (\r
+        REM RabbitMQ was started with "make run-broker" from its own\r
+        REM source tree. Take rabbit_common from the plugins directory.\r
+        set ERL_LIBS=!RABBITMQ_PLUGINS_DIR!;!ERL_LIBS!\r
+    ) else (\r
+        REM RabbitMQ runs from a testsuite or a plugin. The .ez files are\r
+        REM not available under RabbitMQ source tree. We need to look at\r
+        REM $DEPS_DIR and default locations.\r
+\r
+        if "!DEPS_DIR!" == "" (\r
+            if exist "!RABBITMQ_HOME!\..\..\deps\rabbit_common\erlang.mk" (\r
+                REM Dependencies in the Umbrella or a plugin.\r
+                set DEPS_DIR_norm="!RABBITMQ_HOME!\..\..\deps"\r
+            ) else (\r
+                if exist "!RABBITMQ_HOME!\deps\rabbit_common\erlang.mk" (\r
+                    REM Dependencies in the broker.\r
+                    set DEPS_DIR_norm="!RABBITMQ_HOME!\deps"\r
+                )\r
+            )\r
+        ) else (\r
+            for /f "delims=" %%F in ("!DEPS_DIR!") do @set DEPS_DIR_norm=%%~dpsF%%~nF%%~xF\r
+        )\r
+\r
+        set ERL_LIBS=!DEPS_DIR_norm!;!ERL_LIBS!\r
+    )\r
+) else (\r
+    if exist "!RABBITMQ_PLUGINS_DIR!" (\r
+        REM RabbitMQ was started from its install directory. Take\r
+        REM rabbit_common from the plugins directory.\r
+        set ERL_LIBS=!RABBITMQ_PLUGINS_DIR!;!ERL_LIBS!\r
+    )\r
+)\r
+\r
+REM Ensure all paths in ERL_LIBS do not contains non-ASCII characters.\r
+set ERL_LIBS_orig=%ERL_LIBS%\r
+set ERL_LIBS=\r
+call :filter_paths "%ERL_LIBS_orig%"\r
+goto :filter_paths_done\r
+\r
+:filter_paths\r
+set paths=%1\r
+set paths=%paths:"=%\r
+for /f "tokens=1* delims=;" %%a in ("%paths%") do (\r
+    if not "%%a" == "" call :filter_path "%%a"\r
+    if not "%%b" == "" call :filter_paths "%%b"\r
+)\r
+set paths=\r
+exit /b\r
+\r
+:filter_path\r
+set ERL_LIBS=%ERL_LIBS%;%~dps1%~n1%~x1\r
+exit /b\r
+\r
+:filter_paths_done\r
+\r
+REM Environment cleanup\r
+set BOOT_MODULE=\r
+set CONFIG_FILE=\r
+set ENABLED_PLUGINS_FILE=\r
+set LOG_BASE=\r
+set MNESIA_BASE=\r
+set PLUGINS_DIR=\r
+set SCRIPT_DIR=\r
+set SCRIPT_NAME=\r
+set TDP0=\r
+\r
+REM ##--- End of overridden <var_name> variables\r
+REM\r
+REM # Since we source this elsewhere, don't accidentally stop execution\r
+REM true\r
similarity index 87%
rename from rabbitmq-server/scripts/rabbitmq-plugins
rename to deps/rabbit/scripts/rabbitmq-plugins
index fb88bcc68d325b50afd7d95e6f15a0d5af38eb10..d72df8ad86485ee6712b9e425cb97767820c5b0a 100755 (executable)
 # Non-empty defaults should be set in rabbitmq-env
 . `dirname $0`/rabbitmq-env
 
+# Disable erl_crash.dump by default for control scripts.
+if [ -z "$ERL_CRASH_DUMP_SECONDS" ]; then
+    export ERL_CRASH_DUMP_SECONDS=0
+fi
+
 RABBITMQ_USE_LONGNAME=${RABBITMQ_USE_LONGNAME} \
 exec ${ERL_DIR}erl \
     -pa "${RABBITMQ_HOME}/ebin" \
     -noinput \
     -hidden \
-    ${RABBITMQ_PLUGINS_ERL_ARGS} \
+    ${RABBITMQ_CTL_ERL_ARGS} \
     -boot "${CLEAN_BOOT_FILE}" \
     -s rabbit_plugins_main \
     -enabled_plugins_file "$RABBITMQ_ENABLED_PLUGINS_FILE" \
old mode 100755 (executable)
new mode 100644 (file)
similarity index 86%
rename from rabbitmq-server/scripts/rabbitmq-plugins.bat
rename to deps/rabbit/scripts/rabbitmq-plugins.bat
index 6fb2f4f..c270d5d
@@ -26,7 +26,7 @@ setlocal enabledelayedexpansion
 \r
 REM Get default settings with user overrides for (RABBITMQ_)<var_name>\r
 REM Non-empty defaults should be set in rabbitmq-env\r
-call "!TDP0!\rabbitmq-env.bat"\r
+call "!TDP0!\rabbitmq-env.bat" %~n0\r
 \r
 if not exist "!ERLANG_HOME!\bin\erl.exe" (\r
     echo.\r
@@ -40,8 +40,13 @@ if not exist "!ERLANG_HOME!\bin\erl.exe" (
     exit /B 1\r
 )\r
 \r
+REM Disable erl_crash.dump by default for control scripts.\r
+if not defined ERL_CRASH_DUMP_SECONDS (\r
+    set ERL_CRASH_DUMP_SECONDS=0\r
+)\r
+\r
 "!ERLANG_HOME!\bin\erl.exe" ^\r
--pa "!TDP0!..\ebin" ^\r
+-pa "!RABBITMQ_HOME!\ebin" ^\r
 -noinput ^\r
 -hidden ^\r
 !RABBITMQ_CTL_ERL_ARGS! ^\r
similarity index 58%
rename from rabbitmq-server/scripts/rabbitmq-server
rename to deps/rabbit/scripts/rabbitmq-server
index 71a6530321681f782ad84c623f889e92f247e51d..7b0599e88fc6ad57542b0f8ab21786d72927d7af 100755 (executable)
@@ -38,15 +38,44 @@ case "$(uname -s)" in
            if [ $detached ]; then
                echo "Warning: PID file not written; -detached was passed." 1>&2
            else
-               mkdir -p $(dirname ${RABBITMQ_PID_FILE});
-               echo $$ > ${RABBITMQ_PID_FILE}
+               RABBITMQ_PID_DIR="$(dirname ${RABBITMQ_PID_FILE})"
+               EX_CANTCREAT=73 # Standard exit code from sysexits(2)
+               if ! mkdir -p "$RABBITMQ_PID_DIR"; then
+                   # Better diagnostics - 'mkdir -p' reports only the first directory in chain that
+                   # it fails to create
+                   echo "Failed to create directory: $RABBITMQ_PID_DIR"
+                   exit $EX_CANTCREAT
+               fi
+               if ! echo $$ > ${RABBITMQ_PID_FILE}; then
+                   # Better diagnostics - otherwise the only report in logs is about failed 'echo'
+                   # command, but without any other details: neither what script has failed nor what
+                   # file output was redirected to.
+                   echo "Failed to write pid file: ${RABBITMQ_PID_FILE}"
+                   exit $EX_CANTCREAT
+               fi
            fi
 esac
 
 RABBITMQ_EBIN_ROOT="${RABBITMQ_HOME}/ebin"
 
+[ "$NOTIFY_SOCKET" ] && RUNNING_UNDER_SYSTEMD=true
+
 set +e
 
+# `net_kernel:start/1` will fail in `longnames` mode when erlang is
+# unable to determine FQDN of a node (with a dot in it). But `erl`
+# itself has some magic that still allow it to start when you
+# explicitly specify host (a.la `erl -name test@localhost`).
+#
+# It's not possible to communicate with this node, unless it's a
+# connection initiator. But as prelaunch IS an initiator, it doesn't
+# matter what we actually put here. But `localhost` sounds good
+# enough.
+RABBITMQ_PRELAUNCH_NODENAME="rabbitmqprelaunch${$}@localhost"
+
+# NOTIFY_SOCKET is needed here to prevent epmd from impersonating the
+# success of our startup sequence to systemd.
+NOTIFY_SOCKET= \
 RABBITMQ_CONFIG_FILE=$RABBITMQ_CONFIG_FILE \
 RABBITMQ_DIST_PORT=$RABBITMQ_DIST_PORT \
     ${ERL_DIR}erl -pa "$RABBITMQ_EBIN_ROOT" \
@@ -54,7 +83,7 @@ RABBITMQ_DIST_PORT=$RABBITMQ_DIST_PORT \
     -noinput \
     -hidden \
     -s rabbit_prelaunch \
-    ${RABBITMQ_NAME_TYPE} rabbitmqprelaunch$$ \
+    ${RABBITMQ_NAME_TYPE} ${RABBITMQ_PRELAUNCH_NODENAME} \
     -extra "${RABBITMQ_NODENAME}"
 
 PRELAUNCH_RESULT=$?
@@ -99,10 +128,31 @@ fi
 # there is no other way of preventing their expansion.
 set -f
 
+# Lazy initialization of thread pool size - if it wasn't set
+# explicitly. This parameter is only needed when server is starting,
+# so it makes no sense to do these calculations in rabbitmq-env or
+# rabbitmq-defaults scripts.
+ensure_thread_pool_size() {
+    if [ -z "${RABBITMQ_IO_THREAD_POOL_SIZE}" ]; then
+        RABBITMQ_IO_THREAD_POOL_SIZE=$(
+            ${ERL_DIR}erl -pa "$RABBITMQ_EBIN_ROOT" \
+                      -boot "${CLEAN_BOOT_FILE}" \
+                      -noinput \
+                      -s rabbit_misc report_default_thread_pool_size
+        )
+    fi
+}
+
 start_rabbitmq_server() {
+    # "-pa ${RABBITMQ_SERVER_CODE_PATH}" should be the very first
+    # command-line argument. In case of using cached HiPE-compilation,
+    # this will allow for compiled versions of erlang built-in modules
+    # (e.g. lists) to be loaded.
+    ensure_thread_pool_size
+    check_start_params &&
     RABBITMQ_CONFIG_FILE=$RABBITMQ_CONFIG_FILE \
     exec ${ERL_DIR}erl \
-        -pa ${RABBITMQ_EBIN_ROOT} \
+        -pa ${RABBITMQ_SERVER_CODE_PATH} ${RABBITMQ_EBIN_ROOT} \
         ${RABBITMQ_START_RABBIT} \
         ${RABBITMQ_NAME_TYPE} ${RABBITMQ_NODENAME} \
         -boot "${SASL_BOOT_FILE}" \
@@ -138,16 +188,47 @@ stop_rabbitmq_server() {
     fi
 }
 
-if [ 'x' = "x$RABBITMQ_ALLOW_INPUT" -a -z "$detached" ]; then
+check_start_params() {
+    check_not_empty RABBITMQ_BOOT_MODULE
+    check_not_empty RABBITMQ_NAME_TYPE
+    check_not_empty RABBITMQ_NODENAME
+    check_not_empty SASL_BOOT_FILE
+    check_not_empty RABBITMQ_IO_THREAD_POOL_SIZE
+}
+
+check_not_empty() {
+    local name="${1:?}"
+    local value
+    eval value=\$$name
+    if [ -z "$value" ]; then
+        echo "Error: ENV variable should be defined: $1.
+       Please check rabbitmq-env, rabbitmq-defaults, and ${RABBITMQ_CONF_ENV_FILE} script files"
+        exit 78
+    fi
+}
+
+if [ "$RABBITMQ_ALLOW_INPUT" -o "$RUNNING_UNDER_SYSTEMD" -o "$detached" ]; then
+    # Run erlang VM directly, completely replacing current shell
+    # process - so the pid file written in the code above will be
+    # valid (unless detached, which is also handled in the code
+    # above).
+    #
+    # And also this is the correct mode to run the broker under
+    # systemd - there is no need in a proxy process that converts
+    # signals to graceful shutdown command, the unit file should already
+    # contain instructions for graceful shutdown. Also by removing
+    # this additional process we could simply use value returned by
+    # `os:getpid/0` for a systemd ready notification.
+    start_rabbitmq_server "$@"
+else
     # When RabbitMQ runs in the foreground but the Erlang shell is
     # disabled, we setup signal handlers to stop RabbitMQ properly. This
     # is at least useful in the case of Docker.
-
     # The Erlang VM should ignore SIGINT.
-    RABBITMQ_SERVER_START_ARGS="${RABBITMQ_SERVER_START_ARGS} +B i"
+    RABBITMQ_SERVER_START_ARGS="${RABBITMQ_SERVER_START_ARGS} ${RABBITMQ_IGNORE_SIGINT_FLAG}"
 
     # Signal handlers. They all stop RabbitMQ properly (using
-    # rabbitmqctl stop). Depending on the signal, this script will exwit
+    # rabbitmqctl stop). Depending on the signal, this script will exit
     # with a non-zero error code:
     #   SIGHUP SIGTERM SIGTSTP
     #     They are considered a normal process termination, so the script
@@ -161,7 +242,6 @@ if [ 'x' = "x$RABBITMQ_ALLOW_INPUT" -a -z "$detached" ]; then
     start_rabbitmq_server "$@" &
 
     # Block until RabbitMQ exits or a signal is caught.
-    wait
-else
-    start_rabbitmq_server "$@"
+    # Waits for last command (which is start_rabbitmq_server)
+    wait $!
 fi
old mode 100755 (executable)
new mode 100644 (file)
similarity index 65%
rename from rabbitmq-server/scripts/rabbitmq-server.bat
rename to deps/rabbit/scripts/rabbitmq-server.bat
index 8f75a48..585a830
@@ -25,7 +25,7 @@ setlocal enabledelayedexpansion
 \r
 REM Get default settings with user overrides for (RABBITMQ_)<var_name>\r
 REM Non-empty defaults should be set in rabbitmq-env\r
-call "%TDP0%\rabbitmq-env.bat"\r
+call "%TDP0%\rabbitmq-env.bat" %~n0\r
 \r
 if not exist "!ERLANG_HOME!\bin\erl.exe" (\r
     echo.\r
@@ -39,7 +39,7 @@ if not exist "!ERLANG_HOME!\bin\erl.exe" (
     exit /B 1\r
 )\r
 \r
-set RABBITMQ_EBIN_ROOT=!TDP0!..\ebin\r
+set RABBITMQ_EBIN_ROOT=!RABBITMQ_HOME!\ebin\r
 \r
 "!ERLANG_HOME!\bin\erl.exe" ^\r
         -pa "!RABBITMQ_EBIN_ROOT!" ^\r
@@ -71,33 +71,63 @@ if not "!RABBITMQ_NODE_IP_ADDRESS!"=="" (
    )\r
 )\r
 \r
+REM If $RABBITMQ_LOGS is '-', send all log messages to stdout. Likewise\r
+REM for RABBITMQ_SASL_LOGS. This is particularily useful for Docker\r
+REM images.\r
+\r
+if "!RABBITMQ_LOGS!" == "-" (\r
+    set RABBIT_ERROR_LOGGER=tty\r
+) else (\r
+    set RABBIT_ERROR_LOGGER={file,\""!RABBITMQ_LOGS:\=/!"\"}\r
+)\r
+\r
+if "!RABBITMQ_SASL_LOGS!" == "-" (\r
+    set SASL_ERROR_LOGGER=tty\r
+    set RABBIT_SASL_ERROR_LOGGER=tty\r
+) else (\r
+    set SASL_ERROR_LOGGER=false\r
+    set RABBIT_SASL_ERROR_LOGGER={file,\""!RABBITMQ_SASL_LOGS:\=/!"\"}\r
+)\r
+\r
 set RABBITMQ_START_RABBIT=\r
+if "!RABBITMQ_ALLOW_INPUT!"=="" (\r
+    set RABBITMQ_START_RABBIT=!RABBITMQ_START_RABBIT! -noinput\r
+)\r
 if "!RABBITMQ_NODE_ONLY!"=="" (\r
-    set RABBITMQ_START_RABBIT=-s "!RABBITMQ_BOOT_MODULE!" boot\r
+    set RABBITMQ_START_RABBIT=!RABBITMQ_START_RABBIT! -s "!RABBITMQ_BOOT_MODULE!" boot\r
 )\r
 \r
 if "!RABBITMQ_IO_THREAD_POOL_SIZE!"=="" (\r
-    set RABBITMQ_IO_THREAD_POOL_ARG=30\r
+    set RABBITMQ_IO_THREAD_POOL_SIZE=64\r
+) \r
+\r
+\r
+set ENV_OK=true\r
+CALL :check_not_empty "RABBITMQ_BOOT_MODULE" !RABBITMQ_BOOT_MODULE! \r
+CALL :check_not_empty "RABBITMQ_NAME_TYPE" !RABBITMQ_NAME_TYPE!\r
+CALL :check_not_empty "RABBITMQ_NODENAME" !RABBITMQ_NODENAME!\r
+\r
+\r
+if "!ENV_OK!"=="false" (\r
+    EXIT /b 78\r
 )\r
 \r
 "!ERLANG_HOME!\bin\erl.exe" ^\r
 -pa "!RABBITMQ_EBIN_ROOT!" ^\r
--noinput ^\r
 -boot start_sasl ^\r
 !RABBITMQ_START_RABBIT! ^\r
 !RABBITMQ_CONFIG_ARG! ^\r
 !RABBITMQ_NAME_TYPE! !RABBITMQ_NODENAME! ^\r
 +W w ^\r
 +A "!RABBITMQ_IO_THREAD_POOL_SIZE!" ^\r
-+P 1048576 ^\r
-!RABBITMQ_LISTEN_ARG! ^\r
 !RABBITMQ_SERVER_ERL_ARGS! ^\r
+!RABBITMQ_LISTEN_ARG! ^\r
 -kernel inet_default_connect_options "[{nodelay, true}]" ^\r
 !RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS! ^\r
 -sasl errlog_type error ^\r
--sasl sasl_error_logger false ^\r
--rabbit error_logger {file,\""!LOGS:\=/!"\"} ^\r
--rabbit sasl_error_logger {file,\""!SASL_LOGS:\=/!"\"} ^\r
+-sasl sasl_error_logger !SASL_ERROR_LOGGER! ^\r
+-rabbit error_logger !RABBIT_ERROR_LOGGER! ^\r
+-rabbit sasl_error_logger !RABBIT_SASL_ERROR_LOGGER! ^\r
 -rabbit enabled_plugins_file \""!RABBITMQ_ENABLED_PLUGINS_FILE:\=/!"\" ^\r
 -rabbit plugins_dir \""!RABBITMQ_PLUGINS_DIR:\=/!"\" ^\r
 -rabbit plugins_expand_dir \""!RABBITMQ_PLUGINS_EXPAND_DIR:\=/!"\" ^\r
@@ -109,5 +139,16 @@ if "!RABBITMQ_IO_THREAD_POOL_SIZE!"=="" (
 !RABBITMQ_DIST_ARG! ^\r
 !STAR!\r
 \r
+EXIT /B 0\r
+\r
+:check_not_empty\r
+if "%~2"=="" (\r
+    ECHO "Error: ENV variable should be defined: %1. Please check rabbitmq-env and rabbitmq-defaults, and !RABBITMQ_CONF_ENV_FILE! script files. Check also your Environment Variables settings"\r
+    set ENV_OK=false\r
+    EXIT /B 78 \r
+    )\r
+EXIT /B 0\r
+\r
 endlocal\r
 endlocal\r
+\r
old mode 100755 (executable)
new mode 100644 (file)
similarity index 81%
rename from rabbitmq-server/scripts/rabbitmq-service.bat
rename to deps/rabbit/scripts/rabbitmq-service.bat
index 0845bbf..f8a8d5a
@@ -26,7 +26,7 @@ setlocal enabledelayedexpansion
 \r
 REM Get default settings with user overrides for (RABBITMQ_)<var_name>\r
 REM Non-empty defaults should be set in rabbitmq-env\r
-call "%TDP0%\rabbitmq-env.bat"\r
+call "%TDP0%\rabbitmq-env.bat" %~n0\r
 \r
 set STARVAR=\r
 shift\r
@@ -104,6 +104,16 @@ if not exist "!RABBITMQ_BASE!" (
     echo Creating base directory !RABBITMQ_BASE! & md "!RABBITMQ_BASE!"\r
 )\r
 \r
+set ENV_OK=true\r
+CALL :check_not_empty "RABBITMQ_BOOT_MODULE" !RABBITMQ_BOOT_MODULE! \r
+CALL :check_not_empty "RABBITMQ_NAME_TYPE" !RABBITMQ_NAME_TYPE!\r
+CALL :check_not_empty "RABBITMQ_NODENAME" !RABBITMQ_NODENAME!\r
+\r
+\r
+if "!ENV_OK!"=="false" (\r
+    EXIT /b 78\r
+)\r
+\r
 "!ERLANG_SERVICE_MANAGER_PATH!\erlsrv" list !RABBITMQ_SERVICENAME! 2>NUL 1>NUL\r
 if errorlevel 1 (\r
     "!ERLANG_SERVICE_MANAGER_PATH!\erlsrv" add !RABBITMQ_SERVICENAME! -internalservicename !RABBITMQ_SERVICENAME!\r
@@ -111,7 +121,7 @@ if errorlevel 1 (
     echo !RABBITMQ_SERVICENAME! service is already present - only updating service parameters\r
 )\r
 \r
-set RABBITMQ_EBIN_ROOT=!TDP0!..\ebin\r
+set RABBITMQ_EBIN_ROOT=!RABBITMQ_HOME!\ebin\r
 \r
 "!ERLANG_HOME!\bin\erl.exe" ^\r
         -pa "!RABBITMQ_EBIN_ROOT!" ^\r
@@ -131,6 +141,12 @@ if ERRORLEVEL 3 (
     set RABBITMQ_DIST_ARG=-kernel inet_dist_listen_min !RABBITMQ_DIST_PORT! -kernel inet_dist_listen_max !RABBITMQ_DIST_PORT!\r
 )\r
 \r
+    REM Try to create config file, if it doesn't exist\r
+    REM It still can fail to be created, but at least not for default install\r
+if not exist "!RABBITMQ_CONFIG_FILE!.config" (\r
+    echo []. > !RABBITMQ_CONFIG_FILE!.config\r
+)\r
+\r
 if exist "!RABBITMQ_CONFIG_FILE!.config" (\r
     set RABBITMQ_CONFIG_ARG=-config "!RABBITMQ_CONFIG_FILE!"\r
 ) else (\r
@@ -150,7 +166,11 @@ if "!RABBITMQ_NODE_ONLY!"=="" (
 )\r
 \r
 if "!RABBITMQ_IO_THREAD_POOL_SIZE!"=="" (\r
-    set RABBITMQ_IO_THREAD_POOL_SIZE=30\r
+    set RABBITMQ_IO_THREAD_POOL_SIZE=64\r
+)\r
+\r
+if "!RABBITMQ_SERVICE_RESTART!"=="" (\r
+    set RABBITMQ_SERVICE_RESTART=restart\r
 )\r
 \r
 set ERLANG_SERVICE_ARGUMENTS= ^\r
@@ -160,15 +180,14 @@ set ERLANG_SERVICE_ARGUMENTS= ^
 !RABBITMQ_CONFIG_ARG! ^\r
 +W w ^\r
 +A "!RABBITMQ_IO_THREAD_POOL_SIZE!" ^\r
-+P 1048576 ^\r
-!RABBITMQ_LISTEN_ARG! ^\r
 !RABBITMQ_SERVER_ERL_ARGS! ^\r
+!RABBITMQ_LISTEN_ARG! ^\r
 -kernel inet_default_connect_options "[{nodelay,true}]" ^\r
 !RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS! ^\r
 -sasl errlog_type error ^\r
 -sasl sasl_error_logger false ^\r
--rabbit error_logger {file,\""!LOGS:\=/!"\"} ^\r
--rabbit sasl_error_logger {file,\""!SASL_LOGS:\=/!"\"} ^\r
+-rabbit error_logger {file,\""!RABBITMQ_LOGS:\=/!"\"} ^\r
+-rabbit sasl_error_logger {file,\""!RABBITMQ_SASL_LOGS:\=/!"\"} ^\r
 -rabbit enabled_plugins_file \""!RABBITMQ_ENABLED_PLUGINS_FILE:\=/!"\" ^\r
 -rabbit plugins_dir \""!RABBITMQ_PLUGINS_DIR:\=/!"\" ^\r
 -rabbit plugins_expand_dir \""!RABBITMQ_PLUGINS_EXPAND_DIR:\=/!"\" ^\r
@@ -184,9 +203,13 @@ set ERLANG_SERVICE_ARGUMENTS= ^
 set ERLANG_SERVICE_ARGUMENTS=!ERLANG_SERVICE_ARGUMENTS:\=\\!\r
 set ERLANG_SERVICE_ARGUMENTS=!ERLANG_SERVICE_ARGUMENTS:"=\"!\r
 \r
+\r
+\r
 "!ERLANG_SERVICE_MANAGER_PATH!\erlsrv" set !RABBITMQ_SERVICENAME! ^\r
+-onfail !RABBITMQ_SERVICE_RESTART! ^\r
 -machine "!ERLANG_SERVICE_MANAGER_PATH!\erl.exe" ^\r
 -env ERL_CRASH_DUMP="!RABBITMQ_BASE:\=/!/erl_crash.dump" ^\r
+-env ERL_LIBS="!ERL_LIBS!" ^\r
 -workdir "!RABBITMQ_BASE!" ^\r
 -stopaction "rabbit:stop_and_halt()." ^\r
 !RABBITMQ_NAME_TYPE! !RABBITMQ_NODENAME! ^\r
@@ -205,5 +228,15 @@ goto END
 \r
 :END\r
 \r
+EXIT /B 0\r
+\r
+:check_not_empty\r
+if "%~2"=="" (\r
+    ECHO "Error: ENV variable should be defined: %1. Please check rabbitmq-env, rabbitmq-default, and !RABBITMQ_CONF_ENV_FILE! script files. Check also your Environment Variables settings"\r
+    set ENV_OK=false\r
+    EXIT /B 78 \r
+    )\r
+EXIT /B 0\r
+\r
 endlocal\r
 endlocal\r
similarity index 89%
rename from rabbitmq-server/scripts/rabbitmqctl
rename to deps/rabbit/scripts/rabbitmqctl
index 03f8765e27b6b679a635db088a66181c14200d70..2336c3d466130828e441444951b4427708c6d8c6 100755 (executable)
 # Non-empty defaults should be set in rabbitmq-env
 . `dirname $0`/rabbitmq-env
 
+# Disable erl_crash.dump by default for control scripts.
+if [ -z "$ERL_CRASH_DUMP_SECONDS" ]; then
+    export ERL_CRASH_DUMP_SECONDS=0
+fi
+
 # We specify Mnesia dir and sasl error logger since some actions
 # (e.g. forget_cluster_node --offline) require us to impersonate the
 # real node.
 RABBITMQ_USE_LONGNAME=${RABBITMQ_USE_LONGNAME} \
 exec ${ERL_DIR}erl \
     -pa "${RABBITMQ_HOME}/ebin" \
-    -noinput \
+    -noinput +B \
     -hidden \
     ${RABBITMQ_CTL_ERL_ARGS} \
     -boot "${CLEAN_BOOT_FILE}" \
old mode 100755 (executable)
new mode 100644 (file)
similarity index 86%
rename from rabbitmq-server/scripts/rabbitmqctl.bat
rename to deps/rabbit/scripts/rabbitmqctl.bat
index 45e2929..56e856f
@@ -24,6 +24,10 @@ set TDP0=%~dp0
 set STAR=%*\r
 setlocal enabledelayedexpansion\r
 \r
+REM Get default settings with user overrides for (RABBITMQ_)<var_name>\r
+REM Non-empty defaults should be set in rabbitmq-env\r
+call "%TDP0%\rabbitmq-env.bat" %~n0\r
+\r
 if not exist "!ERLANG_HOME!\bin\erl.exe" (\r
     echo.\r
     echo ******************************\r
@@ -36,12 +40,13 @@ if not exist "!ERLANG_HOME!\bin\erl.exe" (
     exit /B 1\r
 )\r
 \r
-REM Get default settings with user overrides for (RABBITMQ_)<var_name>\r
-REM Non-empty defaults should be set in rabbitmq-env\r
-call "%TDP0%\rabbitmq-env.bat"\r
+REM Disable erl_crash.dump by default for control scripts.\r
+if not defined ERL_CRASH_DUMP_SECONDS (\r
+    set ERL_CRASH_DUMP_SECONDS=0\r
+)\r
 \r
 "!ERLANG_HOME!\bin\erl.exe" ^\r
--pa "!TDP0!..\ebin" ^\r
+-pa "!RABBITMQ_HOME!\ebin" ^\r
 -noinput ^\r
 -hidden ^\r
 !RABBITMQ_CTL_ERL_ARGS! ^\r
similarity index 89%
rename from rabbitmq-server/src/background_gc.erl
rename to deps/rabbit/src/background_gc.erl
index 0dafde6dc24fabc2271ed731910fc61d879ef859..2986f356f5e6f9cb09fba52eaa24f2388137c454 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(background_gc).
 
 -define(MAX_RATIO, 0.01).
 -define(IDEAL_INTERVAL, 60000).
+-define(MAX_INTERVAL, 240000).
 
 -record(state, {last_interval}).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> {'ok', pid()} | {'error', any()}).
--spec(run/0 :: () -> 'ok').
--spec(gc/0 :: () -> 'ok').
-
--endif.
+-spec start_link() -> {'ok', pid()} | {'error', any()}.
+-spec run() -> 'ok'.
+-spec gc() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -70,7 +67,7 @@ terminate(_Reason, State) -> State.
 interval_gc(State = #state{last_interval = LastInterval}) ->
     {ok, Interval} = rabbit_misc:interval_operation(
                        {?MODULE, gc, []},
-                       ?MAX_RATIO, ?IDEAL_INTERVAL, LastInterval),
+                       ?MAX_RATIO, ?MAX_INTERVAL, ?IDEAL_INTERVAL, LastInterval),
     erlang:send_after(Interval, self(), run),
     State#state{last_interval = Interval}.
 
similarity index 91%
rename from rabbitmq-server/src/delegate.erl
rename to deps/rabbit/src/delegate.erl
index 4bf570cc27b9452f9c8bbce49916c4ed9440b3a2..778137c1c72da055f3cb2a90c1d00c5a4414f7b4 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(delegate).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([monitor_ref/0]).
 
--type(monitor_ref() :: reference() | {atom(), pid()}).
--type(fun_or_mfa(A) :: fun ((pid()) -> A) | {atom(), atom(), [any()]}).
+-type monitor_ref() :: reference() | {atom(), pid()}.
+-type fun_or_mfa(A) :: fun ((pid()) -> A) | {atom(), atom(), [any()]}.
 
--spec(start_link/1 ::
-        (non_neg_integer()) -> {'ok', pid()} | ignore | {'error', any()}).
--spec(invoke/2 :: ( pid(),  fun_or_mfa(A)) -> A;
-                  ([pid()], fun_or_mfa(A)) -> {[{pid(), A}],
-                                               [{pid(), term()}]}).
--spec(invoke_no_result/2 :: (pid() | [pid()], fun_or_mfa(any())) -> 'ok').
--spec(monitor/2 :: ('process', pid()) -> monitor_ref()).
--spec(demonitor/1 :: (monitor_ref()) -> 'true').
+-spec start_link
+        (non_neg_integer()) -> {'ok', pid()} | ignore | {'error', any()}.
+-spec invoke
+        ( pid(),  fun_or_mfa(A)) -> A;
+        ([pid()], fun_or_mfa(A)) -> {[{pid(), A}], [{pid(), term()}]}.
+-spec invoke_no_result(pid() | [pid()], fun_or_mfa(any())) -> 'ok'.
+-spec monitor('process', pid()) -> monitor_ref().
+-spec demonitor(monitor_ref()) -> 'true'.
 
--spec(call/2 ::
+-spec call
         ( pid(),  any()) -> any();
-        ([pid()], any()) -> {[{pid(), any()}], [{pid(), term()}]}).
--spec(cast/2 :: (pid() | [pid()], any()) -> 'ok').
-
--endif.
+        ([pid()], any()) -> {[{pid(), any()}], [{pid(), term()}]}.
+-spec cast(pid() | [pid()], any()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -131,7 +127,7 @@ invoke(Pids, FunOrMFA) when is_list(Pids) ->
       end, {[], BadPids}, ResultsNoNode).
 
 invoke_no_result(Pid, FunOrMFA) when is_pid(Pid) andalso node(Pid) =:= node() ->
-    safe_invoke(Pid, FunOrMFA), %% we don't care about any error
+    _ = safe_invoke(Pid, FunOrMFA), %% we don't care about any error
     ok;
 invoke_no_result(Pid, FunOrMFA) when is_pid(Pid) ->
     invoke_no_result([Pid], FunOrMFA);
@@ -139,7 +135,7 @@ invoke_no_result(Pid, FunOrMFA) when is_pid(Pid) ->
 invoke_no_result([], _FunOrMFA) -> %% optimisation
     ok;
 invoke_no_result([Pid], FunOrMFA) when node(Pid) =:= node() -> %% optimisation
-    safe_invoke(Pid, FunOrMFA), %% must not die
+    _ = safe_invoke(Pid, FunOrMFA), %% must not die
     ok;
 invoke_no_result(Pids, FunOrMFA) when is_list(Pids) ->
     {LocalPids, Grouped} = group_pids_by_node(Pids),
@@ -149,7 +145,7 @@ invoke_no_result(Pids, FunOrMFA) when is_list(Pids) ->
                          RemoteNodes, delegate(self(), RemoteNodes),
                          {invoke, FunOrMFA, Grouped})
     end,
-    safe_invoke(LocalPids, FunOrMFA), %% must not die
+    _ = safe_invoke(LocalPids, FunOrMFA), %% must not die
     ok.
 
 monitor(process, Pid) when node(Pid) =:= node() ->
@@ -247,7 +243,7 @@ handle_cast({demonitor, MonitoringPid, Pid},
     {noreply, State#state{monitors = Monitors1}, hibernate};
 
 handle_cast({invoke, FunOrMFA, Grouped}, State = #state{node = Node}) ->
-    safe_invoke(orddict:fetch(Node, Grouped), FunOrMFA),
+    _ = safe_invoke(orddict:fetch(Node, Grouped), FunOrMFA),
     {noreply, State, hibernate}.
 
 handle_info({'DOWN', Ref, process, Pid, Info},
similarity index 88%
rename from rabbitmq-server/src/delegate_sup.erl
rename to deps/rabbit/src/delegate_sup.erl
index a285667d62b24956f31399bd77c05a0265c5c36e..ba0964f9dd6ce9980fdf3217539e376099db76b0 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(delegate_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/1 :: (integer()) -> rabbit_types:ok_pid_or_error()).
--spec(count/1 :: ([node()]) -> integer()).
-
--endif.
+-spec start_link(integer()) -> rabbit_types:ok_pid_or_error().
+-spec count([node()]) -> integer().
 
 %%----------------------------------------------------------------------------
 
similarity index 86%
rename from rabbitmq-server/src/dtree.erl
rename to deps/rabbit/src/dtree.erl
index 0fef3b2d1f16f61d54326258486e6b5ff1ea6158..a2232c06874fbbbe5da2485f42830327f147ada0 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% A dual-index tree.
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([?MODULE/0]).
 
--opaque(?MODULE()  :: {gb_trees:tree(), gb_trees:tree()}).
-
--type(pk()         :: any()).
--type(sk()         :: any()).
--type(val()        :: any()).
--type(kv()         :: {pk(), val()}).
+-opaque ?MODULE()  :: {gb_trees:tree(), gb_trees:tree()}.
 
--spec(empty/0      :: () -> ?MODULE()).
--spec(insert/4     :: (pk(), [sk()], val(), ?MODULE()) -> ?MODULE()).
--spec(take/3       :: ([pk()], sk(), ?MODULE()) -> {[kv()], ?MODULE()}).
--spec(take/2       :: (sk(), ?MODULE()) -> {[kv()], ?MODULE()}).
--spec(take_all/2   :: (sk(), ?MODULE()) -> {[kv()], ?MODULE()}).
--spec(drop/2       :: (pk(), ?MODULE()) -> ?MODULE()).
--spec(is_defined/2 :: (sk(), ?MODULE()) -> boolean()).
--spec(is_empty/1   :: (?MODULE()) -> boolean()).
--spec(smallest/1   :: (?MODULE()) -> kv()).
--spec(size/1       :: (?MODULE()) -> non_neg_integer()).
+-type pk()         :: any().
+-type sk()         :: any().
+-type val()        :: any().
+-type kv()         :: {pk(), val()}.
 
--endif.
+-spec empty() -> ?MODULE().
+-spec insert(pk(), [sk()], val(), ?MODULE()) -> ?MODULE().
+-spec take([pk()], sk(), ?MODULE()) -> {[kv()], ?MODULE()}.
+-spec take(sk(), ?MODULE()) -> {[kv()], ?MODULE()}.
+-spec take_all(sk(), ?MODULE()) -> {[kv()], ?MODULE()}.
+-spec drop(pk(), ?MODULE()) -> ?MODULE().
+-spec is_defined(sk(), ?MODULE()) -> boolean().
+-spec is_empty(?MODULE()) -> boolean().
+-spec smallest(?MODULE()) -> kv().
+-spec size(?MODULE()) -> non_neg_integer().
 
 %%----------------------------------------------------------------------------
 
@@ -69,7 +65,7 @@ empty() -> {gb_trees:empty(), gb_trees:empty()}.
 %% primary key.
 insert(PK, [], V, {P, S}) ->
     %% dummy insert to force error if PK exists
-    gb_trees:insert(PK, {gb_sets:empty(), V}, P),
+    _ = gb_trees:insert(PK, {gb_sets:empty(), V}, P),
     {P, S};
 insert(PK, SKs, V, {P, S}) ->
     {gb_trees:insert(PK, {gb_sets:from_list(SKs), V}, P),
similarity index 89%
rename from rabbitmq-server/src/file_handle_cache.erl
rename to deps/rabbit/src/file_handle_cache.erl
index d7e5abc8730bb241e3bfa931752db00659de9afc..e4af1e8c1a5e39df437f12cf9afac39e0b2d15c6 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(file_handle_cache).
 -export([register_callback/3]).
 -export([open/3, close/1, read/2, append/2, needs_sync/1, sync/1, position/2,
          truncate/1, current_virtual_offset/1, current_raw_offset/1, flush/1,
-         copy/3, set_maximum_since_use/1, delete/1, clear/1]).
+         copy/3, set_maximum_since_use/1, delete/1, clear/1,
+         open_with_absolute_path/3]).
 -export([obtain/0, obtain/1, release/0, release/1, transfer/1, transfer/2,
          set_limit/1, get_limit/0, info_keys/0, with_handle/1, with_handle/2,
          info/0, info/1, clear_read_cache/0]).
 
 -record(handle,
         { hdl,
+          ref,
           offset,
           is_dirty,
           write_buffer_size,
 %% Specs
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(ref() :: any()).
--type(ok_or_error() :: 'ok' | {'error', any()}).
--type(val_or_error(T) :: {'ok', T} | {'error', any()}).
--type(position() :: ('bof' | 'eof' | non_neg_integer() |
+-type ref() :: any().
+-type ok_or_error() :: 'ok' | {'error', any()}.
+-type val_or_error(T) :: {'ok', T} | {'error', any()}.
+-type position() :: ('bof' | 'eof' | non_neg_integer() |
                      {('bof' |'eof'), non_neg_integer()} |
-                     {'cur', integer()})).
--type(offset() :: non_neg_integer()).
+                     {'cur', integer()}).
+-type offset() :: non_neg_integer().
 
--spec(register_callback/3 :: (atom(), atom(), [any()]) -> 'ok').
--spec(open/3 ::
+-spec register_callback(atom(), atom(), [any()]) -> 'ok'.
+-spec open
+        (file:filename(), [any()],
+         [{'write_buffer', (non_neg_integer() | 'infinity' | 'unbuffered')} |
+          {'read_buffer', (non_neg_integer() | 'unbuffered')}]) ->
+            val_or_error(ref()).
+-spec open_with_absolute_path
         (file:filename(), [any()],
          [{'write_buffer', (non_neg_integer() | 'infinity' | 'unbuffered')} |
-          {'read_buffer', (non_neg_integer() | 'unbuffered')}])
-        -> val_or_error(ref())).
--spec(close/1 :: (ref()) -> ok_or_error()).
--spec(read/2 :: (ref(), non_neg_integer()) ->
-                     val_or_error([char()] | binary()) | 'eof').
--spec(append/2 :: (ref(), iodata()) -> ok_or_error()).
--spec(sync/1 :: (ref()) ->  ok_or_error()).
--spec(position/2 :: (ref(), position()) -> val_or_error(offset())).
--spec(truncate/1 :: (ref()) -> ok_or_error()).
--spec(current_virtual_offset/1 :: (ref()) -> val_or_error(offset())).
--spec(current_raw_offset/1     :: (ref()) -> val_or_error(offset())).
--spec(flush/1 :: (ref()) -> ok_or_error()).
--spec(copy/3 :: (ref(), ref(), non_neg_integer()) ->
-                     val_or_error(non_neg_integer())).
--spec(delete/1 :: (ref()) -> ok_or_error()).
--spec(clear/1 :: (ref()) -> ok_or_error()).
--spec(set_maximum_since_use/1 :: (non_neg_integer()) -> 'ok').
--spec(obtain/0 :: () -> 'ok').
--spec(obtain/1 :: (non_neg_integer()) -> 'ok').
--spec(release/0 :: () -> 'ok').
--spec(release/1 :: (non_neg_integer()) -> 'ok').
--spec(transfer/1 :: (pid()) -> 'ok').
--spec(transfer/2 :: (pid(), non_neg_integer()) -> 'ok').
--spec(with_handle/1 :: (fun(() -> A)) -> A).
--spec(with_handle/2 :: (non_neg_integer(), fun(() -> A)) -> A).
--spec(set_limit/1 :: (non_neg_integer()) -> 'ok').
--spec(get_limit/0 :: () -> non_neg_integer()).
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(info/0 :: () -> rabbit_types:infos()).
--spec(info/1 :: ([atom()]) -> rabbit_types:infos()).
--spec(ulimit/0 :: () -> 'unknown' | non_neg_integer()).
-
--endif.
+          {'read_buffer', (non_neg_integer() | 'unbuffered')}]) ->
+            val_or_error(ref()).
+-spec close(ref()) -> ok_or_error().
+-spec read
+        (ref(), non_neg_integer()) -> val_or_error([char()] | binary()) | 'eof'.
+-spec append(ref(), iodata()) -> ok_or_error().
+-spec sync(ref()) ->  ok_or_error().
+-spec position(ref(), position()) -> val_or_error(offset()).
+-spec truncate(ref()) -> ok_or_error().
+-spec current_virtual_offset(ref()) -> val_or_error(offset()).
+-spec current_raw_offset(ref()) -> val_or_error(offset()).
+-spec flush(ref()) -> ok_or_error().
+-spec copy(ref(), ref(), non_neg_integer()) -> val_or_error(non_neg_integer()).
+-spec delete(ref()) -> ok_or_error().
+-spec clear(ref()) -> ok_or_error().
+-spec set_maximum_since_use(non_neg_integer()) -> 'ok'.
+-spec obtain() -> 'ok'.
+-spec obtain(non_neg_integer()) -> 'ok'.
+-spec release() -> 'ok'.
+-spec release(non_neg_integer()) -> 'ok'.
+-spec transfer(pid()) -> 'ok'.
+-spec transfer(pid(), non_neg_integer()) -> 'ok'.
+-spec with_handle(fun(() -> A)) -> A.
+-spec with_handle(non_neg_integer(), fun(() -> A)) -> A.
+-spec set_limit(non_neg_integer()) -> 'ok'.
+-spec get_limit() -> non_neg_integer().
+-spec info_keys() -> rabbit_types:info_keys().
+-spec info() -> rabbit_types:infos().
+-spec info([atom()]) -> rabbit_types:infos().
+-spec ulimit() -> 'unknown' | non_neg_integer().
 
 %%----------------------------------------------------------------------------
 -define(INFO_KEYS, [total_limit, total_used, sockets_limit, sockets_used]).
@@ -299,9 +301,11 @@ register_callback(M, F, A)
     gen_server2:cast(?SERVER, {register_callback, self(), {M, F, A}}).
 
 open(Path, Mode, Options) ->
-    Path1 = filename:absname(Path),
+    open_with_absolute_path(filename:absname(Path), Mode, Options).
+
+open_with_absolute_path(Path, Mode, Options) ->
     File1 = #file { reader_count = RCount, has_writer = HasWriter } =
-        case get({Path1, fhc_file}) of
+        case get({Path, fhc_file}) of
             File = #file {} -> File;
             undefined       -> #file { reader_count = 0,
                                        has_writer = false }
@@ -310,15 +314,15 @@ open(Path, Mode, Options) ->
     IsWriter = is_writer(Mode1),
     case IsWriter andalso HasWriter of
         true  -> {error, writer_exists};
-        false -> {ok, Ref} = new_closed_handle(Path1, Mode1, Options),
-                 case get_or_reopen([{Ref, new}]) of
+        false -> {ok, Ref} = new_closed_handle(Path, Mode1, Options),
+                 case get_or_reopen_timed([{Ref, new}]) of
                      {ok, [_Handle1]} ->
                          RCount1 = case is_reader(Mode1) of
                                        true  -> RCount + 1;
                                        false -> RCount
                                    end,
                          HasWriter1 = HasWriter orelse IsWriter,
-                         put({Path1, fhc_file},
+                         put({Path, fhc_file},
                              File1 #file { reader_count = RCount1,
                                            has_writer = HasWriter1 }),
                          {ok, Ref};
@@ -374,7 +378,7 @@ read(Ref, Count) ->
                                offset           = Offset}
                   = tune_read_buffer_limit(Handle0, Count),
               WantedCount = Count - BufRem,
-              case prim_file_read(Hdl, lists:max([BufSz, WantedCount])) of
+              case prim_file_read(Hdl, max(BufSz, WantedCount)) of
                   {ok, Data} ->
                       <<_:BufPos/binary, BufTl/binary>> = Buf,
                       ReadCount = size(Data),
@@ -536,12 +540,15 @@ clear(Ref) ->
       end).
 
 set_maximum_since_use(MaximumAge) ->
-    Now = now(),
+    Now = time_compat:monotonic_time(),
     case lists:foldl(
            fun ({{Ref, fhc_handle},
                  Handle = #handle { hdl = Hdl, last_used_at = Then }}, Rep) ->
                    case Hdl =/= closed andalso
-                       timer:now_diff(Now, Then) >= MaximumAge of
+                        time_compat:convert_time_unit(Now - Then,
+                                                      native,
+                                                      micro_seconds)
+                          >= MaximumAge of
                        true  -> soft_close(Ref, Handle) orelse Rep;
                        false -> Rep
                    end;
@@ -595,9 +602,11 @@ info(Items) -> gen_server2:call(?SERVER, {info, Items}, infinity).
 
 clear_read_cache() ->
     case application:get_env(rabbit, fhc_read_buffering) of
-        false -> ok;
-        true  -> gen_server2:cast(?SERVER, clear_read_cache),
-                 clear_vhost_read_cache(rabbit_vhost:list())
+        {ok, true} ->
+            gen_server2:cast(?SERVER, clear_read_cache),
+            clear_vhost_read_cache(rabbit_vhost:list());
+        _ -> %% undefined or {ok, false}
+            ok
     end.
 
 clear_vhost_read_cache([]) ->
@@ -615,7 +624,7 @@ clear_queue_read_cache([#amqqueue{pid = MPid, slave_pids = SPids} | Rest]) ->
     %% process because the read buffer is stored in the process
     %% dictionary.
     Fun = fun(_, State) ->
-                  clear_process_read_cache(),
+                  _ = clear_process_read_cache(),
                   State
           end,
     [rabbit_amqqueue:run_backing_queue(Pid, rabbit_variable_queue, Fun)
@@ -665,7 +674,7 @@ with_handles(Refs, Fun) ->
     with_handles(Refs, reset, Fun).
 
 with_handles(Refs, ReadBuffer, Fun) ->
-    case get_or_reopen([{Ref, reopen} || Ref <- Refs]) of
+    case get_or_reopen_timed([{Ref, reopen} || Ref <- Refs]) of
         {ok, Handles0} ->
             Handles = case ReadBuffer of
                           reset -> [reset_read_buffer(H) || H <- Handles0];
@@ -673,7 +682,7 @@ with_handles(Refs, ReadBuffer, Fun) ->
                       end,
             case Fun(Handles) of
                 {Result, Handles1} when is_list(Handles1) ->
-                    lists:zipwith(fun put_handle/2, Refs, Handles1),
+                    _ = lists:zipwith(fun put_handle/2, Refs, Handles1),
                     Result;
                 Result ->
                     Result
@@ -703,12 +712,17 @@ with_flushed_handles(Refs, ReadBuffer, Fun) ->
               end
       end).
 
+get_or_reopen_timed(RefNewOrReopens) ->
+    file_handle_cache_stats:update(
+      io_file_handle_open_attempt, fun() -> get_or_reopen(RefNewOrReopens) end).
+
 get_or_reopen(RefNewOrReopens) ->
     case partition_handles(RefNewOrReopens) of
         {OpenHdls, []} ->
             {ok, [Handle || {_Ref, Handle} <- OpenHdls]};
         {OpenHdls, ClosedHdls} ->
-            Oldest = oldest(get_age_tree(), fun () -> now() end),
+            Oldest = oldest(get_age_tree(),
+                            fun () -> time_compat:monotonic_time() end),
             case gen_server2:call(?SERVER, {open, self(), length(ClosedHdls),
                                             Oldest}, infinity) of
                 ok ->
@@ -744,14 +758,14 @@ reopen([{Ref, NewOrReopen, Handle = #handle { hdl          = closed,
            end,
     case prim_file:open(Path, Mode) of
         {ok, Hdl} ->
-            Now = now(),
+            Now = time_compat:monotonic_time(),
             {{ok, _Offset}, Handle1} =
                 maybe_seek(Offset, reset_read_buffer(
                                      Handle#handle{hdl              = Hdl,
                                                    offset           = 0,
                                                    last_used_at     = Now})),
             put({Ref, fhc_handle}, Handle1),
-            reopen(RefNewOrReopenHdls, gb_trees:insert(Now, Ref, Tree),
+            reopen(RefNewOrReopenHdls, gb_trees:insert({Now, Ref}, true, Tree),
                    [{Ref, Handle1} | RefHdls]);
         Error ->
             %% NB: none of the handles in ToOpen are in the age tree
@@ -780,7 +794,7 @@ sort_handles([{Ref, _} | RefHdls], RefHdlsA, [{Ref, Handle} | RefHdlsB], Acc) ->
     sort_handles(RefHdls, RefHdlsA, RefHdlsB, [Handle | Acc]).
 
 put_handle(Ref, Handle = #handle { last_used_at = Then }) ->
-    Now = now(),
+    Now = time_compat:monotonic_time(),
     age_tree_update(Then, Now, Ref),
     put({Ref, fhc_handle}, Handle #handle { last_used_at = Now }).
 
@@ -797,13 +811,14 @@ put_age_tree(Tree) -> put(fhc_age_tree, Tree).
 age_tree_update(Then, Now, Ref) ->
     with_age_tree(
       fun (Tree) ->
-              gb_trees:insert(Now, Ref, gb_trees:delete_any(Then, Tree))
+              gb_trees:insert({Now, Ref}, true,
+                              gb_trees:delete_any({Then, Ref}, Tree))
       end).
 
-age_tree_delete(Then) ->
+age_tree_delete(Then, Ref) ->
     with_age_tree(
       fun (Tree) ->
-              Tree1 = gb_trees:delete_any(Then, Tree),
+              Tree1 = gb_trees:delete_any({Then, Ref}, Tree),
               Oldest = oldest(Tree1, fun () -> undefined end),
               gen_server2:cast(?SERVER, {close, self(), Oldest}),
               Tree1
@@ -814,16 +829,16 @@ age_tree_change() ->
       fun (Tree) ->
               case gb_trees:is_empty(Tree) of
                   true  -> Tree;
-                  false -> {Oldest, _Ref} = gb_trees:smallest(Tree),
-                           gen_server2:cast(?SERVER, {update, self(), Oldest})
-              end,
-              Tree
+                  false -> {{Oldest, _Ref}, _} = gb_trees:smallest(Tree),
+                           gen_server2:cast(?SERVER, {update, self(), Oldest}),
+                           Tree
+              end
       end).
 
 oldest(Tree, DefaultFun) ->
     case gb_trees:is_empty(Tree) of
         true  -> DefaultFun();
-        false -> {Oldest, _Ref} = gb_trees:smallest(Tree),
+        false -> {{Oldest, _Ref}, _} = gb_trees:smallest(Tree),
                  Oldest
     end.
 
@@ -849,6 +864,7 @@ new_closed_handle(Path, Mode, Options) ->
         end,
     Ref = make_ref(),
     put({Ref, fhc_handle}, #handle { hdl                     = closed,
+                                     ref                     = Ref,
                                      offset                  = 0,
                                      is_dirty                = false,
                                      write_buffer_size       = 0,
@@ -883,6 +899,7 @@ soft_close(Handle = #handle { hdl = closed }) ->
 soft_close(Handle) ->
     case write_buffer(Handle) of
         {ok, #handle { hdl         = Hdl,
+                       ref         = Ref,
                        is_dirty    = IsDirty,
                        last_used_at = Then } = Handle1 } ->
             ok = case IsDirty of
@@ -890,7 +907,7 @@ soft_close(Handle) ->
                      false -> ok
                  end,
             ok = prim_file:close(Hdl),
-            age_tree_delete(Then),
+            age_tree_delete(Then, Ref),
             {ok, Handle1 #handle { hdl            = closed,
                                    is_dirty       = false,
                                    last_used_at   = undefined }};
@@ -1070,7 +1087,7 @@ used(#fhc_state{open_count          = C1,
 %%----------------------------------------------------------------------------
 
 init([AlarmSet, AlarmClear]) ->
-    file_handle_cache_stats:init(),
+    _ = file_handle_cache_stats:init(),
     Limit = case application:get_env(file_handles_high_watermark) of
                 {ok, Watermark} when (is_integer(Watermark) andalso
                                       Watermark > 0) ->
@@ -1209,7 +1226,7 @@ handle_cast({transfer, N, FromPid, ToPid}, State) ->
                                             State)))};
 
 handle_cast(clear_read_cache, State) ->
-    clear_process_read_cache(),
+    _ = clear_process_read_cache(),
     {noreply, State}.
 
 handle_info(check_counts, State) ->
@@ -1287,11 +1304,6 @@ pending_out({N, Queue}) ->
 pending_count({Count, _Queue}) ->
     Count.
 
-pending_is_empty({0, _Queue}) ->
-    true;
-pending_is_empty({_N, _Queue}) ->
-    false.
-
 %%----------------------------------------------------------------------------
 %% server helpers
 %%----------------------------------------------------------------------------
@@ -1338,17 +1350,24 @@ process_open(State = #fhc_state { limit        = Limit,
     {Pending1, State1} = process_pending(Pending, Limit - used(State), State),
     State1 #fhc_state { open_pending = Pending1 }.
 
-process_obtain(Type, State = #fhc_state { limit        = Limit,
-                                          obtain_limit = ObtainLimit }) ->
-    ObtainCount = obtain_state(Type, count, State),
-    Pending = obtain_state(Type, pending, State),
-    Quota = case Type of
-                file   -> Limit - (used(State));
-                socket -> lists:min([ObtainLimit - ObtainCount,
-                                     Limit - (used(State))])
-            end,
+process_obtain(socket, State = #fhc_state { limit        = Limit,
+                                            obtain_limit = ObtainLimit,
+                                            open_count = OpenCount,
+                                            obtain_count_socket = ObtainCount,
+                                            obtain_pending_socket = Pending,
+                                            obtain_count_file = ObtainCountF}) ->
+    Quota = min(ObtainLimit - ObtainCount,
+                Limit - (OpenCount + ObtainCount + ObtainCountF)),
+    {Pending1, State1} = process_pending(Pending, Quota, State),
+    State1#fhc_state{obtain_pending_socket = Pending1};
+process_obtain(file, State = #fhc_state { limit        = Limit,
+                                          open_count = OpenCount,
+                                          obtain_count_socket = ObtainCountS,
+                                          obtain_count_file = ObtainCountF,
+                                          obtain_pending_file = Pending}) ->
+    Quota = Limit - (OpenCount + ObtainCountS + ObtainCountF),
     {Pending1, State1} = process_pending(Pending, Quota, State),
-    set_obtain_state(Type, pending, Pending1, State1).
+    State1#fhc_state{obtain_pending_file = Pending1}.
 
 process_pending(Pending, Quota, State) when Quota =< 0 ->
     {Pending, State};
@@ -1373,26 +1392,21 @@ run_pending_item(#pending { kind      = Kind,
     true = ets:update_element(Clients, Pid, {#cstate.blocked, false}),
     update_counts(Kind, Pid, Requested, State).
 
-update_counts(Kind, Pid, Delta,
+update_counts(open, Pid, Delta,
               State = #fhc_state { open_count          = OpenCount,
-                                   obtain_count_file   = ObtainCountF,
-                                   obtain_count_socket = ObtainCountS,
                                    clients             = Clients }) ->
-    {OpenDelta, ObtainDeltaF, ObtainDeltaS} =
-        update_counts1(Kind, Pid, Delta, Clients),
-    State #fhc_state { open_count          = OpenCount    + OpenDelta,
-                       obtain_count_file   = ObtainCountF + ObtainDeltaF,
-                       obtain_count_socket = ObtainCountS + ObtainDeltaS }.
-
-update_counts1(open, Pid, Delta, Clients) ->
     ets:update_counter(Clients, Pid, {#cstate.opened, Delta}),
-    {Delta, 0, 0};
-update_counts1({obtain, file}, Pid, Delta, Clients) ->
+    State #fhc_state { open_count = OpenCount + Delta};
+update_counts({obtain, file}, Pid, Delta,
+              State = #fhc_state {obtain_count_file   = ObtainCountF,
+                                  clients             = Clients }) ->
     ets:update_counter(Clients, Pid, {#cstate.obtained_file, Delta}),
-    {0, Delta, 0};
-update_counts1({obtain, socket}, Pid, Delta, Clients) ->
+    State #fhc_state { obtain_count_file = ObtainCountF + Delta};
+update_counts({obtain, socket}, Pid, Delta,
+              State = #fhc_state {obtain_count_socket   = ObtainCountS,
+                                  clients             = Clients }) ->
     ets:update_counter(Clients, Pid, {#cstate.obtained_socket, Delta}),
-    {0, 0, Delta}.
+    State #fhc_state { obtain_count_socket = ObtainCountS + Delta}.
 
 maybe_reduce(State) ->
     case needs_reduce(State) of
@@ -1400,18 +1414,20 @@ maybe_reduce(State) ->
         false -> State
     end.
 
-needs_reduce(State = #fhc_state { limit                 = Limit,
-                                  open_pending          = OpenPending,
-                                  obtain_limit          = ObtainLimit,
-                                  obtain_count_socket   = ObtainCountS,
-                                  obtain_pending_file   = ObtainPendingF,
-                                  obtain_pending_socket = ObtainPendingS }) ->
+needs_reduce(#fhc_state { limit                 = Limit,
+                          open_count            = OpenCount,
+                          open_pending          = {OpenPending, _},
+                          obtain_limit          = ObtainLimit,
+                          obtain_count_socket   = ObtainCountS,
+                          obtain_count_file     = ObtainCountF,
+                          obtain_pending_file   = {ObtainPendingF, _},
+                          obtain_pending_socket = {ObtainPendingS, _} }) ->
     Limit =/= infinity
-        andalso ((used(State) > Limit)
-                 orelse (not pending_is_empty(OpenPending))
-                 orelse (not pending_is_empty(ObtainPendingF))
+        andalso (((OpenCount + ObtainCountS + ObtainCountF) > Limit)
+                 orelse (OpenPending =/= 0)
+                 orelse (ObtainPendingF =/= 0)
                  orelse (ObtainCountS < ObtainLimit
-                         andalso not pending_is_empty(ObtainPendingS))).
+                         andalso (ObtainPendingS =/= 0))).
 
 reduce(State = #fhc_state { open_pending          = OpenPending,
                             obtain_pending_file   = ObtainPendingFile,
@@ -1419,17 +1435,19 @@ reduce(State = #fhc_state { open_pending          = OpenPending,
                             elders                = Elders,
                             clients               = Clients,
                             timer_ref             = TRef }) ->
-    Now = now(),
+    Now = time_compat:monotonic_time(),
     {CStates, Sum, ClientCount} =
         ets:foldl(fun ({Pid, Eldest}, {CStatesAcc, SumAcc, CountAcc} = Accs) ->
                           [#cstate { pending_closes = PendingCloses,
                                      opened         = Opened,
                                      blocked        = Blocked } = CState] =
                               ets:lookup(Clients, Pid),
+                          TimeDiff = time_compat:convert_time_unit(
+                            Now - Eldest, native, micro_seconds),
                           case Blocked orelse PendingCloses =:= Opened of
                               true  -> Accs;
                               false -> {[CState | CStatesAcc],
-                                        SumAcc + timer:now_diff(Now, Eldest),
+                                        SumAcc + TimeDiff,
                                         CountAcc + 1}
                           end
                   end, {[], 0, 0}, Elders),
@@ -1463,7 +1481,7 @@ notify_age(CStates, AverageAge) ->
 notify_age0(Clients, CStates, Required) ->
     case [CState || CState <- CStates, CState#cstate.callback =/= undefined] of
         []            -> ok;
-        Notifications -> S = random:uniform(length(Notifications)),
+        Notifications -> S = rand_compat:uniform(length(Notifications)),
                          {L1, L2} = lists:split(S, Notifications),
                          notify(Clients, Required, L2 ++ L1)
     end.
similarity index 87%
rename from rabbitmq-server/src/file_handle_cache_stats.erl
rename to deps/rabbit/src/file_handle_cache_stats.erl
index 5f6926b5d23840dbe57a93c54d8ac5c4ea99b249..12a78f805e2295593646633a290ca7c0bbd75af4 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(file_handle_cache_stats).
@@ -26,7 +26,7 @@
         [io_reopen, mnesia_ram_tx, mnesia_disk_tx,
          msg_store_read, msg_store_write,
          queue_index_journal_write, queue_index_write, queue_index_read]).
--define(COUNT_TIME, [io_sync, io_seek]).
+-define(COUNT_TIME, [io_sync, io_seek, io_file_handle_open_attempt]).
 -define(COUNT_TIME_BYTES, [io_read, io_write]).
 
 init() ->
@@ -58,10 +58,9 @@ update(Op) ->
 get() ->
     lists:sort(ets:tab2list(?TABLE)).
 
-%% TODO timer:tc/1 was introduced in R14B03; use that function once we
-%% require that version.
 timer_tc(Thunk) ->
-    T1 = os:timestamp(),
+    T1 = time_compat:monotonic_time(),
     Res = Thunk(),
-    T2 = os:timestamp(),
-    {timer:now_diff(T2, T1), Res}.
+    T2 = time_compat:monotonic_time(),
+    Diff = time_compat:convert_time_unit(T2 - T1, native, micro_seconds),
+    {Diff, Res}.
similarity index 90%
rename from rabbitmq-server/src/gatherer.erl
rename to deps/rabbit/src/gatherer.erl
index 89bd46ca378d26df032055b98e3804bf1788bb21..18302699a297f8fdb32adc94790fb674ee996eb2 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(gatherer).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(stop/1 :: (pid()) -> 'ok').
--spec(fork/1 :: (pid()) -> 'ok').
--spec(finish/1 :: (pid()) -> 'ok').
--spec(in/2 :: (pid(), any()) -> 'ok').
--spec(sync_in/2 :: (pid(), any()) -> 'ok').
--spec(out/1 :: (pid()) -> {'value', any()} | 'empty').
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec stop(pid()) -> 'ok'.
+-spec fork(pid()) -> 'ok'.
+-spec finish(pid()) -> 'ok'.
+-spec in(pid(), any()) -> 'ok'.
+-spec sync_in(pid(), any()) -> 'ok'.
+-spec out(pid()) -> {'value', any()} | 'empty'.
 
 %%----------------------------------------------------------------------------
 
@@ -120,8 +116,8 @@ handle_call(Msg, _From, State) ->
 handle_cast(finish, State = #gstate { forks = Forks, blocked = Blocked }) ->
     NewForks = Forks - 1,
     NewBlocked = case NewForks of
-                     0 -> [gen_server2:reply(From, empty) ||
-                              From <- queue:to_list(Blocked)],
+                     0 -> _ = [gen_server2:reply(From, empty) ||
+                                  From <- queue:to_list(Blocked)],
                           queue:new();
                      _ -> Blocked
                  end,
similarity index 82%
rename from rabbitmq-server/src/gm.erl
rename to deps/rabbit/src/gm.erl
index dbf9c295f9437de9c82f4b2853cca79db57a7524..41aa01f04d9546b63525d08d88742821ff648716 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(gm).
 %% For INSTR_MOD callbacks
 -export([call/3, cast/2, monitor/1, demonitor/1]).
 
--ifndef(use_specs).
--export([behaviour_info/1]).
--endif.
-
 -export([table_definitions/0]).
 
 -define(GROUP_TABLE, gm_group).
 
 -define(TAG, '$gm').
 
--ifdef(use_specs).
-
 -export_type([group_name/0]).
 
--type(group_name() :: any()).
--type(txn_fun() :: fun((fun(() -> any())) -> any())).
+-type group_name() :: any().
+-type txn_fun() :: fun((fun(() -> any())) -> any()).
 
--spec(create_tables/0 :: () -> 'ok' | {'aborted', any()}).
--spec(start_link/4 :: (group_name(), atom(), any(), txn_fun()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(leave/1 :: (pid()) -> 'ok').
--spec(broadcast/2 :: (pid(), any()) -> 'ok').
--spec(confirmed_broadcast/2 :: (pid(), any()) -> 'ok').
--spec(info/1 :: (pid()) -> rabbit_types:infos()).
--spec(validate_members/2 :: (pid(), [pid()]) -> 'ok').
--spec(forget_group/1 :: (group_name()) -> 'ok').
+-spec create_tables() -> 'ok' | {'aborted', any()}.
+-spec start_link(group_name(), atom(), any(), txn_fun()) ->
+          rabbit_types:ok_pid_or_error().
+-spec leave(pid()) -> 'ok'.
+-spec broadcast(pid(), any()) -> 'ok'.
+-spec confirmed_broadcast(pid(), any()) -> 'ok'.
+-spec info(pid()) -> rabbit_types:infos().
+-spec validate_members(pid(), [pid()]) -> 'ok'.
+-spec forget_group(group_name()) -> 'ok'.
 
 %% The joined, members_changed and handle_msg callbacks can all return
 %% any of the following terms:
 -callback handle_terminate(Args :: term(), Reason :: term()) ->
     ok | term().
 
--else.
-
-behaviour_info(callbacks) ->
-    [{joined, 2}, {members_changed, 3}, {handle_msg, 3}, {handle_terminate, 2}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
-
 create_tables() ->
     create_tables([?TABLE]).
 
@@ -551,8 +536,6 @@ forget_group(GroupName) ->
 
 init([GroupName, Module, Args, TxnFun]) ->
     put(process_name, {?MODULE, GroupName}),
-    {MegaSecs, Secs, MicroSecs} = now(),
-    random:seed(MegaSecs, Secs, MicroSecs),
     Self = make_member(GroupName),
     gen_server2:cast(self(), join),
     {ok, #state { self                = Self,
@@ -616,14 +599,20 @@ handle_call({add_on_right, NewMember}, _From,
                              group_name    = GroupName,
                              members_state = MembersState,
                              txn_executor  = TxnFun }) ->
-    Group = record_new_member_in_group(NewMember, Self, GroupName, TxnFun),
-    View1 = group_to_view(Group),
-    MembersState1 = remove_erased_members(MembersState, View1),
-    ok = send_right(NewMember, View1,
-                    {catchup, Self, prepare_members_state(MembersState1)}),
-    {Result, State1} = change_view(View1, State #state {
-                                            members_state = MembersState1 }),
-    handle_callback_result({Result, {ok, Group}, State1}).
+    try
+        Group = record_new_member_in_group(
+                  NewMember, Self, GroupName, TxnFun),
+        View1 = group_to_view(check_membership(Self, Group)),
+        MembersState1 = remove_erased_members(MembersState, View1),
+        ok = send_right(NewMember, View1,
+                        {catchup, Self, prepare_members_state(MembersState1)}),
+        {Result, State1} = change_view(View1, State #state {
+                                                members_state = MembersState1 }),
+        handle_callback_result({Result, {ok, Group}, State1})
+    catch
+        lost_membership ->
+            {stop, shutdown, State}
+    end.
 
 %% add_on_right causes a catchup to be sent immediately from the left,
 %% so we can never see this from the left neighbour. However, it's
@@ -637,19 +626,28 @@ handle_cast({?TAG, _ReqVer, check_neighbours},
 
 handle_cast({?TAG, ReqVer, Msg},
             State = #state { view          = View,
+                             self          = Self,
                              members_state = MembersState,
                              group_name    = GroupName }) ->
-    {Result, State1} =
-        case needs_view_update(ReqVer, View) of
-            true  -> View1 = group_to_view(dirty_read_group(GroupName)),
-                     MemberState1 = remove_erased_members(MembersState, View1),
-                     change_view(View1, State #state {
-                                          members_state = MemberState1 });
-            false -> {ok, State}
-        end,
-    handle_callback_result(
-      if_callback_success(
-        Result, fun handle_msg_true/3, fun handle_msg_false/3, Msg, State1));
+    try
+        {Result, State1} =
+            case needs_view_update(ReqVer, View) of
+                true  ->
+                    View1 = group_to_view(
+                              check_membership(Self,
+                                               dirty_read_group(GroupName))),
+                    MemberState1 = remove_erased_members(MembersState, View1),
+                    change_view(View1, State #state {
+                                         members_state = MemberState1 });
+                false -> {ok, State}
+            end,
+        handle_callback_result(
+          if_callback_success(
+            Result, fun handle_msg_true/3, fun handle_msg_false/3, Msg, State1))
+    catch
+        lost_membership ->
+            {stop, shutdown, State}
+    end;
 
 handle_cast({broadcast, _Msg, _SizeHint},
             State = #state { shutting_down = {true, _} }) ->
@@ -677,16 +675,21 @@ handle_cast(join, State = #state { self          = Self,
                                    module        = Module,
                                    callback_args = Args,
                                    txn_executor  = TxnFun }) ->
-    View = join_group(Self, GroupName, TxnFun),
-    MembersState =
-        case alive_view_members(View) of
-            [Self] -> blank_member_state();
-            _      -> undefined
-        end,
-    State1 = check_neighbours(State #state { view          = View,
-                                             members_state = MembersState }),
-    handle_callback_result(
-      {Module:joined(Args, get_pids(all_known_members(View))), State1});
+    try
+       View = join_group(Self, GroupName, TxnFun),
+       MembersState =
+           case alive_view_members(View) of
+               [Self] -> blank_member_state();
+               _      -> undefined
+           end,
+       State1 = check_neighbours(State #state { view          = View,
+                                                members_state = MembersState }),
+       handle_callback_result(
+         {Module:joined(Args, get_pids(all_known_members(View))), State1})
+    catch
+        lost_membership ->
+            {stop, shutdown, State}
+    end;
 
 handle_cast({validate_members, OldMembers},
             State = #state { view          = View,
@@ -712,6 +715,10 @@ handle_info(flush, State) ->
 handle_info(timeout, State) ->
     noreply(flush_broadcast_buffer(State));
 
+handle_info({'DOWN', _MRef, process, _Pid, _Reason},
+            State = #state { shutting_down =
+                                 {true, {shutdown, ring_shutdown}} }) ->
+    noreply(State);
 handle_info({'DOWN', MRef, process, _Pid, Reason},
             State = #state { self          = Self,
                              left          = Left,
@@ -719,43 +726,53 @@ handle_info({'DOWN', MRef, process, _Pid, Reason},
                              group_name    = GroupName,
                              confirms      = Confirms,
                              txn_executor  = TxnFun }) ->
-    Member = case {Left, Right} of
-                 {{Member1, MRef}, _} -> Member1;
-                 {_, {Member1, MRef}} -> Member1;
-                 _                    -> undefined
-             end,
-    case {Member, Reason} of
-        {undefined, _} ->
-            noreply(State);
-        {_, {shutdown, ring_shutdown}} ->
-            noreply(State);
-        _ ->
-            %% In the event of a partial partition we could see another member
-            %% go down and then remove them from Mnesia. While they can
-            %% recover from this they'd have to restart the queue - not
-            %% ideal. So let's sleep here briefly just in case this was caused
-            %% by a partial partition; in which case by the time we record the
-            %% member death in Mnesia we will probably be in a full
-            %% partition and will not be assassinating another member.
-            timer:sleep(100),
-            View1 = group_to_view(record_dead_member_in_group(
-                                    Member, GroupName, TxnFun)),
-            handle_callback_result(
-              case alive_view_members(View1) of
-                  [Self] -> maybe_erase_aliases(
-                              State #state {
-                                members_state = blank_member_state(),
-                                confirms      = purge_confirms(Confirms) },
-                              View1);
-                  _      -> change_view(View1, State)
-              end)
-    end.
-
+    try
+        check_membership(GroupName),
+        Member = case {Left, Right} of
+                     {{Member1, MRef}, _} -> Member1;
+                     {_, {Member1, MRef}} -> Member1;
+                     _                    -> undefined
+                 end,
+        case {Member, Reason} of
+            {undefined, _} ->
+                noreply(State);
+            {_, {shutdown, ring_shutdown}} ->
+                noreply(State);
+            _ ->
+                %% In the event of a partial partition we could see another member
+                %% go down and then remove them from Mnesia. While they can
+                %% recover from this they'd have to restart the queue - not
+                %% ideal. So let's sleep here briefly just in case this was caused
+                %% by a partial partition; in which case by the time we record the
+                %% member death in Mnesia we will probably be in a full
+                %% partition and will not be assassinating another member.
+                timer:sleep(100),
+                View1 = group_to_view(record_dead_member_in_group(Self,
+                                        Member, GroupName, TxnFun, true)),
+                handle_callback_result(
+                  case alive_view_members(View1) of
+                      [Self] -> maybe_erase_aliases(
+                                  State #state {
+                                    members_state = blank_member_state(),
+                                    confirms      = purge_confirms(Confirms) },
+                                  View1);
+                      _      -> change_view(View1, State)
+                  end)
+        end
+    catch
+        lost_membership ->
+            {stop, shutdown, State}
+    end;
+handle_info(_, State) ->
+    %% Discard any unexpected messages, such as late replies from neighbour_call/2
+    %% TODO: For #gm_group{} related info messages, it could be worthwhile to
+    %% change_view/2, as this might reflect an alteration in the gm group, meaning
+    %% we now need to update our state. see rabbitmq-server#914.
+    noreply(State).
 
 terminate(Reason, #state { module = Module, callback_args = Args }) ->
     Module:handle_terminate(Args, Reason).
 
-
 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.
 
@@ -836,52 +853,30 @@ handle_msg({catchup, _NotLeft, _MembersState}, State) ->
 
 handle_msg({activity, Left, Activity},
            State = #state { self          = Self,
+                            group_name    = GroupName,
                             left          = {Left, _MRefL},
                             view          = View,
                             members_state = MembersState,
                             confirms      = Confirms })
   when MembersState =/= undefined ->
-    {MembersState1, {Confirms1, Activity1}} =
-        lists:foldl(
-          fun ({Id, Pubs, Acks}, MembersStateConfirmsActivity) ->
-                  with_member_acc(
-                    fun (Member = #member { pending_ack = PA,
-                                            last_pub    = LP,
-                                            last_ack    = LA },
-                         {Confirms2, Activity2}) ->
-                            case is_member_alias(Id, Self, View) of
-                                true ->
-                                    {ToAck, PA1} =
-                                        find_common(queue_from_pubs(Pubs), PA,
-                                                    queue:new()),
-                                    LA1 = last_ack(Acks, LA),
-                                    AckNums = acks_from_queue(ToAck),
-                                    Confirms3 = maybe_confirm(
-                                                  Self, Id, Confirms2, AckNums),
-                                    {Member #member { pending_ack = PA1,
-                                                      last_ack    = LA1 },
-                                     {Confirms3,
-                                      activity_cons(
-                                        Id, [], AckNums, Activity2)}};
-                                false ->
-                                    PA1 = apply_acks(Acks, join_pubs(PA, Pubs)),
-                                    LA1 = last_ack(Acks, LA),
-                                    LP1 = last_pub(Pubs, LP),
-                                    {Member #member { pending_ack = PA1,
-                                                      last_pub    = LP1,
-                                                      last_ack    = LA1 },
-                                     {Confirms2,
-                                      activity_cons(Id, Pubs, Acks, Activity2)}}
-                            end
-                    end, Id, MembersStateConfirmsActivity)
-          end, {MembersState, {Confirms, activity_nil()}}, Activity),
-    State1 = State #state { members_state = MembersState1,
-                            confirms      = Confirms1 },
-    Activity3 = activity_finalise(Activity1),
-    ok = maybe_send_activity(Activity3, State1),
-    {Result, State2} = maybe_erase_aliases(State1, View),
-    if_callback_success(
-      Result, fun activity_true/3, fun activity_false/3, Activity3, State2);
+    try
+        %% If we have to stop, do it asap so we avoid any ack confirmation
+        %% Membership must be checked again by erase_members_in_group, as the
+        %% node can be marked as dead in the meantime
+        check_membership(GroupName),
+        {MembersState1, {Confirms1, Activity1}} =
+            calculate_activity(MembersState, Confirms, Activity, Self, View),
+        State1 = State #state { members_state = MembersState1,
+                                confirms      = Confirms1 },
+        Activity3 = activity_finalise(Activity1),
+        ok = maybe_send_activity(Activity3, State1),
+        {Result, State2} = maybe_erase_aliases(State1, View),
+        if_callback_success(
+          Result, fun activity_true/3, fun activity_false/3, Activity3, State2)
+    catch
+        lost_membership ->
+            {{stop, shutdown}, State}
+    end;
 
 handle_msg({activity, _NotLeft, _Activity}, State) ->
     {ok, State}.
@@ -901,7 +896,7 @@ ensure_broadcast_timer(State = #state { broadcast_buffer = [],
     State;
 ensure_broadcast_timer(State = #state { broadcast_buffer = [],
                                         broadcast_timer  = TRef }) ->
-    erlang:cancel_timer(TRef),
+    _ = erlang:cancel_timer(TRef),
     State #state { broadcast_timer = undefined };
 ensure_broadcast_timer(State = #state { broadcast_timer = undefined }) ->
     TRef = erlang:send_after(?BROADCAST_TIMER, self(), flush),
@@ -1081,13 +1076,13 @@ join_group(Self, GroupName, #gm_group { members = Members } = Group, TxnFun) ->
                                prune_or_create_group(Self, GroupName, TxnFun),
                                TxnFun);
                 Alive ->
-                    Left = lists:nth(random:uniform(length(Alive)), Alive),
+                    Left = lists:nth(rand_compat:uniform(length(Alive)), Alive),
                     Handler =
                         fun () ->
                                 join_group(
                                   Self, GroupName,
-                                  record_dead_member_in_group(
-                                    Left, GroupName, TxnFun),
+                                  record_dead_member_in_group(Self,
+                                    Left, GroupName, TxnFun, false),
                                   TxnFun)
                         end,
                     try
@@ -1137,47 +1132,93 @@ prune_or_create_group(Self, GroupName, TxnFun) ->
               end
       end).
 
-record_dead_member_in_group(Member, GroupName, TxnFun) ->
-    TxnFun(
-      fun () ->
-              Group = #gm_group { members = Members, version = Ver } =
-                  read_group(GroupName),
-              case lists:splitwith(
-                     fun (Member1) -> Member1 =/= Member end, Members) of
-                  {_Members1, []} -> %% not found - already recorded dead
-                      Group;
-                  {Members1, [Member | Members2]} ->
-                      Members3 = Members1 ++ [{dead, Member} | Members2],
-                      write_group(Group #gm_group { members = Members3,
-                                                    version = Ver + 1 })
-              end
-      end).
+record_dead_member_in_group(Self, Member, GroupName, TxnFun, Verify) ->
+    Fun =
+        fun () ->
+                try
+                    Group = #gm_group { members = Members, version = Ver } =
+                        case Verify of
+                            true ->
+                                check_membership(Self, read_group(GroupName));
+                            false ->
+                                check_group(read_group(GroupName))
+                        end,
+                    case lists:splitwith(
+                           fun (Member1) -> Member1 =/= Member end, Members) of
+                        {_Members1, []} -> %% not found - already recorded dead
+                            Group;
+                        {Members1, [Member | Members2]} ->
+                            Members3 = Members1 ++ [{dead, Member} | Members2],
+                            write_group(Group #gm_group { members = Members3,
+                                                          version = Ver + 1 })
+                    end
+                catch
+                    lost_membership ->
+                        %% The transaction must not be abruptly crashed, but
+                        %% leave the gen_server to stop normally
+                        {error, lost_membership}
+                end
+        end,
+    handle_lost_membership_in_txn(TxnFun, Fun).
+
+handle_lost_membership_in_txn(TxnFun, Fun) ->
+    case TxnFun(Fun)  of
+        {error, lost_membership = T} ->
+            throw(T);
+        Any ->
+            Any
+    end.
 
 record_new_member_in_group(NewMember, Left, GroupName, TxnFun) ->
-    TxnFun(
-      fun () ->
-              Group = #gm_group { members = Members, version = Ver } =
-                  read_group(GroupName),
-              {Prefix, [Left | Suffix]} =
-                  lists:splitwith(fun (M) -> M =/= Left end, Members),
-              write_group(Group #gm_group {
-                            members = Prefix ++ [Left, NewMember | Suffix],
-                            version = Ver + 1 })
-      end).
+    Fun =
+        fun () ->
+                try
+                    Group = #gm_group { members = Members, version = Ver } =
+                        check_membership(Left, read_group(GroupName)),
+                    case lists:member(NewMember, Members) of
+                        true ->
+                            %% This avoids duplicates during partial partitions,
+                            %% as inconsistent views might happen during them
+                            rabbit_log:warning("(~p) GM avoiding duplicate of ~p",
+                                               [self(), NewMember]),
+                            Group;
+                        false ->
+                            {Prefix, [Left | Suffix]} =
+                                lists:splitwith(fun (M) -> M =/= Left end, Members),
+                            write_group(Group #gm_group {
+                                          members = Prefix ++ [Left, NewMember | Suffix],
+                                          version = Ver + 1 })
+                    end
+                catch
+                    lost_membership ->
+                        %% The transaction must not be abruptly crashed, but
+                        %% leave the gen_server to stop normally
+                        {error, lost_membership}
+                end
+        end,
+    handle_lost_membership_in_txn(TxnFun, Fun).
 
-erase_members_in_group(Members, GroupName, TxnFun) ->
+erase_members_in_group(Self, Members, GroupName, TxnFun) ->
     DeadMembers = [{dead, Id} || Id <- Members],
-    TxnFun(
-      fun () ->
-              Group = #gm_group { members = [_|_] = Members1, version = Ver } =
-                  read_group(GroupName),
-              case Members1 -- DeadMembers of
-                  Members1 -> Group;
-                  Members2 -> write_group(
-                                Group #gm_group { members = Members2,
-                                                  version = Ver + 1 })
+    Fun =
+        fun () ->
+                try
+                    Group = #gm_group { members = [_|_] = Members1, version = Ver } =
+                        check_membership(Self, read_group(GroupName)),
+                    case Members1 -- DeadMembers of
+                        Members1 -> Group;
+                        Members2 -> write_group(
+                                      Group #gm_group { members = Members2,
+                                                        version = Ver + 1 })
+                    end
+              catch
+                  lost_membership ->
+                      %% The transaction must not be abruptly crashed, but
+                      %% leave the gen_server to stop normally
+                      {error, lost_membership}
               end
-      end).
+        end,
+    handle_lost_membership_in_txn(TxnFun, Fun).
 
 maybe_erase_aliases(State = #state { self          = Self,
                                      group_name    = GroupName,
@@ -1198,7 +1239,7 @@ maybe_erase_aliases(State = #state { self          = Self,
     View1 = case Erasable of
                 [] -> View;
                 _  -> group_to_view(
-                        erase_members_in_group(Erasable, GroupName, TxnFun))
+                        erase_members_in_group(Self, Erasable, GroupName, TxnFun))
             end,
     change_view(View1, State #state { members_state = MembersState1 }).
 
@@ -1298,7 +1339,11 @@ find_common(A, B, Common) ->
         {{{value, Val}, A1}, {{value, Val}, B1}} ->
             find_common(A1, B1, queue:in(Val, Common));
         {{empty, _A}, _} ->
-            {Common, B}
+            {Common, B};
+        {_, {_, B1}} ->
+            find_common(A, B1, Common);
+        {{_, A1}, _} ->
+            find_common(A1, B, Common)
     end.
 
 
@@ -1373,6 +1418,41 @@ maybe_send_activity(Activity, #state { self  = Self,
 send_right(Right, View, Msg) ->
     ok = neighbour_cast(Right, {?TAG, view_version(View), Msg}).
 
+calculate_activity(MembersState, Confirms, Activity, Self, View) ->
+    lists:foldl(
+      fun ({Id, Pubs, Acks}, MembersStateConfirmsActivity) ->
+              with_member_acc(
+                fun (Member = #member { pending_ack = PA,
+                                        last_pub    = LP,
+                                        last_ack    = LA },
+                     {Confirms2, Activity2}) ->
+                        case is_member_alias(Id, Self, View) of
+                            true ->
+                                {ToAck, PA1} =
+                                    find_common(queue_from_pubs(Pubs), PA,
+                                                queue:new()),
+                                LA1 = last_ack(Acks, LA),
+                                AckNums = acks_from_queue(ToAck),
+                                Confirms3 = maybe_confirm(
+                                              Self, Id, Confirms2, AckNums),
+                                {Member #member { pending_ack = PA1,
+                                                  last_ack    = LA1 },
+                                 {Confirms3,
+                                  activity_cons(
+                                    Id, [], AckNums, Activity2)}};
+                            false ->
+                                PA1 = apply_acks(Acks, join_pubs(PA, Pubs)),
+                                LA1 = last_ack(Acks, LA),
+                                LP1 = last_pub(Pubs, LP),
+                                {Member #member { pending_ack = PA1,
+                                                  last_pub    = LP1,
+                                                  last_ack    = LA1 },
+                                 {Confirms2,
+                                  activity_cons(Id, Pubs, Acks, Activity2)}}
+                        end
+                end, Id, MembersStateConfirmsActivity)
+      end, {MembersState, {Confirms, activity_nil()}}, Activity).
+
 callback(Args, Module, Activity) ->
     Result =
       lists:foldl(
@@ -1486,7 +1566,7 @@ maybe_confirm(_Self, _Id, Confirms, _PubNums) ->
     Confirms.
 
 purge_confirms(Confirms) ->
-    [gen_server2:reply(From, ok) || {_PubNum, From} <- queue:to_list(Confirms)],
+    _ = [gen_server2:reply(From, ok) || {_PubNum, From} <- queue:to_list(Confirms)],
     queue:new().
 
 
@@ -1525,3 +1605,31 @@ call(Pid, Msg, Timeout) -> gen_server2:call(Pid, Msg, Timeout).
 cast(Pid, Msg)          -> gen_server2:cast(Pid, Msg).
 monitor(Pid)            -> erlang:monitor(process, Pid).
 demonitor(MRef)         -> erlang:demonitor(MRef).
+
+check_membership(Self, #gm_group{members = M} = Group) ->
+    case lists:member(Self, M) of
+        true ->
+            Group;
+        false ->
+            throw(lost_membership)
+    end;
+check_membership(_Self, {error, not_found}) ->
+    throw(lost_membership).
+
+check_membership(GroupName) ->
+    case dirty_read_group(GroupName) of
+        #gm_group{members = M} ->
+            case lists:keymember(self(), 2, M) of
+                true ->
+                    ok;
+                false ->
+                    throw(lost_membership)
+            end;
+        {error, not_found} ->
+            throw(lost_membership)
+    end.
+
+check_group({error, not_found}) ->
+    throw(lost_membership);
+check_group(Any) ->
+    Any.
similarity index 69%
rename from rabbitmq-server/src/lqueue.erl
rename to deps/rabbit/src/lqueue.erl
index 4e78346febe94175acd0a7ba56c29e362d5a65d2..fc7157dff1048981ad22d7bc6f247d5749770833 100644 (file)
 
 -define(QUEUE, queue).
 
--ifdef(use_specs).
-
 -export_type([?MODULE/0]).
 
--opaque(?MODULE() :: {non_neg_integer(), ?QUEUE:?QUEUE()}).
--type(value()     :: any()).
--type(result()    :: 'empty' | {'value', value()}).
-
--spec(new/0       :: () -> ?MODULE()).
--spec(is_empty/1  :: (?MODULE()) -> boolean()).
--spec(len/1       :: (?MODULE()) -> non_neg_integer()).
--spec(in/2        :: (value(), ?MODULE()) -> ?MODULE()).
--spec(in_r/2      :: (value(), ?MODULE()) -> ?MODULE()).
--spec(out/1       :: (?MODULE()) -> {result(), ?MODULE()}).
--spec(out_r/1     :: (?MODULE()) -> {result(), ?MODULE()}).
--spec(join/2      :: (?MODULE(), ?MODULE()) -> ?MODULE()).
--spec(foldl/3     :: (fun ((value(), B) -> B), B, ?MODULE()) -> B).
--spec(foldr/3     :: (fun ((value(), B) -> B), B, ?MODULE()) -> B).
--spec(from_list/1 :: ([value()]) -> ?MODULE()).
--spec(to_list/1   :: (?MODULE()) -> [value()]).
--spec(peek/1      :: (?MODULE()) -> result()).
--spec(peek_r/1    :: (?MODULE()) -> result()).
-
--endif.
+-include_lib("rabbit_common/include/old_builtin_types.hrl").
+
+-opaque ?MODULE() :: {non_neg_integer(), ?QUEUE_TYPE()}.
+-type value()     :: any().
+-type result()    :: 'empty' | {'value', value()}.
+
+-spec new() -> ?MODULE().
+-spec is_empty(?MODULE()) -> boolean().
+-spec len(?MODULE()) -> non_neg_integer().
+-spec in(value(), ?MODULE()) -> ?MODULE().
+-spec in_r(value(), ?MODULE()) -> ?MODULE().
+-spec out(?MODULE()) -> {result(), ?MODULE()}.
+-spec out_r(?MODULE()) -> {result(), ?MODULE()}.
+-spec join(?MODULE(), ?MODULE()) -> ?MODULE().
+-spec foldl(fun ((value(), B) -> B), B, ?MODULE()) -> B.
+-spec foldr(fun ((value(), B) -> B), B, ?MODULE()) -> B.
+-spec from_list([value()]) -> ?MODULE().
+-spec to_list(?MODULE()) -> [value()].
+-spec peek(?MODULE()) -> result().
+-spec peek_r(?MODULE()) -> result().
 
 new() -> {0, ?QUEUE:new()}.
 
similarity index 92%
rename from rabbitmq-server/src/mnesia_sync.erl
rename to deps/rabbit/src/mnesia_sync.erl
index 153017bca807be092e24fab03f9ee2e847c229fa..8d5c94663556b0750bcf0d79f28ebee9136198f6 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(mnesia_sync).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(sync/0 :: () -> 'ok').
-
--endif.
+-spec sync() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -65,7 +61,7 @@ handle_cast(Request, State) ->
 
 handle_info(timeout, #state{waiting = Waiting} = State) ->
     ok = disk_log:sync(latest_log),
-    [gen_server:reply(From, ok) || From <- Waiting],
+    _ = [gen_server:reply(From, ok) || From <- Waiting],
     {noreply, State#state{waiting = []}};
 handle_info(Message, State) ->
     {stop, {unhandled_info, Message}, State}.
similarity index 89%
rename from rabbitmq-server/src/pg_local.erl
rename to deps/rabbit/src/pg_local.erl
index 4d9914d9b75f3ea938288cccfe2481778917dcd7..e1f5219dcb0dd9ac4d116d8311da003a5ebe65cc 100644 (file)
 %% All modifications are (C) 2010-2013 GoPivotal, Inc.
 
 %% %CopyrightBegin%
-%% 
+%%
 %% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%% 
+%%
 %% The contents of this file are subject to the Erlang Public License,
 %% Version 1.1, (the "License"); you may not use this file except in
 %% compliance with the License. You should have received a copy of the
 %% Erlang Public License along with this software. If not, it can be
 %% retrieved online at http://www.erlang.org/.
-%% 
+%%
 %% Software distributed under the License is distributed on an "AS IS"
 %% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
 %% the License for the specific language governing rights and limitations
 %% under the License.
-%% 
+%%
 %% %CopyrightEnd%
 %%
 -module(pg_local).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(name() :: term()).
-
--spec(start_link/0 :: () -> {'ok', pid()} | {'error', any()}).
--spec(start/0 :: () -> {'ok', pid()} | {'error', any()}).
--spec(join/2 :: (name(), pid()) -> 'ok').
--spec(leave/2 :: (name(), pid()) -> 'ok').
--spec(get_members/1 :: (name()) -> [pid()]).
--spec(in_group/2 :: (name(), pid()) -> boolean()).
+-type name() :: term().
 
--spec(sync/0 :: () -> 'ok').
+-spec start_link() -> {'ok', pid()} | {'error', any()}.
+-spec start() -> {'ok', pid()} | {'error', any()}.
+-spec join(name(), pid()) -> 'ok'.
+-spec leave(name(), pid()) -> 'ok'.
+-spec get_members(name()) -> [pid()].
+-spec in_group(name(), pid()) -> boolean().
 
--endif.
+-spec sync() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -71,19 +67,19 @@ start() ->
     ensure_started().
 
 join(Name, Pid) when is_pid(Pid) ->
-    ensure_started(),
+    _ = ensure_started(),
     gen_server:cast(?MODULE, {join, Name, Pid}).
 
 leave(Name, Pid) when is_pid(Pid) ->
-    ensure_started(),
+    _ = ensure_started(),
     gen_server:cast(?MODULE, {leave, Name, Pid}).
 
 get_members(Name) ->
-    ensure_started(),
+    _ = ensure_started(),
     group_members(Name).
 
 in_group(Name, Pid) ->
-    ensure_started(),
+    _ = ensure_started(),
     %% The join message is a cast and thus can race, but we want to
     %% keep it that way to be fast in the common case.
     case member_present(Name, Pid) of
@@ -93,7 +89,7 @@ in_group(Name, Pid) ->
     end.
 
 sync() ->
-    ensure_started(),
+    _ = ensure_started(),
     gen_server:call(?MODULE, sync, infinity).
 
 %%%
@@ -111,12 +107,12 @@ handle_call(sync, _From, S) ->
 
 handle_call(Request, From, S) ->
     error_logger:warning_msg("The pg_local server received an unexpected message:\n"
-                             "handle_call(~p, ~p, _)\n", 
+                             "handle_call(~p, ~p, _)\n",
                              [Request, From]),
     {noreply, S}.
 
 handle_cast({join, Name, Pid}, S) ->
-    join_group(Name, Pid),
+    _ = join_group(Name, Pid),
     {noreply, S};
 handle_cast({leave, Name, Pid}, S) ->
     leave_group(Name, Pid),
@@ -155,13 +151,13 @@ terminate(_Reason, _S) ->
 member_died(Ref) ->
     [{{ref, Ref}, Pid}] = ets:lookup(pg_local_table, {ref, Ref}),
     Names = member_groups(Pid),
-    _ = [leave_group(Name, P) || 
+    _ = [leave_group(Name, P) ||
             Name <- Names,
             P <- member_in_group(Pid, Name)],
     ok.
 
 join_group(Name, Pid) ->
-    Ref_Pid = {ref, Pid}, 
+    Ref_Pid = {ref, Pid},
     try _ = ets:update_counter(pg_local_table, Ref_Pid, {3, +1})
     catch _:_ ->
             Ref = erlang:monitor(process, Pid),
@@ -179,14 +175,14 @@ leave_group(Name, Pid) ->
     Member_Name_Pid = {member, Name, Pid},
     try ets:update_counter(pg_local_table, Member_Name_Pid, {2, -1}) of
         N ->
-            if 
+            if
                 N =:= 0 ->
                     true = ets:delete(pg_local_table, {pid, Pid, Name}),
                     true = ets:delete(pg_local_table, Member_Name_Pid);
                 true ->
                     ok
             end,
-            Ref_Pid = {ref, Pid}, 
+            Ref_Pid = {ref, Pid},
             case ets:update_counter(pg_local_table, Ref_Pid, {3, -1}) of
                 0 ->
                     [{Ref_Pid,Ref,0}] = ets:lookup(pg_local_table, Ref_Pid),
@@ -202,7 +198,7 @@ leave_group(Name, Pid) ->
     end.
 
 group_members(Name) ->
-    [P || 
+    [P ||
         [P, N] <- ets:match(pg_local_table, {{member, Name, '$1'},'$2'}),
         _ <- lists:seq(1, N)].
 
similarity index 82%
rename from rabbitmq-server/ebin/rabbit_app.in
rename to deps/rabbit/src/rabbit.app.src
index bd4a9f33c031c6297172efc5500ce58c606cb3c3..fe5a4c56f8097ddc98bba501a9d748575a4e6364 100644 (file)
@@ -1,24 +1,26 @@
 {application, rabbit,           %% -*- erlang -*-
  [{description, "RabbitMQ"},
   {id, "RabbitMQ"},
-  {vsn, "3.5.6"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, [rabbit_amqqueue_sup,
                 rabbit_log,
                 rabbit_node_monitor,
                 rabbit_router,
                 rabbit_sup,
-                rabbit_tcp_client_sup,
                 rabbit_direct_client_sup]},
-  {applications, [kernel, stdlib, sasl, mnesia, os_mon, xmerl]},
+  {applications, [kernel, stdlib, sasl, mnesia, rabbit_common, ranch, os_mon, xmerl]},
 %% we also depend on crypto, public_key and ssl but they shouldn't be
 %% in here as we don't actually want to start it
   {mod, {rabbit, []}},
   {env, [{tcp_listeners, [5672]},
+         {num_tcp_acceptors, 10},
          {ssl_listeners, []},
+         {num_ssl_acceptors, 1},
          {ssl_options, []},
          {vm_memory_high_watermark, 0.4},
          {vm_memory_high_watermark_paging_ratio, 0.5},
+         {memory_monitor_interval, 2500},
          {disk_free_limit, 50000000}, %% 50MB
          {msg_store_index_module, rabbit_msg_store_ets_index},
          {backing_queue_module, rabbit_variable_queue},
@@ -29,7 +31,7 @@
          {heartbeat, 60},
          {msg_store_file_size_limit, 16777216},
          {fhc_write_buffering, true},
-         {fhc_read_buffering, true},
+         {fhc_read_buffering, false},
          {queue_index_max_journal_entries, 32768},
          {queue_index_embed_msgs_below, 4096},
          {default_user, <<"guest">>},
@@ -38,6 +40,7 @@
          {default_vhost, <<"/">>},
          {default_permissions, [<<".*">>, <<".*">>, <<".*">>]},
          {loopback_users, [<<"guest">>]},
+         {password_hashing_module, rabbit_password_hashing_sha256},
          {cluster_nodes, {[], disc}},
          {server_properties, []},
          {collect_statistics, none},
          {reverse_dns_lookups, false},
          {cluster_partition_handling, ignore},
          {cluster_keepalive_interval, 10000},
-         {tcp_listen_options, [binary,
-                               {packet,        raw},
-                               {reuseaddr,     true},
-                               {backlog,       128},
+         {tcp_listen_options, [{backlog,       128},
                                {nodelay,       true},
                                {linger,        {true, 0}},
                                {exit_on_close, false}]},
@@ -84,6 +84,7 @@
          {ssl_apps, [asn1, crypto, public_key, ssl]},
          %% see rabbitmq-server#114
          {mirroring_flow_control, true},
+         {mirroring_sync_batch_size, 4096},
          %% see rabbitmq-server#227 and related tickets.
          %% msg_store_credit_disc_bound only takes effect when
          %% messages are persisted to the message store. If messages
          {msg_store_credit_disc_bound, {2000, 500}},
          {msg_store_io_batch_size, 2048},
          %% see rabbitmq-server#143
-         {credit_flow_default_credit, {200, 50}}
+         %% and rabbitmq-server#949
+         {credit_flow_default_credit, {200, 100}},
+         %% see rabbitmq-server#248
+         %% and rabbitmq-server#667
+         {channel_operation_timeout, 15000},
+         {config_entry_decoder, [
+             {cipher, aes_cbc256},
+             {hash, sha512},
+             {iterations, 1000},
+             {passphrase, undefined}
+         ]},
+         %% rabbitmq-server-973
+         {lazy_queue_explicit_gc_run_operation_threshold, 250}
         ]}]}.
similarity index 74%
rename from rabbitmq-server/src/rabbit.erl
rename to deps/rabbit/src/rabbit.erl
index bb906ede4f7a8fad5c02edd0893db7ff4a7cf82e..1f0df1ad06e20447696112bead7744319a5cff34 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit).
@@ -22,9 +22,9 @@
          stop_and_halt/0, await_startup/0, status/0, is_running/0,
          is_running/1, environment/0, rotate_logs/1, force_event_refresh/1,
          start_fhc/0]).
--export([start/2, stop/1]).
+-export([start/2, stop/1, prep_stop/1]).
 -export([start_apps/1, stop_apps/1]).
--export([log_location/1, config_files/0]). %% for testing and mgmt-agent
+-export([log_location/1, config_files/0, decrypt_config/2]). %% for testing and mgmt-agent
 
 %%---------------------------------------------------------------------------
 %% Boot steps.
 -include("rabbit_framing.hrl").
 -include("rabbit.hrl").
 
--define(APPS, [os_mon, mnesia, rabbit]).
+-define(APPS, [os_mon, mnesia, rabbit_common, rabbit]).
 
-%% HiPE compilation uses multiple cores anyway, but some bits are
-%% IO-bound so we can go faster if we parallelise a bit more. In
-%% practice 2 processes seems just as fast as any other number > 1,
-%% and keeps the progress bar realistic-ish.
--define(HIPE_PROCESSES, 2).
 -define(ASYNC_THREADS_WARNING_THRESHOLD, 8).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(file_suffix() :: binary()).
+-type file_suffix() :: binary().
 %% this really should be an abstract type
--type(log_location() :: 'tty' | 'undefined' | file:filename()).
--type(param() :: atom()).
--type(app_name() :: atom()).
-
--spec(start/0 :: () -> 'ok').
--spec(boot/0 :: () -> 'ok').
--spec(stop/0 :: () -> 'ok').
--spec(stop_and_halt/0 :: () -> no_return()).
--spec(await_startup/0 :: () -> 'ok').
--spec(status/0 ::
+-type log_location() :: 'tty' | 'undefined' | file:filename().
+-type param() :: atom().
+-type app_name() :: atom().
+
+-spec start() -> 'ok'.
+-spec boot() -> 'ok'.
+-spec stop() -> 'ok'.
+-spec stop_and_halt() -> no_return().
+-spec await_startup() -> 'ok'.
+-spec status
         () -> [{pid, integer()} |
                {running_applications, [{atom(), string(), string()}]} |
                {os, {atom(), atom()}} |
                {erlang_version, string()} |
-               {memory, any()}]).
--spec(is_running/0 :: () -> boolean()).
--spec(is_running/1 :: (node()) -> boolean()).
--spec(environment/0 :: () -> [{param(), term()}]).
--spec(rotate_logs/1 :: (file_suffix()) -> rabbit_types:ok_or_error(any())).
--spec(force_event_refresh/1 :: (reference()) -> 'ok').
-
--spec(log_location/1 :: ('sasl' | 'kernel') -> log_location()).
-
--spec(start/2 :: ('normal',[]) ->
-                     {'error',
-                      {'erlang_version_too_old',
-                       {'found',[any()]},
-                       {'required',[any(),...]}}} |
-                     {'ok',pid()}).
--spec(stop/1 :: (_) -> 'ok').
-
--spec(maybe_insert_default_data/0 :: () -> 'ok').
--spec(boot_delegate/0 :: () -> 'ok').
--spec(recover/0 :: () -> 'ok').
--spec(start_apps/1 :: ([app_name()]) -> 'ok').
--spec(stop_apps/1 :: ([app_name()]) -> 'ok').
-
--endif.
+               {memory, any()}].
+-spec is_running() -> boolean().
+-spec is_running(node()) -> boolean().
+-spec environment() -> [{param(), term()}].
+-spec rotate_logs(file_suffix()) -> rabbit_types:ok_or_error(any()).
+-spec force_event_refresh(reference()) -> 'ok'.
+
+-spec log_location('sasl' | 'kernel') -> log_location().
+
+-spec start('normal',[]) ->
+          {'error',
+           {'erlang_version_too_old',
+            {'found',string(),string()},
+            {'required',string(),string()}}} |
+          {'ok',pid()}.
+-spec stop(_) -> 'ok'.
+
+-spec maybe_insert_default_data() -> 'ok'.
+-spec boot_delegate() -> 'ok'.
+-spec recover() -> 'ok'.
+-spec start_apps([app_name()]) -> 'ok'.
+-spec stop_apps([app_name()]) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
-%% HiPE compilation happens before we have log handlers - so we have
-%% to io:format/2, it's all we can do.
-
-maybe_hipe_compile() ->
-    {ok, Want} = application:get_env(rabbit, hipe_compile),
-    Can = code:which(hipe) =/= non_existing,
-    case {Want, Can} of
-        {true,  true}  -> hipe_compile();
-        {true,  false} -> false;
-        {false, _}     -> {ok, disabled}
-    end.
-
-log_hipe_result({ok, disabled}) ->
-    ok;
-log_hipe_result({ok, Count, Duration}) ->
-    rabbit_log:info(
-      "HiPE in use: compiled ~B modules in ~Bs.~n", [Count, Duration]);
-log_hipe_result(false) ->
-    io:format(
-      "~nNot HiPE compiling: HiPE not found in this Erlang installation.~n"),
-    rabbit_log:warning(
-      "Not HiPE compiling: HiPE not found in this Erlang installation.~n").
-
-%% HiPE compilation happens before we have log handlers and can take a
-%% long time, so make an exception to our no-stdout policy and display
-%% progress via stdout.
-hipe_compile() ->
-    {ok, HipeModulesAll} = application:get_env(rabbit, hipe_modules),
-    HipeModules = [HM || HM <- HipeModulesAll, code:which(HM) =/= non_existing],
-    Count = length(HipeModules),
-    io:format("~nHiPE compiling:  |~s|~n                 |",
-              [string:copies("-", Count)]),
-    T1 = erlang:now(),
-    PidMRefs = [spawn_monitor(fun () -> [begin
-                                             {ok, M} = hipe:c(M, [o3]),
-                                             io:format("#")
-                                         end || M <- Ms]
-                              end) ||
-                   Ms <- split(HipeModules, ?HIPE_PROCESSES)],
-    [receive
-         {'DOWN', MRef, process, _, normal} -> ok;
-         {'DOWN', MRef, process, _, Reason} -> exit(Reason)
-     end || {_Pid, MRef} <- PidMRefs],
-    T2 = erlang:now(),
-    Duration = timer:now_diff(T2, T1) div 1000000,
-    io:format("|~n~nCompiled ~B modules in ~Bs~n", [Count, Duration]),
-    {ok, Count, Duration}.
-
-split(L, N) -> split0(L, [[] || _ <- lists:seq(1, N)]).
-
-split0([],       Ls)       -> Ls;
-split0([I | Is], [L | Ls]) -> split0(Is, Ls ++ [[I | L]]).
-
 ensure_application_loaded() ->
     %% We end up looking at the rabbit app's env for HiPE and log
     %% handling, so it needs to be loaded. But during the tests, it
@@ -312,10 +250,12 @@ ensure_application_loaded() ->
 
 start() ->
     start_it(fun() ->
-                     %% We do not want to HiPE compile or upgrade
-                     %% mnesia after just restarting the app
+                     %% We do not want to upgrade mnesia after just
+                     %% restarting the app.
                      ok = ensure_application_loaded(),
+                     HipeResult = rabbit_hipe:maybe_hipe_compile(),
                      ok = ensure_working_log_handlers(),
+                     rabbit_hipe:log_hipe_result(HipeResult),
                      rabbit_node_monitor:prepare_cluster_status_files(),
                      rabbit_mnesia:check_cluster_consistency(),
                      broker_start()
@@ -324,9 +264,9 @@ start() ->
 boot() ->
     start_it(fun() ->
                      ok = ensure_application_loaded(),
-                     HipeResult = maybe_hipe_compile(),
+                     HipeResult = rabbit_hipe:maybe_hipe_compile(),
                      ok = ensure_working_log_handlers(),
-                     log_hipe_result(HipeResult),
+                     rabbit_hipe:log_hipe_result(HipeResult),
                      rabbit_node_monitor:prepare_cluster_status_files(),
                      ok = rabbit_upgrade:maybe_upgrade_mnesia(),
                      %% It's important that the consistency check happens after
@@ -340,13 +280,120 @@ broker_start() ->
     Plugins = rabbit_plugins:setup(),
     ToBeLoaded = Plugins ++ ?APPS,
     start_apps(ToBeLoaded),
-    case code:load_file(sd_notify) of
-        {module, sd_notify} -> SDNotify = sd_notify,
-                               SDNotify:sd_notify(0, "READY=1");
-        {error, _} -> ok
-    end,
+    maybe_sd_notify(),
     ok = log_broker_started(rabbit_plugins:active()).
 
+%% Try to send systemd ready notification if it makes sense in the
+%% current environment. standard_error is used intentionally in all
+%% logging statements, so all this messages will end in systemd
+%% journal.
+maybe_sd_notify() ->
+    case sd_notify_ready() of
+        false ->
+            io:format(standard_error, "systemd READY notification failed, beware of timeouts~n", []);
+        _ ->
+            ok
+    end.
+
+sd_notify_ready() ->
+    case {os:type(), os:getenv("NOTIFY_SOCKET")} of
+        {{win32, _}, _} ->
+            true;
+        {_, [_|_]} -> %% Non-empty NOTIFY_SOCKET, give it a try
+            sd_notify_legacy() orelse sd_notify_socat();
+        _ ->
+            true
+    end.
+
+sd_notify_data() ->
+    "READY=1\nSTATUS=Initialized\nMAINPID=" ++ os:getpid() ++ "\n".
+
+sd_notify_legacy() ->
+    case code:load_file(sd_notify) of
+        {module, sd_notify} ->
+            SDNotify = sd_notify,
+            SDNotify:sd_notify(0, sd_notify_data()),
+            true;
+        {error, _} ->
+            false
+    end.
+
+%% socat(1) is the most portable way the sd_notify could be
+%% implemented in erlang, without introducing some NIF. Currently the
+%% following issues prevent us from implementing it in a more
+%% reasonable way:
+%% - systemd-notify(1) is unstable for non-root users
+%% - erlang doesn't support unix domain sockets.
+%%
+%% Some details on how we ended with such a solution:
+%%   https://github.com/rabbitmq/rabbitmq-server/issues/664
+sd_notify_socat() ->
+    case sd_current_unit() of
+        {ok, Unit} ->
+            io:format(standard_error, "systemd unit for activation check: \"~s\"~n", [Unit]),
+            sd_notify_socat(Unit);
+        _ ->
+            false
+    end.
+
+socat_socket_arg("@" ++ AbstractUnixSocket) ->
+    "abstract-sendto:" ++ AbstractUnixSocket;
+socat_socket_arg(UnixSocket) ->
+    "unix-sendto:" ++ UnixSocket.
+
+sd_open_port() ->
+    open_port(
+      {spawn_executable, os:find_executable("socat")},
+      [{args, [socat_socket_arg(os:getenv("NOTIFY_SOCKET")), "STDIO"]},
+       use_stdio, out]).
+
+sd_notify_socat(Unit) ->
+    case sd_open_port() of
+        {'EXIT', Exit} ->
+            io:format(standard_error, "Failed to start socat ~p~n", [Exit]),
+            false;
+        Port ->
+            Port ! {self(), {command, sd_notify_data()}},
+            Result = sd_wait_activation(Port, Unit),
+            port_close(Port),
+            Result
+    end.
+
+sd_current_unit() ->
+    case catch re:run(os:cmd("systemctl status " ++ os:getpid()), "([-.@0-9a-zA-Z]+)", [unicode, {capture, all_but_first, list}]) of
+        {'EXIT', _} ->
+            error;
+        {match, [Unit]} ->
+            {ok, Unit};
+        _ ->
+            error
+    end.
+
+sd_wait_activation(Port, Unit) ->
+    case os:find_executable("systemctl") of
+        false ->
+            io:format(standard_error, "'systemctl' unavailable, falling back to sleep~n", []),
+            timer:sleep(5000),
+            true;
+        _ ->
+            sd_wait_activation(Port, Unit, 10)
+    end.
+
+sd_wait_activation(_, _, 0) ->
+    io:format(standard_error, "Service still in 'activating' state, bailing out~n", []),
+    false;
+sd_wait_activation(Port, Unit, AttemptsLeft) ->
+    case os:cmd("systemctl show --property=ActiveState " ++ Unit) of
+        "ActiveState=activating\n" ->
+            timer:sleep(1000),
+            sd_wait_activation(Port, Unit, AttemptsLeft - 1);
+        "ActiveState=" ++ _ ->
+            true;
+        _ = Err->
+            io:format(standard_error, "Unexpected status from systemd ~p~n", [Err]),
+            false
+    end.
+
 start_it(StartFun) ->
     Marker = spawn_link(fun() -> receive stop -> ok end end),
     case catch register(rabbit_boot, Marker) of
@@ -385,26 +432,135 @@ stop_and_halt() ->
         stop()
     after
         rabbit_log:info("Halting Erlang VM~n", []),
+        %% Also duplicate this information to stderr, so console where
+        %% foreground broker was running (or systemd journal) will
+        %% contain information about graceful termination.
+        io:format(standard_error, "Gracefully halting Erlang VM~n", []),
         init:stop()
     end,
     ok.
 
 start_apps(Apps) ->
     app_utils:load_applications(Apps),
+
+    ConfigEntryDecoder = case application:get_env(rabbit, config_entry_decoder) of
+        undefined ->
+            [];
+        {ok, Val} ->
+            Val
+    end,
+    PassPhrase = case proplists:get_value(passphrase, ConfigEntryDecoder) of
+        prompt ->
+            IoDevice = get_input_iodevice(),
+            io:setopts(IoDevice, [{echo, false}]),
+            PP = lists:droplast(io:get_line(IoDevice,
+                "\nPlease enter the passphrase to unlock encrypted "
+                "configuration entries.\n\nPassphrase: ")),
+            io:setopts(IoDevice, [{echo, true}]),
+            io:format(IoDevice, "~n", []),
+            PP;
+        {file, Filename} ->
+            {ok, File} = file:read_file(Filename),
+            [PP|_] = binary:split(File, [<<"\r\n">>, <<"\n">>]),
+            PP;
+        PP ->
+            PP
+    end,
+    Algo = {
+        proplists:get_value(cipher, ConfigEntryDecoder, rabbit_pbe:default_cipher()),
+        proplists:get_value(hash, ConfigEntryDecoder, rabbit_pbe:default_hash()),
+        proplists:get_value(iterations, ConfigEntryDecoder, rabbit_pbe:default_iterations()),
+        PassPhrase
+    },
+    decrypt_config(Apps, Algo),
+
     OrderedApps = app_utils:app_dependency_order(Apps, false),
     case lists:member(rabbit, Apps) of
-        false -> run_boot_steps(Apps); %% plugin activation
+        false -> rabbit_boot_steps:run_boot_steps(Apps); %% plugin activation
         true  -> ok                    %% will run during start of rabbit app
     end,
     ok = app_utils:start_applications(OrderedApps,
                                       handle_app_error(could_not_start)).
 
+%% This function retrieves the correct IoDevice for requesting
+%% input. The problem with using the default IoDevice is that
+%% the Erlang shell prevents us from getting the input.
+%%
+%% Instead we therefore look for the io process used by the
+%% shell and if it can't be found (because the shell is not
+%% started e.g with -noshell) we use the 'user' process.
+%%
+%% This function will not work when either -oldshell or -noinput
+%% options are passed to erl.
+get_input_iodevice() ->
+    case whereis(user) of
+        undefined -> user;
+        User ->
+            case group:interfaces(User) of
+                [] ->
+                    user;
+                [{user_drv, Drv}] ->
+                    case user_drv:interfaces(Drv) of
+                        [] ->
+                            user;
+                        [{current_group, IoDevice}] ->
+                            IoDevice
+                    end
+            end
+    end.
+
+decrypt_config([], _) ->
+    ok;
+decrypt_config([App|Apps], Algo) ->
+    decrypt_app(App, application:get_all_env(App), Algo),
+    decrypt_config(Apps, Algo).
+
+decrypt_app(_, [], _) ->
+    ok;
+decrypt_app(App, [{Key, Value}|Tail], Algo) ->
+    try begin
+            case decrypt(Value, Algo) of
+                Value ->
+                    ok;
+                NewValue ->
+                    application:set_env(App, Key, NewValue)
+            end
+        end
+    catch
+        exit:{bad_configuration, config_entry_decoder} ->
+            exit({bad_configuration, config_entry_decoder});
+        _:Msg ->
+            rabbit_log:info("Error while decrypting key '~p'. Please check encrypted value, passphrase, and encryption configuration~n", [Key]),
+            exit({decryption_error, {key, Key}, Msg})
+    end,
+    decrypt_app(App, Tail, Algo).
+
+decrypt({encrypted, _}, {_, _, _, undefined}) ->
+    exit({bad_configuration, config_entry_decoder});
+decrypt({encrypted, EncValue}, {Cipher, Hash, Iterations, Password}) ->
+    rabbit_pbe:decrypt_term(Cipher, Hash, Iterations, Password, EncValue);
+decrypt(List, Algo) when is_list(List) ->
+    decrypt_list(List, Algo, []);
+decrypt(Value, _) ->
+    Value.
+
+%% We make no distinction between strings and other lists.
+%% When we receive a string, we loop through each element
+%% and ultimately return the string unmodified, as intended.
+decrypt_list([], _, Acc) ->
+    lists:reverse(Acc);
+decrypt_list([{Key, Value}|Tail], Algo, Acc) when Key =/= encrypted ->
+    decrypt_list(Tail, Algo, [{Key, decrypt(Value, Algo)}|Acc]);
+decrypt_list([Value|Tail], Algo, Acc) ->
+    decrypt_list(Tail, Algo, [decrypt(Value, Algo)|Acc]).
+
 stop_apps(Apps) ->
     ok = app_utils:stop_applications(
            Apps, handle_app_error(error_during_shutdown)),
     case lists:member(rabbit, Apps) of
-        false -> run_cleanup_steps(Apps); %% plugin deactivation
-        true  -> ok                       %% it's all going anyway
+        %% plugin deactivation
+        false -> rabbit_boot_steps:run_cleanup_steps(Apps);
+        true  -> ok %% it's all going anyway
     end,
     ok.
 
@@ -415,10 +571,6 @@ handle_app_error(Term) ->
             throw({Term, App, Reason})
     end.
 
-run_cleanup_steps(Apps) ->
-    [run_step(Attrs, cleanup) || Attrs <- find_steps(Apps)],
-    ok.
-
 await_startup() ->
     await_startup(false).
 
@@ -437,6 +589,7 @@ await_startup(HaveSeenRabbitBoot) ->
 
 status() ->
     S1 = [{pid,                  list_to_integer(os:getpid())},
+          %% The timeout value used is twice that of gen_server:call/2.
           {running_applications, rabbit_misc:which_applications()},
           {os,                   os:type()},
           {erlang_version,       erlang:system_info(system_version)},
@@ -462,7 +615,8 @@ status() ->
           {uptime,           begin
                                  {T,_} = erlang:statistics(wall_clock),
                                  T div 1000
-                             end}],
+                             end},
+          {kernel,           {net_ticktime, net_kernel:get_net_ticktime()}}],
     S1 ++ S2 ++ S3 ++ S4.
 
 alarms() ->
@@ -492,17 +646,23 @@ is_running() -> is_running(node()).
 is_running(Node) -> rabbit_nodes:is_process_running(Node, rabbit).
 
 environment() ->
+    %% The timeout value is twice that of gen_server:call/2.
     [{A, environment(A)} ||
-        {A, _, _} <- lists:keysort(1, application:which_applications())].
+        {A, _, _} <- lists:keysort(1, application:which_applications(10000))].
 
 environment(App) ->
     Ignore = [default_pass, included_applications],
     lists:keysort(1, [P || P = {K, _} <- application:get_all_env(App),
                            not lists:member(K, Ignore)]).
 
+rotate_logs_info("") ->
+    rabbit_log:info("Reopening logs", []);
+rotate_logs_info(Suffix) ->
+    rabbit_log:info("Rotating logs with suffix '~s'~n", [Suffix]).
+
 rotate_logs(BinarySuffix) ->
     Suffix = binary_to_list(BinarySuffix),
-    rabbit_log:info("Rotating logs with suffix '~s'~n", [Suffix]),
+    rotate_logs_info(Suffix),
     log_rotation_result(rotate_logs(log_location(kernel),
                                     Suffix,
                                     rabbit_error_logger_file_h),
@@ -524,92 +684,24 @@ start(normal, []) ->
             log_banner(),
             warn_if_kernel_config_dubious(),
             warn_if_disc_io_options_dubious(),
-            run_boot_steps(),
+            rabbit_boot_steps:run_boot_steps(),
             {ok, SupPid};
         Error ->
             Error
     end.
 
-stop(_State) ->
+prep_stop(_State) ->
     ok = rabbit_alarm:stop(),
     ok = case rabbit_mnesia:is_clustered() of
-             true  -> rabbit_amqqueue:on_node_down(node());
+             true  -> ok;
              false -> rabbit_table:clear_ram_only_tables()
          end,
     ok.
 
-%%---------------------------------------------------------------------------
-%% boot step logic
-
-run_boot_steps() ->
-    run_boot_steps([App || {App, _, _} <- application:loaded_applications()]).
-
-run_boot_steps(Apps) ->
-    [ok = run_step(Attrs, mfa) || Attrs <- find_steps(Apps)],
-    ok.
-
-find_steps(Apps) ->
-    All = sort_boot_steps(rabbit_misc:all_module_attributes(rabbit_boot_step)),
-    [Attrs || {App, _, Attrs} <- All, lists:member(App, Apps)].
-
-run_step(Attributes, AttributeName) ->
-    case [MFA || {Key, MFA} <- Attributes,
-                 Key =:= AttributeName] of
-        [] ->
-            ok;
-        MFAs ->
-            [case apply(M,F,A) of
-                 ok              -> ok;
-                 {error, Reason} -> exit({error, Reason})
-             end || {M,F,A} <- MFAs],
-            ok
-    end.
+stop(_) -> ok.
 
-vertices({AppName, _Module, Steps}) ->
-    [{StepName, {AppName, StepName, Atts}} || {StepName, Atts} <- Steps].
-
-edges({_AppName, _Module, Steps}) ->
-    EnsureList = fun (L) when is_list(L) -> L;
-                     (T)                 -> [T]
-                 end,
-    [case Key of
-         requires -> {StepName, OtherStep};
-         enables  -> {OtherStep, StepName}
-     end || {StepName, Atts} <- Steps,
-            {Key, OtherStepOrSteps} <- Atts,
-            OtherStep <- EnsureList(OtherStepOrSteps),
-            Key =:= requires orelse Key =:= enables].
-
-sort_boot_steps(UnsortedSteps) ->
-    case rabbit_misc:build_acyclic_graph(fun vertices/1, fun edges/1,
-                                         UnsortedSteps) of
-        {ok, G} ->
-            %% Use topological sort to find a consistent ordering (if
-            %% there is one, otherwise fail).
-            SortedSteps = lists:reverse(
-                            [begin
-                                 {StepName, Step} = digraph:vertex(G,
-                                                                   StepName),
-                                 Step
-                             end || StepName <- digraph_utils:topsort(G)]),
-            digraph:delete(G),
-            %% Check that all mentioned {M,F,A} triples are exported.
-            case [{StepName, {M,F,A}} ||
-                     {_App, StepName, Attributes} <- SortedSteps,
-                     {mfa, {M,F,A}}               <- Attributes,
-                     not erlang:function_exported(M, F, length(A))] of
-                []         -> SortedSteps;
-                MissingFns -> exit({boot_functions_not_exported, MissingFns})
-            end;
-        {error, {vertex, duplicate, StepName}} ->
-            exit({duplicate_boot_step, StepName});
-        {error, {edge, Reason, From, To}} ->
-            exit({invalid_boot_step_dependency, From, To, Reason})
-    end.
+-spec boot_error(term(), not_available | [tuple()]) -> no_return().
 
--ifdef(use_specs).
--spec(boot_error/2 :: (term(), not_available | [tuple()]) -> no_return()).
--endif.
 boot_error({could_not_start, rabbit, {{timeout_waiting_for_tables, _}, _}},
            _Stacktrace) ->
     AllNodes = rabbit_mnesia:cluster_nodes(all),
@@ -637,10 +729,9 @@ boot_error(Reason, Stacktrace) ->
     Args = [Reason, log_location(kernel), log_location(sasl)],
     boot_error(Reason, Fmt, Args, Stacktrace).
 
--ifdef(use_specs).
--spec(boot_error/4 :: (term(), string(), [any()], not_available | [tuple()])
-                      -> no_return()).
--endif.
+-spec boot_error(term(), string(), [any()], not_available | [tuple()]) ->
+          no_return().
+
 boot_error(Reason, Fmt, Args, not_available) ->
     log_boot_error_and_exit(Reason, Fmt, Args);
 boot_error(Reason, Fmt, Args, Stacktrace) ->
@@ -784,11 +875,23 @@ log_broker_started(Plugins) ->
       end).
 
 erts_version_check() ->
-    FoundVer = erlang:system_info(version),
-    case rabbit_misc:version_compare(?ERTS_MINIMUM, FoundVer, lte) of
-        true  -> ok;
-        false -> {error, {erlang_version_too_old,
-                          {found, FoundVer}, {required, ?ERTS_MINIMUM}}}
+    ERTSVer = erlang:system_info(version),
+    OTPRel = erlang:system_info(otp_release),
+    case rabbit_misc:version_compare(?ERTS_MINIMUM, ERTSVer, lte) of
+        true when ?ERTS_MINIMUM =/= ERTSVer ->
+            ok;
+        true when ?ERTS_MINIMUM =:= ERTSVer andalso ?OTP_MINIMUM =< OTPRel ->
+            %% When a critical regression or bug is found, a new OTP
+            %% release can be published without changing the ERTS
+            %% version. For instance, this is the case with R16B03 and
+            %% R16B03-1.
+            %%
+            %% In this case, we compare the release versions
+            %% alphabetically.
+            ok;
+        _ -> {error, {erlang_version_too_old,
+                      {found, OTPRel, ERTSVer},
+                      {required, ?OTP_MINIMUM, ?ERTS_MINIMUM}}}
     end.
 
 print_banner() ->
@@ -800,7 +903,8 @@ print_banner() ->
               "~n  ##########  Logs: ~s"
               "~n  ######  ##        ~s"
               "~n  ##########"
-              "~n              Starting broker...",
+              "~n              Starting broker..."
+              "~n",
               [Product, Version, ?COPYRIGHT_MESSAGE, ?INFORMATION_MESSAGE,
                log_location(kernel), log_location(sasl)]).
 
@@ -829,11 +933,16 @@ log_banner() ->
     rabbit_log:info("~s", [Banner]).
 
 warn_if_kernel_config_dubious() ->
-    case erlang:system_info(kernel_poll) of
-        true  -> ok;
-        false -> rabbit_log:warning(
-                   "Kernel poll (epoll, kqueue, etc) is disabled. Throughput "
-                   "and CPU utilization may worsen.~n")
+    case os:type() of
+        {win32, _} ->
+            ok;
+        _ ->
+            case erlang:system_info(kernel_poll) of
+                true  -> ok;
+                false -> rabbit_log:warning(
+                           "Kernel poll (epoll, kqueue, etc) is disabled. Throughput "
+                           "and CPU utilization may worsen.~n")
+            end
     end,
     AsyncThreads = erlang:system_info(thread_pool_size),
     case AsyncThreads < ?ASYNC_THREADS_WARNING_THRESHOLD of
similarity index 84%
rename from rabbitmq-server/src/rabbit_access_control.erl
rename to deps/rabbit/src/rabbit_access_control.erl
index d9dd9cc3dc02973ab9d69de1d8909f15e7c839ba..3ae7d7f6906da194967c4889c6d4cfea3b0abeff 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_access_control).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([permission_atom/0]).
 
--type(permission_atom() :: 'configure' | 'read' | 'write').
-
--spec(check_user_pass_login/2 ::
-        (rabbit_types:username(), rabbit_types:password())
-        -> {'ok', rabbit_types:user()} |
-           {'refused', rabbit_types:username(), string(), [any()]}).
--spec(check_user_login/2 ::
-        (rabbit_types:username(), [{atom(), any()}])
-        -> {'ok', rabbit_types:user()} |
-           {'refused', rabbit_types:username(), string(), [any()]}).
--spec(check_user_loopback/2 :: (rabbit_types:username(),
-                                rabbit_net:socket() | inet:ip_address())
-        -> 'ok' | 'not_allowed').
--spec(check_vhost_access/3 ::
-        (rabbit_types:user(), rabbit_types:vhost(), rabbit_net:socket())
-        -> 'ok' | rabbit_types:channel_exit()).
--spec(check_resource_access/3 ::
-        (rabbit_types:user(), rabbit_types:r(atom()), permission_atom())
-        -> 'ok' | rabbit_types:channel_exit()).
-
--endif.
+-type permission_atom() :: 'configure' | 'read' | 'write'.
+
+-spec check_user_pass_login
+        (rabbit_types:username(), rabbit_types:password()) ->
+            {'ok', rabbit_types:user()} |
+            {'refused', rabbit_types:username(), string(), [any()]}.
+-spec check_user_login
+        (rabbit_types:username(), [{atom(), any()}]) ->
+            {'ok', rabbit_types:user()} |
+            {'refused', rabbit_types:username(), string(), [any()]}.
+-spec check_user_loopback
+        (rabbit_types:username(), rabbit_net:socket() | inet:ip_address()) ->
+            'ok' | 'not_allowed'.
+-spec check_vhost_access
+        (rabbit_types:user(), rabbit_types:vhost(),
+         rabbit_net:socket() | #authz_socket_info{}) ->
+            'ok' | rabbit_types:channel_exit().
+-spec check_resource_access
+        (rabbit_types:user(), rabbit_types:r(atom()), permission_atom()) ->
+            'ok' | rabbit_types:channel_exit().
 
 %%----------------------------------------------------------------------------
 
@@ -142,7 +139,7 @@ check_vhost_access(User = #user{username       = Username,
                               auth_user(User, Impl), VHostPath, Sock)
                 end,
                 Mod, "access to vhost '~s' refused for user '~s'",
-                [VHostPath, Username]);
+                [VHostPath, Username], not_allowed);
          (_, Else) ->
               Else
       end, ok, Modules).
@@ -164,7 +161,11 @@ check_resource_access(User = #user{username       = Username,
          (_, Else) -> Else
       end, ok, Modules).
 
+
 check_access(Fun, Module, ErrStr, ErrArgs) ->
+    check_access(Fun, Module, ErrStr, ErrArgs, access_refused).
+
+check_access(Fun, Module, ErrStr, ErrArgs, ErrName) ->
     Allow = case Fun() of
                 {error, E}  ->
                     rabbit_log:error(ErrStr ++ " by ~s: ~p~n",
@@ -177,5 +178,5 @@ check_access(Fun, Module, ErrStr, ErrArgs) ->
         true ->
             ok;
         false ->
-            rabbit_misc:protocol_error(access_refused, ErrStr, ErrArgs)
+            rabbit_misc:protocol_error(ErrName, ErrStr, ErrArgs)
     end.
similarity index 64%
rename from rabbitmq-server/src/rabbit_alarm.erl
rename to deps/rabbit/src/rabbit_alarm.erl
index 557fa31335d236e503040218793344a1fa55d295..dd64c6f1c8f8060c00d34d523e687619dd24c93d 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
+%% There are two types of alarms handled by this module:
+%%
+%% * per-node resource (disk, memory) alarms for the whole cluster. If any node
+%%   has an alarm, then all publishing should be disabled througout the
+%%   cluster until all alarms clear. When a node sets such an alarm,
+%%   this information is automatically propagated throughout the cluster.
+%%   `#alarms.alarmed_nodes' is being used to track this type of alarms.
+%% * limits local to this node (file_descriptor_limit). Used for information
+%%   purposes only: logging and getting node status. This information is not propagated
+%%   throughout the cluster. `#alarms.alarms' is being used to track this type of alarms.
+%% @end
 
 -module(rabbit_alarm).
 
 
 -define(SERVER, ?MODULE).
 
--record(alarms, {alertees, alarmed_nodes, alarms}).
-
-%%----------------------------------------------------------------------------
 
--ifdef(use_specs).
 
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(start/0 :: () -> 'ok').
--spec(stop/0 :: () -> 'ok').
--spec(register/2 :: (pid(), rabbit_types:mfargs()) -> [atom()]).
--spec(set_alarm/1 :: (any()) -> 'ok').
--spec(clear_alarm/1 :: (any()) -> 'ok').
--spec(on_node_up/1 :: (node()) -> 'ok').
--spec(on_node_down/1 :: (node()) -> 'ok').
+%%----------------------------------------------------------------------------
 
--endif.
+-record(alarms, {alertees :: dict:dict(pid(), rabbit_types:mfargs()),
+                 alarmed_nodes :: dict:dict(node(), [resource_alarm_source()]),
+                 alarms :: [alarm()]}).
+
+-type local_alarm() :: 'file_descriptor_limit'.
+-type resource_alarm_source() :: 'disk' | 'memory'.
+-type resource_alarm() :: {resource_limit, resource_alarm_source(), node()}.
+-type alarm() :: local_alarm() | resource_alarm().
+
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec start() -> 'ok'.
+-spec stop() -> 'ok'.
+-spec register(pid(), rabbit_types:mfargs()) -> [atom()].
+-spec set_alarm({alarm(), []}) -> 'ok'.
+-spec clear_alarm(alarm()) -> 'ok'.
+-spec on_node_up(node()) -> 'ok'.
+-spec on_node_down(node()) -> 'ok'.
+-spec get_alarms() -> [{alarm(), []}].
 
 %%----------------------------------------------------------------------------
 
@@ -54,6 +71,7 @@ start() ->
     ok = rabbit_sup:start_restartable_child(?MODULE),
     ok = gen_event:add_handler(?SERVER, ?MODULE, []),
     {ok, MemoryWatermark} = application:get_env(vm_memory_high_watermark),
+
     rabbit_sup:start_restartable_child(
       vm_memory_monitor, [MemoryWatermark,
                           fun (Alarm) ->
@@ -68,6 +86,10 @@ start() ->
 
 stop() -> ok.
 
+%% Registers a handler that should be called on every resource alarm change.
+%% Given a call rabbit_alarm:register(Pid, {M, F, A}), the handler would be
+%% called like this: `apply(M, F, A ++ [Pid, Source, Alert])', where `Source'
+%% has the type of resource_alarm_source() and `Alert' has the type of resource_alert().
 register(Pid, AlertMFA) ->
     gen_event:call(?SERVER, ?MODULE, {register, Pid, AlertMFA}, infinity).
 
@@ -79,10 +101,10 @@ get_alarms() -> gen_event:call(?SERVER, ?MODULE, get_alarms, infinity).
 on_node_up(Node)   -> gen_event:notify(?SERVER, {node_up,   Node}).
 on_node_down(Node) -> gen_event:notify(?SERVER, {node_down, Node}).
 
-remote_conserve_resources(Pid, Source, true) ->
+remote_conserve_resources(Pid, Source, {true, _, _}) ->
     gen_event:notify({?SERVER, node(Pid)},
                      {set_alarm, {{resource_limit, Source, node()}, []}});
-remote_conserve_resources(Pid, Source, false) ->
+remote_conserve_resources(Pid, Source, {false, _, _}) ->
     gen_event:notify({?SERVER, node(Pid)},
                      {clear_alarm, {resource_limit, Source, node()}}).
 
@@ -98,12 +120,17 @@ handle_call({register, Pid, AlertMFA}, State = #alarms{alarmed_nodes = AN}) ->
     {ok, lists:usort(lists:append([V || {_, V} <- dict:to_list(AN)])),
      internal_register(Pid, AlertMFA, State)};
 
-handle_call(get_alarms, State = #alarms{alarms = Alarms}) ->
-    {ok, Alarms, State};
+handle_call(get_alarms, State) ->
+    {ok, get_alarms(State), State};
 
 handle_call(_Request, State) ->
     {ok, not_understood, State}.
 
+handle_event({set_alarm, {{resource_limit, Source, Node}, []}}, State) ->
+    case is_node_alarmed(Source, Node, State) of
+        true -> {ok, State};
+        false -> handle_set_resource_alarm(Source, Node, State)
+    end;
 handle_event({set_alarm, Alarm}, State = #alarms{alarms = Alarms}) ->
     case lists:member(Alarm, Alarms) of
         true  -> {ok, State};
@@ -111,6 +138,13 @@ handle_event({set_alarm, Alarm}, State = #alarms{alarms = Alarms}) ->
                  handle_set_alarm(Alarm, State#alarms{alarms = UpdatedAlarms})
     end;
 
+handle_event({clear_alarm, {resource_limit, Source, Node}}, State) ->
+    case is_node_alarmed(Source, Node, State) of
+        true  ->
+            handle_clear_resource_alarm(Source, Node, State);
+        false ->
+            {ok, State}
+    end;
 handle_event({clear_alarm, Alarm}, State = #alarms{alarms = Alarms}) ->
     case lists:keymember(Alarm, 1, Alarms) of
         true  -> handle_clear_alarm(
@@ -127,8 +161,16 @@ handle_event({node_up, Node}, State) ->
            {register, self(), {?MODULE, remote_conserve_resources, []}}),
     {ok, State};
 
-handle_event({node_down, Node}, State) ->
-    {ok, maybe_alert(fun dict_unappend_all/3, Node, [], false, State)};
+handle_event({node_down, Node}, #alarms{alarmed_nodes = AN} = State) ->
+    AlarmsForDeadNode = case dict:find(Node, AN) of
+                            {ok, V} -> V;
+                            error   -> []
+                        end,
+    {ok, lists:foldr(fun(Source, AccState) ->
+                             rabbit_log:warning("~s resource limit alarm cleared for dead node ~p~n",
+                                                [Source, Node]),
+                             maybe_alert(fun dict_unappend/3, Node, Source, false, AccState)
+                     end, State, AlarmsForDeadNode)};
 
 handle_event({register, Pid, AlertMFA}, State) ->
     {ok, internal_register(Pid, AlertMFA, State)};
@@ -158,9 +200,6 @@ dict_append(Key, Val, Dict) ->
         end,
     dict:store(Key, lists:usort([Val|L]), Dict).
 
-dict_unappend_all(Key, _Val, Dict) ->
-    dict:erase(Key, Dict).
-
 dict_unappend(Key, Val, Dict) ->
     L = case dict:find(Key, Dict) of
             {ok, V} -> V;
@@ -172,10 +211,17 @@ dict_unappend(Key, Val, Dict) ->
         X  -> dict:store(Key, X, Dict)
     end.
 
-maybe_alert(UpdateFun, Node, Source, Alert,
+maybe_alert(UpdateFun, Node, Source, WasAlertAdded,
             State = #alarms{alarmed_nodes = AN,
                             alertees      = Alertees}) ->
     AN1 = UpdateFun(Node, Source, AN),
+    %% Is alarm for Source still set on any node?
+    StillHasAlerts = lists:any(fun ({_Node, NodeAlerts}) -> lists:member(Source, NodeAlerts) end, dict:to_list(AN1)),
+    case StillHasAlerts of
+        true -> ok;
+        false -> rabbit_log:warning("~s resource limit alarm cleared across the cluster~n", [Source])
+    end,
+    Alert = {WasAlertAdded, StillHasAlerts, Node},
     case node() of
         Node -> ok = alert_remote(Alert,  Alertees, Source);
         _    -> ok
@@ -202,20 +248,21 @@ internal_register(Pid, {M, F, A} = AlertMFA,
                   State = #alarms{alertees = Alertees}) ->
     _MRef = erlang:monitor(process, Pid),
     case dict:find(node(), State#alarms.alarmed_nodes) of
-        {ok, Sources} -> [apply(M, F, A ++ [Pid, R, true]) || R <- Sources];
+        {ok, Sources} -> [apply(M, F, A ++ [Pid, R, {true, true, node()}]) || R <- Sources];
         error          -> ok
     end,
     NewAlertees = dict:store(Pid, AlertMFA, Alertees),
     State#alarms{alertees = NewAlertees}.
 
-handle_set_alarm({{resource_limit, Source, Node}, []}, State) ->
+handle_set_resource_alarm(Source, Node, State) ->
     rabbit_log:warning(
       "~s resource limit alarm set on node ~p.~n~n"
       "**********************************************************~n"
       "*** Publishers will be blocked until this alarm clears ***~n"
       "**********************************************************~n",
       [Source, Node]),
-    {ok, maybe_alert(fun dict_append/3, Node, Source, true, State)};
+    {ok, maybe_alert(fun dict_append/3, Node, Source, true, State)}.
+
 handle_set_alarm({file_descriptor_limit, []}, State) ->
     rabbit_log:warning(
       "file descriptor limit alarm set.~n~n"
@@ -227,13 +274,27 @@ handle_set_alarm(Alarm, State) ->
     rabbit_log:warning("alarm '~p' set~n", [Alarm]),
     {ok, State}.
 
-handle_clear_alarm({resource_limit, Source, Node}, State) ->
+handle_clear_resource_alarm(Source, Node, State) ->
     rabbit_log:warning("~s resource limit alarm cleared on node ~p~n",
                        [Source, Node]),
-    {ok, maybe_alert(fun dict_unappend/3, Node, Source, false, State)};
+    {ok, maybe_alert(fun dict_unappend/3, Node, Source, false, State)}.
+
 handle_clear_alarm(file_descriptor_limit, State) ->
     rabbit_log:warning("file descriptor limit alarm cleared~n"),
     {ok, State};
 handle_clear_alarm(Alarm, State) ->
     rabbit_log:warning("alarm '~p' cleared~n", [Alarm]),
     {ok, State}.
+
+is_node_alarmed(Source, Node, #alarms{alarmed_nodes = AN}) ->
+    case dict:find(Node, AN) of
+        {ok, Sources} ->
+            lists:member(Source, Sources);
+        error ->
+            false
+    end.
+
+get_alarms(#alarms{alarms = Alarms,
+                   alarmed_nodes = AN}) ->
+    Alarms ++ [ {{resource_limit, Source, Node}, []}
+                || {Node, Sources} <- dict:to_list(AN), Source <- Sources ].
similarity index 89%
rename from rabbitmq-server/src/rabbit_amqqueue_process.erl
rename to deps/rabbit/src/rabbit_amqqueue_process.erl
index 999e66aee3d8dae10838fc5197d8f06d141cd650..bfa868c6517563e4a0f1ea81ce79328480d3cb90 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqqueue_process).
          prioritise_cast/3, prioritise_info/3, format_message_queue/2]).
 
 %% Queue's state
--record(q, {q,
+-record(q, {
+            %% an #amqqueue record
+            q,
+            %% none | {exclusive consumer channel PID, consumer tag}
             exclusive_consumer,
+            %% Set to true if a queue has ever had a consumer.
+            %% This is used to determine when to delete auto-delete queues.
             has_had_consumers,
+            %% backing queue module.
+            %% for mirrored queues, this will be rabbit_mirror_queue_master.
+            %% for non-priority and non-mirrored queues, rabbit_variable_queue.
+            %% see rabbit_backing_queue.
             backing_queue,
+            %% backing queue state.
+            %% see rabbit_backing_queue, rabbit_variable_queue.
             backing_queue_state,
+            %% consumers state, see rabbit_queue_consumers
             consumers,
+            %% queue expiration value
             expires,
+            %% timer used to periodically sync (flush) queue index
             sync_timer_ref,
+            %% timer used to update ingress/egress rates and queue RAM duration target
             rate_timer_ref,
+            %% timer used to clean up this queue due to TTL (on when unused)
             expiry_timer_ref,
+            %% stats emission timer
             stats_timer,
+            %% maps message IDs to {channel pid, MsgSeqNo}
+            %% pairs
             msg_id_to_channel,
+            %% message TTL value
             ttl,
+            %% timer used to delete expired messages
             ttl_timer_ref,
             ttl_timer_expiry,
+            %% Keeps track of channels that publish to this queue.
+            %% When channel process goes down, queues have to perform
+            %% certain cleanup.
             senders,
+            %% dead letter exchange as a #resource record, if any
             dlx,
             dlx_routing_key,
+            %% max length in messages, if configured
             max_length,
+            %% max length in bytes, if configured
             max_bytes,
+            %% when policies change, this version helps queue
+            %% determine what previously scheduled/set up state to ignore,
+            %% e.g. message expiration messages from previously set up timers
+            %% that may or may not be still valid
             args_policy_version,
+            mirroring_policy_version = 0,
+            %% running | flow | idle
             status
            }).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(init_with_backing_queue_state/7 ::
+-spec info_keys() -> rabbit_types:info_keys().
+-spec init_with_backing_queue_state
         (rabbit_types:amqqueue(), atom(), tuple(), any(),
-         [rabbit_types:delivery()], pmon:pmon(), dict:dict()) -> #q{}).
-
--endif.
+         [rabbit_types:delivery()], pmon:pmon(), ?DICT_TYPE()) ->
+            #q{}.
 
 %%----------------------------------------------------------------------------
 
          slave_pids,
          synchronised_slave_pids,
          recoverable_slaves,
-         state
+         state,
+         reductions,
+         garbage_collection
         ]).
 
 -define(CREATION_EVENT_KEYS,
          durable,
          auto_delete,
          arguments,
-         owner_pid
+         owner_pid,
+         exclusive
         ]).
 
 -define(INFO_KEYS, [pid | ?CREATION_EVENT_KEYS ++ ?STATISTICS_KEYS -- [name]]).
@@ -317,7 +350,8 @@ process_args_policy(State = #q{q                   = Q,
          {<<"dead-letter-routing-key">>, fun res_arg/2, fun init_dlx_rkey/2},
          {<<"message-ttl">>,             fun res_min/2, fun init_ttl/2},
          {<<"max-length">>,              fun res_min/2, fun init_max_length/2},
-         {<<"max-length-bytes">>,        fun res_min/2, fun init_max_bytes/2}],
+         {<<"max-length-bytes">>,        fun res_min/2, fun init_max_bytes/2},
+         {<<"queue-mode">>,              fun res_arg/2, fun init_queue_mode/2}],
       drop_expired_msgs(
          lists:foldl(fun({Name, Resolve, Fun}, StateN) ->
                              Fun(args_policy_lookup(Name, Resolve, Q), StateN)
@@ -360,6 +394,13 @@ init_max_bytes(MaxBytes, State) ->
     {_Dropped, State1} = maybe_drop_head(State#q{max_bytes = MaxBytes}),
     State1.
 
+init_queue_mode(undefined, State) ->
+    State;
+init_queue_mode(Mode, State = #q {backing_queue = BQ,
+                                  backing_queue_state = BQS}) ->
+    BQS1 = BQ:set_queue_mode(binary_to_existing_atom(Mode, utf8), BQS),
+    State#q{backing_queue_state = BQS1}.
+
 reply(Reply, NewState) ->
     {NewState1, Timeout} = next_state(NewState),
     {reply, Reply, ensure_stats_timer(ensure_rate_timer(NewState1)), Timeout}.
@@ -421,7 +462,7 @@ ensure_ttl_timer(undefined, State) ->
     State;
 ensure_ttl_timer(Expiry, State = #q{ttl_timer_ref       = undefined,
                                     args_policy_version = Version}) ->
-    After = (case Expiry - now_micros() of
+    After = (case Expiry - time_compat:os_system_time(micro_seconds) of
                  V when V > 0 -> V + 999; %% always fire later
                  _            -> 0
              end) div 1000,
@@ -741,7 +782,7 @@ calculate_msg_expiry(#basic_message{content = Content}, TTL) ->
     {ok, MsgTTL} = rabbit_basic:parse_expiration(Props),
     case lists:min([TTL, MsgTTL]) of
         undefined -> undefined;
-        T         -> now_micros() + T * 1000
+        T         -> time_compat:os_system_time(micro_seconds) + T * 1000
     end.
 
 %% Logically this function should invoke maybe_send_drained/2.
@@ -752,7 +793,8 @@ calculate_msg_expiry(#basic_message{content = Content}, TTL) ->
 drop_expired_msgs(State) ->
     case is_empty(State) of
         true  -> State;
-        false -> drop_expired_msgs(now_micros(), State)
+        false -> drop_expired_msgs(time_compat:os_system_time(micro_seconds),
+                                   State)
     end.
 
 drop_expired_msgs(Now, State = #q{backing_queue_state = BQS,
@@ -815,8 +857,6 @@ stop(State) -> stop(noreply, State).
 stop(noreply, State) -> {stop, normal, State};
 stop(Reply,   State) -> {stop, normal, Reply, State}.
 
-now_micros() -> timer:now_diff(now(), {0,0,0}).
-
 infos(Items, State) -> [{Item, i(Item, State)} || Item <- Items].
 
 i(name,        #q{q = #amqqueue{name        = Name}})       -> Name;
@@ -829,6 +869,8 @@ i(owner_pid, #q{q = #amqqueue{exclusive_owner = none}}) ->
     '';
 i(owner_pid, #q{q = #amqqueue{exclusive_owner = ExclusiveOwner}}) ->
     ExclusiveOwner;
+i(exclusive, #q{q = #amqqueue{exclusive_owner = ExclusiveOwner}}) ->
+    is_pid(ExclusiveOwner);
 i(policy,    #q{q = Q}) ->
     case rabbit_policy:name(Q) of
         none   -> '';
@@ -883,6 +925,11 @@ i(recoverable_slaves, #q{q = #amqqueue{name    = Name,
     end;
 i(state, #q{status = running}) -> credit_flow:state();
 i(state, #q{status = State})   -> State;
+i(garbage_collection, _State) ->
+    rabbit_misc:get_gc_info(self());
+i(reductions, _State) ->
+    {reductions, Reductions} = erlang:process_info(self(), reductions),
+    Reductions;
 i(Item, #q{backing_queue_state = BQS, backing_queue = BQ}) ->
     BQ:info(Item, BQS).
 
@@ -966,7 +1013,17 @@ prioritise_info(Msg, _Len, #q{q = #amqqueue{exclusive_owner = DownPid}}) ->
     end.
 
 handle_call({init, Recover}, From, State) ->
-    init_it(Recover, From, State);
+    try
+       init_it(Recover, From, State)
+    catch
+       {coordinator_not_started, Reason} ->
+           %% The GM can shutdown before the coordinator has started up
+           %% (lost membership or missing group), thus the start_link of
+           %% the coordinator returns {error, shutdown} as rabbit_amqqueue_process
+           %% is trapping exits. The master captures this return value and
+           %% throws the current exception.
+           {stop, Reason, State}
+    end;
 
 handle_call(info, _From, State) ->
     reply(infos(info_keys(), State), State);
@@ -1112,7 +1169,17 @@ handle_call(cancel_sync_mirrors, _From, State) ->
     reply({ok, not_syncing}, State).
 
 handle_cast(init, State) ->
-    init_it({no_barrier, non_clean_shutdown}, none, State);
+    try
+       init_it({no_barrier, non_clean_shutdown}, none, State)
+    catch
+       {coordinator_not_started, Reason} ->
+           %% The GM can shutdown before the coordinator has started up
+           %% (lost membership or missing group), thus the start_link of
+           %% the coordinator returns {error, shutdown} as rabbit_amqqueue_process
+           %% is trapping exits. The master captures this return value and
+           %% throws the current exception.
+           {stop, Reason, State}
+    end;
 
 handle_cast({run_backing_queue, Mod, Fun},
             State = #q{backing_queue = BQ, backing_queue_state = BQS}) ->
@@ -1179,22 +1246,15 @@ handle_cast({set_maximum_since_use, Age}, State) ->
     ok = file_handle_cache:set_maximum_since_use(Age),
     noreply(State);
 
-handle_cast(start_mirroring, State = #q{backing_queue       = BQ,
-                                        backing_queue_state = BQS}) ->
-    %% lookup again to get policy for init_with_existing_bq
-    {ok, Q} = rabbit_amqqueue:lookup(qname(State)),
-    true = BQ =/= rabbit_mirror_queue_master, %% assertion
-    BQ1 = rabbit_mirror_queue_master,
-    BQS1 = BQ1:init_with_existing_bq(Q, BQ, BQS),
-    noreply(State#q{backing_queue       = BQ1,
-                    backing_queue_state = BQS1});
-
-handle_cast(stop_mirroring, State = #q{backing_queue       = BQ,
-                                       backing_queue_state = BQS}) ->
-    BQ = rabbit_mirror_queue_master, %% assertion
-    {BQ1, BQS1} = BQ:stop_mirroring(BQS),
-    noreply(State#q{backing_queue       = BQ1,
-                    backing_queue_state = BQS1});
+handle_cast(update_mirroring, State = #q{q = Q,
+                                         mirroring_policy_version = Version}) ->
+    case needs_update_mirroring(Q, Version) of
+        false ->
+            noreply(State);
+        {Policy, NewVersion} ->
+            State1 = State#q{mirroring_policy_version = NewVersion},
+            noreply(update_mirroring(Policy, State1))
+    end;
 
 handle_cast({credit, ChPid, CTag, Credit, Drain},
             State = #q{consumers           = Consumers,
@@ -1240,7 +1300,13 @@ handle_cast(policy_changed, State = #q{q = #amqqueue{name = Name}}) ->
     %% This also has the side effect of waking us up so we emit a
     %% stats event - so event consumers see the changed policy.
     {ok, Q} = rabbit_amqqueue:lookup(Name),
-    noreply(process_args_policy(State#q{q = Q})).
+    noreply(process_args_policy(State#q{q = Q}));
+
+handle_cast({sync_start, _, _}, State = #q{q = #amqqueue{name = Name}}) ->
+    %% Only a slave should receive this, it means we are a duplicated master
+    rabbit_mirror_queue_misc:log_warning(
+      Name, "Stopping after receiving sync_start from another master", []),
+    stop(State).
 
 handle_info({maybe_expire, Vsn}, State = #q{args_policy_version = Vsn}) ->
     case is_unused(State) of
@@ -1327,10 +1393,64 @@ handle_pre_hibernate(State = #q{backing_queue = BQ,
     BQS3 = BQ:handle_pre_hibernate(BQS2),
     rabbit_event:if_enabled(
       State, #q.stats_timer,
-      fun () -> emit_stats(State, [{idle_since,           now()},
-                                   {consumer_utilisation, ''}]) end),
+      fun () -> emit_stats(State,
+                           [{idle_since,
+                             time_compat:os_system_time(milli_seconds)},
+                            {consumer_utilisation, ''}])
+                end),
     State1 = rabbit_event:stop_stats_timer(State#q{backing_queue_state = BQS3},
                                            #q.stats_timer),
     {hibernate, stop_rate_timer(State1)}.
 
 format_message_queue(Opt, MQ) -> rabbit_misc:format_message_queue(Opt, MQ).
+
+needs_update_mirroring(Q, Version) ->
+    {ok, UpQ} = rabbit_amqqueue:lookup(Q#amqqueue.name),
+    DBVersion = UpQ#amqqueue.policy_version,
+    case DBVersion > Version of
+        true -> {rabbit_policy:get(<<"ha-mode">>, UpQ), DBVersion};
+        false -> false
+    end.
+
+update_mirroring(Policy, State = #q{backing_queue = BQ}) ->
+    case update_to(Policy, BQ) of
+        start_mirroring ->
+            start_mirroring(State);
+        stop_mirroring ->
+            stop_mirroring(State);
+        ignore ->
+            State;
+        update_ha_mode ->
+            update_ha_mode(State)
+    end.
+
+update_to(undefined, rabbit_mirror_queue_master) ->
+    stop_mirroring;
+update_to(_, rabbit_mirror_queue_master) ->
+    update_ha_mode;
+update_to(undefined, BQ) when BQ =/= rabbit_mirror_queue_master ->
+    ignore;
+update_to(_, BQ) when BQ =/= rabbit_mirror_queue_master ->
+    start_mirroring.
+
+start_mirroring(State = #q{backing_queue       = BQ,
+                           backing_queue_state = BQS}) ->
+    %% lookup again to get policy for init_with_existing_bq
+    {ok, Q} = rabbit_amqqueue:lookup(qname(State)),
+    true = BQ =/= rabbit_mirror_queue_master, %% assertion
+    BQ1 = rabbit_mirror_queue_master,
+    BQS1 = BQ1:init_with_existing_bq(Q, BQ, BQS),
+    State#q{backing_queue       = BQ1,
+            backing_queue_state = BQS1}.
+
+stop_mirroring(State = #q{backing_queue       = BQ,
+                          backing_queue_state = BQS}) ->
+    BQ = rabbit_mirror_queue_master, %% assertion
+    {BQ1, BQS1} = BQ:stop_mirroring(BQS),
+    State#q{backing_queue       = BQ1,
+            backing_queue_state = BQS1}.
+
+update_ha_mode(State) ->
+    {ok, Q} = rabbit_amqqueue:lookup(qname(State)),
+    ok = rabbit_mirror_queue_misc:update_mirrors(Q),
+    State.
similarity index 82%
rename from rabbitmq-server/src/rabbit_amqqueue_sup.erl
rename to deps/rabbit/src/rabbit_amqqueue_sup.erl
index f05ff430cbab99d12418d3d4cde8cc1aec435b40..f1e770aa455a6bdc05ed2deebf92780a973a93c3 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqqueue_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/2 :: (rabbit_types:amqqueue(), rabbit_prequeue:start_mode()) ->
-                           {'ok', pid(), pid()}).
-
--endif.
+-spec start_link(rabbit_types:amqqueue(), rabbit_prequeue:start_mode()) ->
+          {'ok', pid(), pid()}.
 
 %%----------------------------------------------------------------------------
 
@@ -39,7 +35,7 @@ start_link(Q, StartMode) ->
     Marker = spawn_link(fun() -> receive stop -> ok end end),
     ChildSpec = {rabbit_amqqueue,
                  {rabbit_prequeue, start_link, [Q, StartMode, Marker]},
-                 intrinsic, ?MAX_WAIT, worker, [rabbit_amqqueue_process,
+                 intrinsic, ?WORKER_WAIT, worker, [rabbit_amqqueue_process,
                                                 rabbit_mirror_queue_slave]},
     {ok, SupPid} = supervisor2:start_link(?MODULE, []),
     {ok, QPid} = supervisor2:start_child(SupPid, ChildSpec),
similarity index 78%
rename from rabbitmq-server/src/rabbit_amqqueue_sup_sup.erl
rename to deps/rabbit/src/rabbit_amqqueue_sup_sup.erl
index 4c4bb52ae5f593d4cd15969114eb13eff796d24d..c57d9334e26c99af25b518198b9a5240ee3da049 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqqueue_sup_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(start_queue_process/3 :: (node(), rabbit_types:amqqueue(),
-                               'declare' | 'recovery' | 'slave') -> pid()).
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec start_queue_process
+        (node(), rabbit_types:amqqueue(), 'declare' | 'recovery' | 'slave') ->
+            pid().
 
 %%----------------------------------------------------------------------------
 
@@ -49,4 +46,4 @@ start_queue_process(Node, Q, StartMode) ->
 init([]) ->
     {ok, {{simple_one_for_one, 10, 10},
           [{rabbit_amqqueue_sup, {rabbit_amqqueue_sup, start_link, []},
-            temporary, ?MAX_WAIT, supervisor, [rabbit_amqqueue_sup]}]}}.
+            temporary, ?SUPERVISOR_WAIT, supervisor, [rabbit_amqqueue_sup]}]}}.
similarity index 96%
rename from rabbitmq-server/src/rabbit_auth_mechanism_amqplain.erl
rename to deps/rabbit/src/rabbit_auth_mechanism_amqplain.erl
index 621c575ebf1b2af802b4fd6acadce93efffaf9c5..aad004a62889e1ae0a89b1bb7f72d8e56788c347 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_mechanism_amqplain).
similarity index 96%
rename from rabbitmq-server/src/rabbit_auth_mechanism_cr_demo.erl
rename to deps/rabbit/src/rabbit_auth_mechanism_cr_demo.erl
index d9d7b11f15c624334984c1037fa757588d459e6e..ed74c3537ee88b6c00fb26c8d19c9d9d44811ab7 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_mechanism_cr_demo).
similarity index 97%
rename from rabbitmq-server/src/rabbit_auth_mechanism_plain.erl
rename to deps/rabbit/src/rabbit_auth_mechanism_plain.erl
index 7a5f433abd694a630be24b7f4d4d654bf751dfb0..431319622e26bc813470a889512e87b1fed7ddd1 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_mechanism_plain).
similarity index 82%
rename from rabbitmq-server/src/rabbit_autoheal.erl
rename to deps/rabbit/src/rabbit_autoheal.erl
index cd53127e599fd5b00343ea3cedb738a5614431d7..3adcc09692670c7f6f1688d54f952f30c5f17890 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_autoheal).
@@ -180,6 +180,18 @@ node_down(_Node, not_healing) ->
 node_down(Node, {winner_waiting, _, Notify}) ->
     abort([Node], Notify);
 
+node_down(Node, {leader_waiting, Node, _Notify}) ->
+    %% The winner went down, we don't know what to do so we simply abort.
+    rabbit_log:info("Autoheal: aborting - winner ~p went down~n", [Node]),
+    not_healing;
+
+node_down(Node, {leader_waiting, _, _} = St) ->
+    %% If it is a partial partition, the winner might continue with the
+    %% healing process. If it is a full partition, the winner will also
+    %% see it and abort. Let's wait for it.
+    rabbit_log:info("Autoheal: ~p went down, waiting for winner decision ~n", [Node]),
+    St;
+
 node_down(Node, _State) ->
     rabbit_log:info("Autoheal: aborting - ~p went down~n", [Node]),
     not_healing.
@@ -218,14 +230,24 @@ handle_msg({become_winner, Losers},
            not_healing, _Partitions) ->
     rabbit_log:info("Autoheal: I am the winner, waiting for ~p to stop~n",
                     [Losers]),
-    %% The leader said everything was ready - do we agree? If not then
-    %% give up.
-    Down = Losers -- rabbit_node_monitor:alive_rabbit_nodes(Losers),
-    case Down of
-        [] -> [send(L, {winner_is, node()}) || L <- Losers],
-              {winner_waiting, Losers, Losers};
-        _  -> abort(Down, Losers)
-    end;
+    stop_partition(Losers);
+
+handle_msg({become_winner, Losers},
+           {winner_waiting, _, Losers}, _Partitions) ->
+    %% The leader has aborted the healing, might have seen us down but
+    %% we didn't see the same. Let's try again as it is the same partition.
+    rabbit_log:info("Autoheal: I am the winner and received a duplicated "
+                   "request, waiting again for ~p to stop~n", [Losers]),
+    stop_partition(Losers);
+
+handle_msg({become_winner, _},
+           {winner_waiting, _, Losers}, _Partitions) ->
+    %% Something has happened to the leader, it might have seen us down but we
+    %% are still alive. Partitions have changed, cannot continue.
+    rabbit_log:info("Autoheal: I am the winner and received another healing "
+                   "request, partitions have changed to ~p. Aborting ~n", [Losers]),
+    winner_finish(Losers),
+    not_healing;
 
 handle_msg({winner_is, Winner}, State = not_healing,
            _Partitions) ->
@@ -269,6 +291,14 @@ handle_msg({autoheal_finished, Winner}, not_healing, _Partitions)
     %% We are the leader and the winner. The state already transitioned
     %% to "not_healing" at the end of the autoheal process.
     rabbit_log:info("Autoheal finished according to winner ~p~n", [node()]),
+    not_healing;
+
+handle_msg({autoheal_finished, Winner}, not_healing, _Partitions) ->
+    %% We might have seen the winner down during a partial partition and
+    %% transitioned to not_healing. However, the winner was still able
+    %% to finish. Let it pass.
+    rabbit_log:info("Autoheal finished according to winner ~p."
+                   " Unexpected, I might have previously seen the winner down~n", [Winner]),
     not_healing.
 
 %%----------------------------------------------------------------------------
@@ -279,7 +309,9 @@ abort(Down, Notify) ->
     rabbit_log:info("Autoheal: aborting - ~p down~n", [Down]),
     %% Make sure any nodes waiting for us start - it won't necessarily
     %% heal the partition but at least they won't get stuck.
-    winner_finish(Notify).
+    %% If we are executing this, we are not stopping. Thus, don't wait
+    %% for ourselves!
+    winner_finish(Notify -- [node()]).
 
 winner_finish(Notify) ->
     %% There is a race in Mnesia causing a starting loser to hang
@@ -297,21 +329,33 @@ winner_finish(Notify) ->
     send(leader(), {autoheal_finished, node()}),
     not_healing.
 
-wait_for_mnesia_shutdown([Node | Rest] = AllNodes) ->
-    case rpc:call(Node, mnesia, system_info, [is_running]) of
-        no ->
-            wait_for_mnesia_shutdown(Rest);
-        Running when
-        Running =:= yes orelse
-        Running =:= starting orelse
-        Running =:= stopping ->
-            timer:sleep(?MNESIA_STOPPED_PING_INTERNAL),
-            wait_for_mnesia_shutdown(AllNodes);
-        _ ->
-            wait_for_mnesia_shutdown(Rest)
-    end;
-wait_for_mnesia_shutdown([]) ->
-    ok.
+%% This improves the previous implementation, but could still potentially enter an infinite
+%% loop. It is also possible that when it finishes some of the nodes have been
+%% manually restarted, but we can't do much more (apart from stop them again). So let it
+%% continue and notify all the losers to restart.
+wait_for_mnesia_shutdown(AllNodes) ->
+    Monitors = lists:foldl(fun(Node, Monitors0) ->
+                                  pmon:monitor({mnesia_sup, Node}, Monitors0)
+                          end, pmon:new(), AllNodes),
+    wait_for_supervisors(Monitors).
+
+wait_for_supervisors(Monitors) ->
+    case pmon:is_empty(Monitors) of
+       true ->
+           ok;
+       false ->
+           receive
+               {'DOWN', _MRef, process, {mnesia_sup, _} = I, _Reason} ->
+                   wait_for_supervisors(pmon:erase(I, Monitors))
+           after
+               60000 ->
+                   AliveLosers = [Node || {_, Node} <- pmon:monitored(Monitors)],
+                   rabbit_log:info("Autoheal: mnesia in nodes ~p is still up, sending "
+                                   "winner notification again to these ~n", [AliveLosers]),
+                   [send(L, {winner_is, node()}) || L <- AliveLosers],
+                   wait_for_mnesia_shutdown(AliveLosers)
+           end
+    end.
 
 restart_loser(State, Winner) ->
     rabbit_log:warning(
@@ -391,3 +435,13 @@ fmt_error({remote_down, RemoteDown}) ->
     rabbit_misc:format("Remote nodes disconnected:~n ~p", [RemoteDown]);
 fmt_error({nodes_down, NodesDown}) ->
     rabbit_misc:format("Local nodes down: ~p", [NodesDown]).
+
+stop_partition(Losers) ->
+    %% The leader said everything was ready - do we agree? If not then
+    %% give up.
+    Down = Losers -- rabbit_node_monitor:alive_rabbit_nodes(Losers),
+    case Down of
+        [] -> [send(L, {winner_is, node()}) || L <- Losers],
+              {winner_waiting, Losers, Losers};
+        _  -> abort(Down, Losers)
+    end.
similarity index 89%
rename from rabbitmq-server/src/rabbit_binding.erl
rename to deps/rabbit/src/rabbit_binding.erl
index 77a9277c4a956a4a416c05ff8875f65c8947ddb0..51bc883976f76d48e85b13011b5b674db3d9602c 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_binding).
          list_for_source_and_destination/2]).
 -export([new_deletions/0, combine_deletions/2, add_deletion/3,
          process_deletions/1]).
--export([info_keys/0, info/1, info/2, info_all/1, info_all/2]).
+-export([info_keys/0, info/1, info/2, info_all/1, info_all/2, info_all/4]).
 %% these must all be run inside a mnesia tx
 -export([has_for_source/1, remove_for_source/1,
          remove_for_destination/2, remove_transient_for_destination/1]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([key/0, deletions/0]).
 
--type(key() :: binary()).
+-type key() :: binary().
 
--type(bind_errors() :: rabbit_types:error(
+-type bind_errors() :: rabbit_types:error(
                          {'resources_missing',
                           [{'not_found', (rabbit_types:binding_source() |
                                           rabbit_types:binding_destination())} |
-                           {'absent', rabbit_types:amqqueue()}]})).
+                           {'absent', rabbit_types:amqqueue()}]}).
 
--type(bind_ok_or_error() :: 'ok' | bind_errors() |
+-type bind_ok_or_error() :: 'ok' | bind_errors() |
                             rabbit_types:error(
                               'binding_not_found' |
-                              {'binding_invalid', string(), [any()]})).
--type(bind_res() :: bind_ok_or_error() | rabbit_misc:thunk(bind_ok_or_error())).
--type(inner_fun() ::
+                              {'binding_invalid', string(), [any()]}).
+-type bind_res() :: bind_ok_or_error() | rabbit_misc:thunk(bind_ok_or_error()).
+-type inner_fun() ::
         fun((rabbit_types:exchange(),
              rabbit_types:exchange() | rabbit_types:amqqueue()) ->
-                   rabbit_types:ok_or_error(rabbit_types:amqp_error()))).
--type(bindings() :: [rabbit_types:binding()]).
+                   rabbit_types:ok_or_error(rabbit_types:amqp_error())).
+-type bindings() :: [rabbit_types:binding()].
 
 %% TODO this should really be opaque but that seems to confuse 17.1's
 %% dialyzer into objecting to everything that uses it.
--type(deletions() :: dict:dict()).
-
--spec(recover/2 :: ([rabbit_exchange:name()], [rabbit_amqqueue:name()]) ->
-                        'ok').
--spec(exists/1 :: (rabbit_types:binding()) -> boolean() | bind_errors()).
--spec(add/1    :: (rabbit_types:binding())              -> bind_res()).
--spec(add/2    :: (rabbit_types:binding(), inner_fun()) -> bind_res()).
--spec(remove/1 :: (rabbit_types:binding())              -> bind_res()).
--spec(remove/2 :: (rabbit_types:binding(), inner_fun()) -> bind_res()).
--spec(list/1 :: (rabbit_types:vhost()) -> bindings()).
--spec(list_for_source/1 ::
-        (rabbit_types:binding_source()) -> bindings()).
--spec(list_for_destination/1 ::
-        (rabbit_types:binding_destination()) -> bindings()).
--spec(list_for_source_and_destination/2 ::
+-type deletions() :: ?DICT_TYPE().
+
+-spec recover([rabbit_exchange:name()], [rabbit_amqqueue:name()]) ->
+                        'ok'.
+-spec exists(rabbit_types:binding()) -> boolean() | bind_errors().
+-spec add(rabbit_types:binding())              -> bind_res().
+-spec add(rabbit_types:binding(), inner_fun()) -> bind_res().
+-spec remove(rabbit_types:binding())              -> bind_res().
+-spec remove(rabbit_types:binding(), inner_fun()) -> bind_res().
+-spec list(rabbit_types:vhost()) -> bindings().
+-spec list_for_source
+        (rabbit_types:binding_source()) -> bindings().
+-spec list_for_destination
+        (rabbit_types:binding_destination()) -> bindings().
+-spec list_for_source_and_destination
         (rabbit_types:binding_source(), rabbit_types:binding_destination()) ->
-                                                bindings()).
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(info/1 :: (rabbit_types:binding()) -> rabbit_types:infos()).
--spec(info/2 :: (rabbit_types:binding(), rabbit_types:info_keys()) ->
-                     rabbit_types:infos()).
--spec(info_all/1 :: (rabbit_types:vhost()) -> [rabbit_types:infos()]).
--spec(info_all/2 ::(rabbit_types:vhost(), rabbit_types:info_keys())
-                   -> [rabbit_types:infos()]).
--spec(has_for_source/1 :: (rabbit_types:binding_source()) -> boolean()).
--spec(remove_for_source/1 :: (rabbit_types:binding_source()) -> bindings()).
--spec(remove_for_destination/2 ::
-        (rabbit_types:binding_destination(), boolean()) -> deletions()).
--spec(remove_transient_for_destination/1 ::
-        (rabbit_types:binding_destination()) -> deletions()).
--spec(process_deletions/1 :: (deletions()) -> rabbit_misc:thunk('ok')).
--spec(combine_deletions/2 :: (deletions(), deletions()) -> deletions()).
--spec(add_deletion/3 :: (rabbit_exchange:name(),
-                         {'undefined' | rabbit_types:exchange(),
-                          'deleted' | 'not_deleted',
-                          bindings()}, deletions()) -> deletions()).
--spec(new_deletions/0 :: () -> deletions()).
-
--endif.
+                                                bindings().
+-spec info_keys() -> rabbit_types:info_keys().
+-spec info(rabbit_types:binding()) -> rabbit_types:infos().
+-spec info(rabbit_types:binding(), rabbit_types:info_keys()) ->
+          rabbit_types:infos().
+-spec info_all(rabbit_types:vhost()) -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:vhost(), rabbit_types:info_keys()) ->
+          [rabbit_types:infos()].
+-spec info_all(rabbit_types:vhost(), rabbit_types:info_keys(),
+                    reference(), pid()) -> 'ok'.
+-spec has_for_source(rabbit_types:binding_source()) -> boolean().
+-spec remove_for_source(rabbit_types:binding_source()) -> bindings().
+-spec remove_for_destination
+        (rabbit_types:binding_destination(), boolean()) -> deletions().
+-spec remove_transient_for_destination
+        (rabbit_types:binding_destination()) -> deletions().
+-spec process_deletions(deletions()) -> rabbit_misc:thunk('ok').
+-spec combine_deletions(deletions(), deletions()) -> deletions().
+-spec add_deletion
+        (rabbit_exchange:name(),
+         {'undefined' | rabbit_types:exchange(),
+          'deleted' | 'not_deleted',
+          bindings()},
+         deletions()) ->
+            deletions().
+-spec new_deletions() -> deletions().
 
 %%----------------------------------------------------------------------------
 
 -define(INFO_KEYS, [source_name, source_kind,
                     destination_name, destination_kind,
-                    routing_key, arguments]).
+                    routing_key, arguments,
+                    vhost]).
 
 recover(XNames, QNames) ->
     rabbit_misc:table_filter(
@@ -270,6 +272,7 @@ infos(Items, B) -> [{Item, i(Item, B)} || Item <- Items].
 
 i(source_name,      #binding{source      = SrcName})    -> SrcName#resource.name;
 i(source_kind,      #binding{source      = SrcName})    -> SrcName#resource.kind;
+i(vhost,            #binding{source      = SrcName})    -> SrcName#resource.virtual_host;
 i(destination_name, #binding{destination = DstName})    -> DstName#resource.name;
 i(destination_kind, #binding{destination = DstName})    -> DstName#resource.kind;
 i(routing_key,      #binding{key         = RoutingKey}) -> RoutingKey;
@@ -284,6 +287,10 @@ info_all(VHostPath) -> map(VHostPath, fun (B) -> info(B) end).
 
 info_all(VHostPath, Items) -> map(VHostPath, fun (B) -> info(B, Items) end).
 
+info_all(VHostPath, Items, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map(
+      AggregatorPid, Ref, fun(B) -> info(B, Items) end, list(VHostPath)).
+
 has_for_source(SrcName) ->
     Match = #route{binding = #binding{source = SrcName, _ = '_'}},
     %% we need to check for semi-durable routes (which subsumes
diff --git a/deps/rabbit/src/rabbit_boot_steps.erl b/deps/rabbit/src/rabbit_boot_steps.erl
new file mode 100644 (file)
index 0000000..2136687
--- /dev/null
@@ -0,0 +1,97 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_boot_steps).
+
+-export([run_boot_steps/0, run_boot_steps/1, run_cleanup_steps/1]).
+-export([find_steps/0, find_steps/1]).
+
+run_boot_steps() ->
+    run_boot_steps(loaded_applications()).
+
+run_boot_steps(Apps) ->
+    [ok = run_step(Attrs, mfa) || {_, _, Attrs} <- find_steps(Apps)],
+    ok.
+
+run_cleanup_steps(Apps) ->
+    [run_step(Attrs, cleanup) || {_, _, Attrs} <- find_steps(Apps)],
+    ok.
+
+loaded_applications() ->
+    [App || {App, _, _} <- application:loaded_applications()].
+
+find_steps() ->
+    find_steps(loaded_applications()).
+
+find_steps(Apps) ->
+    All = sort_boot_steps(rabbit_misc:all_module_attributes(rabbit_boot_step)),
+    [Step || {App, _, _} = Step <- All, lists:member(App, Apps)].
+
+run_step(Attributes, AttributeName) ->
+    case [MFA || {Key, MFA} <- Attributes,
+                 Key =:= AttributeName] of
+        [] ->
+            ok;
+        MFAs ->
+            [case apply(M,F,A) of
+                 ok              -> ok;
+                 {error, Reason} -> exit({error, Reason})
+             end || {M,F,A} <- MFAs],
+            ok
+    end.
+
+vertices({AppName, _Module, Steps}) ->
+    [{StepName, {AppName, StepName, Atts}} || {StepName, Atts} <- Steps].
+
+edges({_AppName, _Module, Steps}) ->
+    EnsureList = fun (L) when is_list(L) -> L;
+                     (T)                 -> [T]
+                 end,
+    [case Key of
+         requires -> {StepName, OtherStep};
+         enables  -> {OtherStep, StepName}
+     end || {StepName, Atts} <- Steps,
+            {Key, OtherStepOrSteps} <- Atts,
+            OtherStep <- EnsureList(OtherStepOrSteps),
+            Key =:= requires orelse Key =:= enables].
+
+sort_boot_steps(UnsortedSteps) ->
+    case rabbit_misc:build_acyclic_graph(fun vertices/1, fun edges/1,
+                                         UnsortedSteps) of
+        {ok, G} ->
+            %% Use topological sort to find a consistent ordering (if
+            %% there is one, otherwise fail).
+            SortedSteps = lists:reverse(
+                            [begin
+                                 {StepName, Step} = digraph:vertex(G,
+                                                                   StepName),
+                                 Step
+                             end || StepName <- digraph_utils:topsort(G)]),
+            digraph:delete(G),
+            %% Check that all mentioned {M,F,A} triples are exported.
+            case [{StepName, {M,F,A}} ||
+                     {_App, StepName, Attributes} <- SortedSteps,
+                     {mfa, {M,F,A}}               <- Attributes,
+                     code:ensure_loaded(M) =/= {module, M} orelse
+                     not erlang:function_exported(M, F, length(A))] of
+                []         -> SortedSteps;
+                MissingFns -> exit({boot_functions_not_exported, MissingFns})
+            end;
+        {error, {vertex, duplicate, StepName}} ->
+            exit({duplicate_boot_step, StepName});
+        {error, {edge, Reason, From, To}} ->
+            exit({invalid_boot_step_dependency, From, To, Reason})
+    end.
similarity index 80%
rename from rabbitmq-server/src/rabbit_channel_sup.erl
rename to deps/rabbit/src/rabbit_channel_sup.erl
index e8f45f7305a58421e38c719135efa6fa0c12a261..48cc1e15c2a2a31c4631ee1cb45e703b6f3fa71d 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_channel_sup).
 
+%% Supervises processes that implement AMQP 0-9-1 channels:
+%%
+%%  * Channel process itself
+%%  * Network writer (for network connections)
+%%  * Limiter (handles channel QoS and flow control)
+%%
+%% Every rabbit_channel_sup is supervised by rabbit_channel_sup_sup.
+%%
+%% See also rabbit_channel, rabbit_writer, rabbit_limiter.
+
 -behaviour(supervisor2).
 
 -export([start_link/1]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([start_link_args/0]).
 
--type(start_link_args() ::
+-type start_link_args() ::
         {'tcp', rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), pid(), string(), rabbit_types:protocol(),
          rabbit_types:user(), rabbit_types:vhost(), rabbit_framing:amqp_table(),
          pid()} |
         {'direct', rabbit_channel:channel_number(), pid(), string(),
          rabbit_types:protocol(), rabbit_types:user(), rabbit_types:vhost(),
-         rabbit_framing:amqp_table(), pid()}).
+         rabbit_framing:amqp_table(), pid()}.
 
--spec(start_link/1 :: (start_link_args()) -> {'ok', pid(), {pid(), any()}}).
+-spec start_link(start_link_args()) -> {'ok', pid(), {pid(), any()}}.
 
--endif.
+-define(FAIR_WAIT, 70000).
 
 %%----------------------------------------------------------------------------
 
@@ -59,7 +67,7 @@ start_link({tcp, Sock, Channel, FrameMax, ReaderPid, ConnName, Protocol, User,
                      [Channel, ReaderPid, WriterPid, ReaderPid, ConnName,
                       Protocol, User, VHost, Capabilities, Collector,
                       LimiterPid]},
-           intrinsic, ?MAX_WAIT, worker, [rabbit_channel]}),
+           intrinsic, ?FAIR_WAIT, worker, [rabbit_channel]}),
     {ok, AState} = rabbit_command_assembler:init(Protocol),
     {ok, SupPid, {ChannelPid, AState}};
 start_link({direct, Channel, ClientChannelPid, ConnPid, ConnName, Protocol,
@@ -74,7 +82,7 @@ start_link({direct, Channel, ClientChannelPid, ConnPid, ConnName, Protocol,
                      [Channel, ClientChannelPid, ClientChannelPid, ConnPid,
                       ConnName, Protocol, User, VHost, Capabilities, Collector,
                       LimiterPid]},
-           intrinsic, ?MAX_WAIT, worker, [rabbit_channel]}),
+           intrinsic, ?FAIR_WAIT, worker, [rabbit_channel]}),
     {ok, SupPid, {ChannelPid, none}}.
 
 %%----------------------------------------------------------------------------
@@ -85,8 +93,8 @@ init(Type) ->
 child_specs({tcp, Sock, Channel, FrameMax, ReaderPid, Protocol, Identity}) ->
     [{writer, {rabbit_writer, start_link,
                [Sock, Channel, FrameMax, Protocol, ReaderPid, Identity, true]},
-      intrinsic, ?MAX_WAIT, worker, [rabbit_writer]}
+      intrinsic, ?FAIR_WAIT, worker, [rabbit_writer]}
      | child_specs({direct, Identity})];
 child_specs({direct, Identity}) ->
     [{limiter, {rabbit_limiter, start_link, [Identity]},
-      transient, ?MAX_WAIT, worker, [rabbit_limiter]}].
+      transient, ?FAIR_WAIT, worker, [rabbit_limiter]}].
similarity index 76%
rename from rabbitmq-server/src/rabbit_channel_sup_sup.erl
rename to deps/rabbit/src/rabbit_channel_sup_sup.erl
index 2be2af91a7ae039a1f1ea25f33c348a8385df4ee..885d34d0a708ce72dd8d724753407dc4e9654175 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_channel_sup_sup).
 
+%% Supervisor for AMQP 0-9-1 channels. Every AMQP 0-9-1 connection has
+%% one of these.
+%%
+%% See also rabbit_channel_sup, rabbit_connection_helper_sup, rabbit_reader.
+
 -behaviour(supervisor2).
 
 -export([start_link/0, start_channel/2]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(start_channel/2 :: (pid(), rabbit_channel_sup:start_link_args()) ->
-                              {'ok', pid(), {pid(), any()}}).
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec start_channel(pid(), rabbit_channel_sup:start_link_args()) ->
+          {'ok', pid(), {pid(), any()}}.
 
 %%----------------------------------------------------------------------------
 
similarity index 59%
rename from rabbitmq-server/src/rabbit_cli.erl
rename to deps/rabbit/src/rabbit_cli.erl
index 33098ce16b4997744ee35f210322ed3b656e2a87..c0e5c93247b7faedd5775e78b3a12f2c23d21e26 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_cli).
 -include("rabbit_cli.hrl").
 
 -export([main/3, start_distribution/0, start_distribution/1,
-         parse_arguments/4, rpc_call/4, rpc_call/5]).
+         parse_arguments/4, mutually_exclusive_flags/3,
+         rpc_call/4, rpc_call/5, rpc_call/7]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-type option_name() :: string().
+-type option_value() :: string() | node() | boolean().
+-type optdef() :: flag | {option, string()}.
+-type parse_result() :: {'ok', {atom(), [{option_name(), option_value()}], [string()]}} |
+                        'no_command'.
 
--type(optdef() :: flag | {option, string()}).
--type(parse_result() :: {'ok', {atom(), [{string(), string()}], [string()]}} |
-                        'no_command').
+-spec main
+        (fun (([string()], string()) -> parse_result()),
+         fun ((atom(), atom(), [any()], [any()]) -> any()),
+         atom()) ->
+            no_return().
+-spec start_distribution() -> {'ok', pid()} | {'error', any()}.
+-spec start_distribution(string()) -> {'ok', pid()} | {'error', any()}.
+-spec usage(atom()) -> no_return().
+-spec parse_arguments
+        ([{atom(), [{string(), optdef()}]} | atom()],
+         [{string(), optdef()}], string(), [string()]) ->
+          parse_result().
 
+-spec mutually_exclusive_flags([{option_name(), option_value()}], term(), [{option_name(), term()}]) -> {ok, term()} | {error, string()}.
 
--spec(main/3 :: (fun (([string()], string()) -> parse_result()),
-                     fun ((atom(), atom(), [any()], [any()]) -> any()),
-                         atom()) -> no_return()).
--spec(start_distribution/0 :: () -> {'ok', pid()} | {'error', any()}).
--spec(start_distribution/1 :: (string()) -> {'ok', pid()} | {'error', any()}).
--spec(usage/1 :: (atom()) -> no_return()).
--spec(parse_arguments/4 ::
-        ([{atom(), [{string(), optdef()}]} | atom()],
-         [{string(), optdef()}], string(), [string()]) -> parse_result()).
--spec(rpc_call/4 :: (node(), atom(), atom(), [any()]) -> any()).
+-spec rpc_call(node(), atom(), atom(), [any()]) -> any().
+-spec rpc_call(node(), atom(), atom(), [any()], number()) -> any().
+-spec rpc_call
+        (node(), atom(), atom(), [any()], reference(), pid(), number()) ->
+            any().
 
--endif.
+ensure_cli_distribution() ->
+    case start_distribution() of
+        {ok, _} ->
+            ok;
+        {error, Error} ->
+            print_error("Failed to initialize erlang distribution: ~p.",
+                        [Error]),
+            rabbit_misc:quit(?EX_TEMPFAIL)
+    end.
 
 %%----------------------------------------------------------------------------
 
 main(ParseFun, DoFun, UsageMod) ->
     error_logger:tty(false),
-    start_distribution(),
+    ensure_cli_distribution(),
     {ok, [[NodeStr|_]|_]} = init:get_argument(nodename),
     {Command, Opts, Args} =
         case ParseFun(init:get_plain_arguments(), NodeStr) of
@@ -65,7 +83,10 @@ main(ParseFun, DoFun, UsageMod) ->
     %% thrown errors into normal return values
     case catch DoFun(Command, Node, Args, Opts) of
         ok ->
-            rabbit_misc:quit(0);
+            rabbit_misc:quit(?EX_OK);
+        {ok, Result} ->
+            rabbit_control_misc:print_cmd_result(Command, Result),
+            rabbit_misc:quit(?EX_OK);
         {'EXIT', {function_clause, [{?MODULE, action, _}    | _]}} -> %% < R15
             PrintInvalidCommandError(),
             usage(UsageMod);
@@ -75,45 +96,69 @@ main(ParseFun, DoFun, UsageMod) ->
         {error, {missing_dependencies, Missing, Blame}} ->
             print_error("dependent plugins ~p not found; used by ~p.",
                         [Missing, Blame]),
-            rabbit_misc:quit(2);
+            rabbit_misc:quit(?EX_CONFIG);
         {'EXIT', {badarg, _}} ->
             print_error("invalid parameter: ~p", [Args]),
-            usage(UsageMod);
+            usage(UsageMod, ?EX_DATAERR);
         {error, {Problem, Reason}} when is_atom(Problem), is_binary(Reason) ->
             %% We handle this common case specially to avoid ~p since
             %% that has i18n issues
             print_error("~s: ~s", [Problem, Reason]),
-            rabbit_misc:quit(2);
+            rabbit_misc:quit(?EX_SOFTWARE);
         {error, Reason} ->
             print_error("~p", [Reason]),
-            rabbit_misc:quit(2);
+            rabbit_misc:quit(?EX_SOFTWARE);
         {error_string, Reason} ->
             print_error("~s", [Reason]),
-            rabbit_misc:quit(2);
+            rabbit_misc:quit(?EX_SOFTWARE);
         {badrpc, {'EXIT', Reason}} ->
             print_error("~p", [Reason]),
-            rabbit_misc:quit(2);
+            rabbit_misc:quit(?EX_SOFTWARE);
         {badrpc, Reason} ->
             case Reason of
                 timeout ->
-                    print_error("operation ~w on node ~w timed out", [Command, Node]);
+                    print_error("operation ~w on node ~w timed out", [Command, Node]),
+                    rabbit_misc:quit(?EX_TEMPFAIL);
                 _ ->
                     print_error("unable to connect to node ~w: ~w", [Node, Reason]),
-                    print_badrpc_diagnostics([Node])
-            end,
-            rabbit_misc:quit(2);
+                    print_badrpc_diagnostics([Node]),
+                    case Command of
+                        stop -> rabbit_misc:quit(?EX_OK);
+                        _    -> rabbit_misc:quit(?EX_UNAVAILABLE)
+                    end
+            end;
         {badrpc_multi, Reason, Nodes} ->
             print_error("unable to connect to nodes ~p: ~w", [Nodes, Reason]),
             print_badrpc_diagnostics(Nodes),
-            rabbit_misc:quit(2);
+            rabbit_misc:quit(?EX_UNAVAILABLE);
+        function_clause ->
+            print_error("operation ~w used with invalid parameter: ~p",
+                        [Command, Args]),
+            usage(UsageMod);
+        {refused, Username, _, _} ->
+            print_error("failed to authenticate user \"~s\"", [Username]),
+            rabbit_misc:quit(?EX_NOUSER);
         Other ->
             print_error("~p", [Other]),
-            rabbit_misc:quit(2)
+            rabbit_misc:quit(?EX_SOFTWARE)
+    end.
+
+start_distribution_anon(0, LastError) ->
+    {error, LastError};
+start_distribution_anon(TriesLeft, _) ->
+    NameCandidate = generate_cli_node_name(),
+    case net_kernel:start([NameCandidate, name_type()]) of
+        {ok, _} = Result ->
+            Result;
+        {error, Reason} ->
+            start_distribution_anon(TriesLeft - 1, Reason)
     end.
 
+%% Tries to start distribution with a random name chosen from a limited list of
+%% candidates - to prevent atom table pollution on target nodes.
 start_distribution() ->
-    start_distribution(list_to_atom(
-                         rabbit_misc:format("rabbitmq-cli-~s", [os:getpid()]))).
+    rabbit_nodes:ensure_epmd(),
+    start_distribution_anon(10, undefined).
 
 start_distribution(Name) ->
     rabbit_nodes:ensure_epmd(),
@@ -125,9 +170,28 @@ name_type() ->
         _      -> shortnames
     end.
 
+generate_cli_node_name() ->
+    Base = rabbit_misc:format("rabbitmq-cli-~2..0b", [rand_compat:uniform(100)]),
+    NameAsList =
+        case {name_type(), inet_db:res_option(domain)} of
+            {longnames, []} ->
+                %% Distribution will fail to start if it's unable to
+                %% determine FQDN of a node (with at least one dot in
+                %% a name).
+                %% CLI is always an initiator of connection, so it
+                %% doesn't matter if the name will not resolve.
+                Base ++ "@" ++ inet_db:gethostname() ++ ".no-domain";
+            _ ->
+                Base
+        end,
+    list_to_atom(NameAsList).
+
 usage(Mod) ->
+    usage(Mod, ?EX_USAGE).
+
+usage(Mod, ExitCode) ->
     io:format("~s", [Mod:usage()]),
-    rabbit_misc:quit(1).
+    rabbit_misc:quit(ExitCode).
 
 %%----------------------------------------------------------------------------
 
@@ -201,6 +265,24 @@ process_opts(Defs, C, [A | As], Found, KVs, Outs) ->
         {none, _, _}     -> no_command
     end.
 
+mutually_exclusive_flags(CurrentOptionValues, Default, FlagsAndValues) ->
+    PresentFlags = lists:filtermap(fun({OptName, _} = _O) ->
+                                           proplists:get_bool(OptName, CurrentOptionValues)
+                                   end,
+                             FlagsAndValues),
+    case PresentFlags of
+        [] ->
+            {ok, Default};
+        [{_, Value}] ->
+            {ok, Value};
+        _ ->
+            Names = [ [$', N, $']  || {N, _} <- PresentFlags ],
+            CommaSeparated = string:join(lists:droplast(Names), ", "),
+            AndOneMore = lists:last(Names),
+            Msg = io_lib:format("Options ~s and ~s are mutually exclusive", [CommaSeparated, AndOneMore]),
+            {error, lists:flatten(Msg)}
+    end.
+
 %%----------------------------------------------------------------------------
 
 fmt_stderr(Format, Args) -> rabbit_misc:format_stderr(Format ++ "~n", Args).
@@ -215,11 +297,10 @@ print_badrpc_diagnostics(Nodes) ->
 %% a timeout unless we set our ticktime to be the same. So let's do
 %% that.
 rpc_call(Node, Mod, Fun, Args) ->
-    rpc_call(Node, Mod, Fun, Args, ?RPC_TIMEOUT).
+    rabbit_misc:rpc_call(Node, Mod, Fun, Args).
 
 rpc_call(Node, Mod, Fun, Args, Timeout) ->
-    case rpc:call(Node, net_kernel, get_net_ticktime, [], Timeout) of
-        {badrpc, _} = E -> E;
-        Time            -> net_kernel:set_net_ticktime(Time, 0),
-                           rpc:call(Node, Mod, Fun, Args, Timeout)
-    end.
+    rabbit_misc:rpc_call(Node, Mod, Fun, Args, Timeout).
+
+rpc_call(Node, Mod, Fun, Args, Ref, Pid, Timeout) ->
+    rabbit_misc:rpc_call(Node, Mod, Fun, Args, Ref, Pid, Timeout).
similarity index 71%
rename from rabbitmq-server/src/rabbit_client_sup.erl
rename to deps/rabbit/src/rabbit_client_sup.erl
index 5348d012d583197671ee1c50172e4e2146f99c87..77f0bcb9938636945559976edd5f2909948bb15d 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_client_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/1 :: (rabbit_types:mfargs()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(start_link/2 :: ({'local', atom()}, rabbit_types:mfargs()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(start_link_worker/2 :: ({'local', atom()}, rabbit_types:mfargs()) ->
-                                  rabbit_types:ok_pid_or_error()).
-
--endif.
+-spec start_link(rabbit_types:mfargs()) ->
+          rabbit_types:ok_pid_or_error().
+-spec start_link({'local', atom()}, rabbit_types:mfargs()) ->
+          rabbit_types:ok_pid_or_error().
+-spec start_link_worker({'local', atom()}, rabbit_types:mfargs()) ->
+          rabbit_types:ok_pid_or_error().
 
 %%----------------------------------------------------------------------------
 
@@ -53,5 +49,4 @@ init({M,F,A}) ->
           [{client, {M,F,A}, temporary, infinity, supervisor, [M]}]}};
 init({{M,F,A}, worker}) ->
     {ok, {{simple_one_for_one, 0, 1},
-          [{client, {M,F,A}, temporary, ?MAX_WAIT, worker, [M]}]}}.
-
+          [{client, {M,F,A}, temporary, ?WORKER_WAIT, worker, [M]}]}}.
similarity index 71%
rename from rabbitmq-server/src/rabbit_connection_helper_sup.erl
rename to deps/rabbit/src/rabbit_connection_helper_sup.erl
index d3c05ee4161a68e8f16fe3f0c4754142d39d27c4..bde520b74b8c2b4017f071c7de6526ea454d3d43 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_connection_helper_sup).
 
+%% Supervises auxiliary processes of AMQP 0-9-1 connections:
+%%
+%%  * Channel supervisor
+%%  * Heartbeat receiver
+%%  * Heartbeat sender
+%%  * Exclusive queue collector
+%%
+%% See also rabbit_heartbeat, rabbit_channel_sup_sup, rabbit_queue_collector.
+
 -behaviour(supervisor2).
 
 -export([start_link/0]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(start_channel_sup_sup/1 :: (pid()) -> rabbit_types:ok_pid_or_error()).
--spec(start_queue_collector/2 :: (pid(), rabbit_types:proc_name()) ->
-                                      rabbit_types:ok_pid_or_error()).
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec start_channel_sup_sup(pid()) -> rabbit_types:ok_pid_or_error().
+-spec start_queue_collector(pid(), rabbit_types:proc_name()) ->
+          rabbit_types:ok_pid_or_error().
 
 %%----------------------------------------------------------------------------
 
@@ -50,10 +57,9 @@ start_queue_collector(SupPid, Identity) ->
     supervisor2:start_child(
       SupPid,
       {collector, {rabbit_queue_collector, start_link, [Identity]},
-       intrinsic, ?MAX_WAIT, worker, [rabbit_queue_collector]}).
+       intrinsic, ?WORKER_WAIT, worker, [rabbit_queue_collector]}).
 
 %%----------------------------------------------------------------------------
 
 init([]) ->
     {ok, {{one_for_one, 10, 10}, []}}.
-
similarity index 76%
rename from rabbitmq-server/src/rabbit_connection_sup.erl
rename to deps/rabbit/src/rabbit_connection_sup.erl
index 982608556aba9a592615e121cc6f1641ea1dec39..154bbb1922451680885b3a69e0f03d203001a38b 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_connection_sup).
 
+%% Supervisor for a (network) AMQP 0-9-1 client connection.
+%%
+%% Supervises
+%%
+%%  * rabbit_reader
+%%  * Auxiliary process supervisor
+%%
+%% See also rabbit_reader, rabbit_connection_helper_sup.
+
 -behaviour(supervisor2).
+-behaviour(ranch_protocol).
 
--export([start_link/0, reader/1]).
+-export([start_link/4, reader/1]).
 
 -export([init/1]).
 
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> {'ok', pid(), pid()}).
--spec(reader/1 :: (pid()) -> pid()).
-
--endif.
+-spec start_link(any(), rabbit_net:socket(), module(), any()) ->
+          {'ok', pid(), pid()}.
+-spec reader(pid()) -> pid().
 
 %%--------------------------------------------------------------------------
 
-start_link() ->
+start_link(Ref, Sock, _Transport, _Opts) ->
     {ok, SupPid} = supervisor2:start_link(?MODULE, []),
     %% We need to get channels in the hierarchy here so they get shut
     %% down after the reader, so the reader gets a chance to terminate
@@ -55,8 +62,8 @@ start_link() ->
     {ok, ReaderPid} =
         supervisor2:start_child(
           SupPid,
-          {reader, {rabbit_reader, start_link, [HelperSup]},
-           intrinsic, ?MAX_WAIT, worker, [rabbit_reader]}),
+          {reader, {rabbit_reader, start_link, [HelperSup, Ref, Sock]},
+           intrinsic, ?WORKER_WAIT, worker, [rabbit_reader]}),
     {ok, SupPid, ReaderPid}.
 
 reader(Pid) ->
similarity index 67%
rename from rabbitmq-server/src/rabbit_control_main.erl
rename to deps/rabbit/src/rabbit_control_main.erl
index fe0563bbc7c9563ffb12c8d6e70488349099c3dc..8c245892b7cbc23c120b5d860ebe857510db6a98 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_control_main).
 -include("rabbit.hrl").
 -include("rabbit_cli.hrl").
+-include("rabbit_misc.hrl").
 
--export([start/0, stop/0, parse_arguments/2, action/5,
+-export([start/0, stop/0, parse_arguments/2, action/5, action/6,
          sync_queue/1, cancel_sync_queue/1, become/1,
          purge_queue/1]).
 
--import(rabbit_cli, [rpc_call/4, rpc_call/5]).
+-import(rabbit_misc, [rpc_call/4, rpc_call/5, rpc_call/7]).
 
 -define(EXTERNAL_CHECK_INTERVAL, 1000).
 
@@ -36,6 +37,7 @@
          reset,
          force_reset,
          rotate_logs,
+         hipe_compile,
 
          {join_cluster, [?RAM_DEF]},
          change_cluster_node_type,
@@ -52,6 +54,7 @@
          delete_user,
          change_password,
          clear_password,
+         authenticate_user,
          set_user_tags,
          list_users,
 
@@ -71,7 +74,7 @@
          {clear_policy, [?VHOST_DEF]},
          {list_policies, [?VHOST_DEF]},
 
-         {list_queues, [?VHOST_DEF]},
+         {list_queues, [?VHOST_DEF, ?OFFLINE_DEF, ?ONLINE_DEF, ?LOCAL_DEF]},
          {list_exchanges, [?VHOST_DEF]},
          {list_bindings, [?VHOST_DEF]},
          {list_connections, [?VHOST_DEF]},
          report,
          set_cluster_name,
          eval,
+         node_health_check,
 
          close_connection,
          {trace_on, [?VHOST_DEF]},
          {trace_off, [?VHOST_DEF]},
-         set_vm_memory_high_watermark
+         set_vm_memory_high_watermark,
+         set_disk_free_limit,
+         help,
+         {encode, [?DECODE_DEF, ?CIPHER_DEF, ?HASH_DEF, ?ITERATIONS_DEF, ?LIST_CIPHERS_DEF, ?LIST_HASHES_DEF]}
         ]).
 
 -define(GLOBAL_QUERIES,
         [stop, stop_app, start_app, wait, reset, force_reset, rotate_logs,
          join_cluster, change_cluster_node_type, update_cluster_nodes,
          forget_cluster_node, rename_cluster_node, cluster_status, status,
-         environment, eval, force_boot]).
+         environment, eval, force_boot, help, hipe_compile, encode]).
 
+%% [Command | {Command, DefaultTimeoutInMilliSeconds}]
 -define(COMMANDS_WITH_TIMEOUT,
         [list_user_permissions, list_policies, list_queues, list_exchanges,
-        list_bindings, list_connections, list_channels, list_consumers,
-        list_vhosts, list_parameters,
-        purge_queue]).
+         list_bindings, list_connections, list_channels, list_consumers,
+         list_vhosts, list_parameters,
+         purge_queue,
+         {node_health_check, 70000}]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start/0 :: () -> no_return()).
--spec(stop/0 :: () -> 'ok').
--spec(action/5 ::
+-spec start() -> no_return().
+-spec stop() -> 'ok'.
+-spec action
         (atom(), node(), [string()], [{string(), any()}],
-         fun ((string(), [any()]) -> 'ok'))
-        -> 'ok').
+         fun ((string(), [any()]) -> 'ok')) ->
+            'ok'.
 
--spec(action/6 ::
+-spec action
         (atom(), node(), [string()], [{string(), any()}],
-         fun ((string(), [any()]) -> 'ok'), timeout())
-        -> 'ok').
-
--endif.
+         fun ((string(), [any()]) -> 'ok'), timeout()) ->
+            'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -150,7 +155,7 @@ start() ->
                                     end
                        end,
               try
-                  T = case get_timeout(Opts) of
+                  T = case get_timeout(Command, Opts) of
                           {ok, Timeout} ->
                               Timeout;
                           {error, _} ->
@@ -185,8 +190,23 @@ print_report0(Node, {Module, InfoFun, KeysFun}, VHostArg) ->
     end,
     io:nl().
 
-get_timeout(Opts) ->
-    parse_timeout(proplists:get_value(?TIMEOUT_OPT, Opts, ?RPC_TIMEOUT)).
+get_timeout(Command, Opts) ->
+    Default = case proplists:lookup(Command, ?COMMANDS_WITH_TIMEOUT) of
+                  none ->
+                      infinity;
+                  {Command, true} ->
+                      ?RPC_TIMEOUT;
+                  {Command, D} ->
+                      D
+              end,
+    Result = case proplists:get_value(?TIMEOUT_OPT, Opts, Default) of
+        use_default ->
+            parse_timeout(Default);
+        Value ->
+            parse_timeout(Value)
+    end,
+    Result.
+
 
 parse_number(N) when is_list(N) ->
     try list_to_integer(N) of
@@ -232,11 +252,11 @@ do_action(Command, Node, Args, Opts, Inform, Timeout) ->
         false ->
             case ensure_app_running(Node) of
                 ok ->
-                    case lists:member(Command, ?COMMANDS_WITH_TIMEOUT) of
-                        true  ->
+                    case proplists:lookup(Command, ?COMMANDS_WITH_TIMEOUT) of
+                        {Command, _}  ->
                             announce_timeout(Timeout, Inform),
                             action(Command, Node, Args, Opts, Inform, Timeout);
-                        false ->
+                        none ->
                             action(Command, Node, Args, Opts, Inform)
                     end;
                 E  -> E
@@ -266,11 +286,17 @@ action(start_app, Node, [], _Opts, Inform) ->
 
 action(reset, Node, [], _Opts, Inform) ->
     Inform("Resetting node ~p", [Node]),
-    call(Node, {rabbit_mnesia, reset, []});
+    require_mnesia_stopped(Node, 
+                           fun() ->
+                                   call(Node, {rabbit_mnesia, reset, []})
+                           end);
 
 action(force_reset, Node, [], _Opts, Inform) ->
     Inform("Forcefully resetting node ~p", [Node]),
-    call(Node, {rabbit_mnesia, force_reset, []});
+    require_mnesia_stopped(Node, 
+                           fun() ->
+                                   call(Node, {rabbit_mnesia, force_reset, []})
+                           end);
 
 action(join_cluster, Node, [ClusterNodeS], Opts, Inform) ->
     ClusterNode = list_to_atom(ClusterNodeS),
@@ -279,20 +305,32 @@ action(join_cluster, Node, [ClusterNodeS], Opts, Inform) ->
                    false -> disc
                end,
     Inform("Clustering node ~p with ~p", [Node, ClusterNode]),
-    rpc_call(Node, rabbit_mnesia, join_cluster, [ClusterNode, NodeType]);
+    require_mnesia_stopped(Node, 
+                           fun() ->
+                                   rpc_call(Node, rabbit_mnesia, join_cluster, [ClusterNode, NodeType])
+                           end);
 
 action(change_cluster_node_type, Node, ["ram"], _Opts, Inform) ->
     Inform("Turning ~p into a ram node", [Node]),
-    rpc_call(Node, rabbit_mnesia, change_cluster_node_type, [ram]);
+    require_mnesia_stopped(Node, 
+                           fun() ->
+                                   rpc_call(Node, rabbit_mnesia, change_cluster_node_type, [ram])
+                           end);
 action(change_cluster_node_type, Node, [Type], _Opts, Inform)
   when Type =:= "disc" orelse Type =:= "disk" ->
     Inform("Turning ~p into a disc node", [Node]),
-    rpc_call(Node, rabbit_mnesia, change_cluster_node_type, [disc]);
+    require_mnesia_stopped(Node, 
+                           fun() ->
+                                   rpc_call(Node, rabbit_mnesia, change_cluster_node_type, [disc])
+                           end);
 
 action(update_cluster_nodes, Node, [ClusterNodeS], _Opts, Inform) ->
     ClusterNode = list_to_atom(ClusterNodeS),
     Inform("Updating cluster nodes for ~p from ~p", [Node, ClusterNode]),
-    rpc_call(Node, rabbit_mnesia, update_cluster_nodes, [ClusterNode]);
+    require_mnesia_stopped(Node, 
+                          fun() ->
+                                  rpc_call(Node, rabbit_mnesia, update_cluster_nodes, [ClusterNode])
+                          end);
 
 action(forget_cluster_node, Node, [ClusterNodeS], Opts, Inform) ->
     ClusterNode = list_to_atom(ClusterNodeS),
@@ -344,7 +382,10 @@ action(status, Node, [], _Opts, Inform) ->
 
 action(cluster_status, Node, [], _Opts, Inform) ->
     Inform("Cluster status of node ~p", [Node]),
-    display_call_result(Node, {rabbit_mnesia, status, []});
+    Status = unsafe_rpc(Node, rabbit_mnesia, status, []),
+    io:format("~p~n", [Status ++ [{alarms,
+        [alarms_by_node(Name) || Name <- nodes_in_cluster(Node)]}]]),
+    ok;
 
 action(environment, Node, _App, _Opts, Inform) ->
     Inform("Application environment of node ~p", [Node]),
@@ -357,6 +398,16 @@ action(rotate_logs, Node, Args = [Suffix], _Opts, Inform) ->
     Inform("Rotating logs to files with suffix \"~s\"", [Suffix]),
     call(Node, {rabbit, rotate_logs, Args});
 
+action(hipe_compile, _Node, [TargetDir], _Opts, _Inform) ->
+    ok = application:load(rabbit),
+    case rabbit_hipe:can_hipe_compile() of
+        true ->
+            {ok, _, _} = rabbit_hipe:compile_to_directory(TargetDir),
+            ok;
+        false ->
+            {error, "HiPE compilation is not supported"}
+    end;
+
 action(close_connection, Node, [PidStr, Explanation], _Opts, Inform) ->
     Inform("Closing connection \"~s\"", [PidStr]),
     rpc_call(Node, rabbit_networking, close_connection,
@@ -378,6 +429,10 @@ action(clear_password, Node, Args = [Username], _Opts, Inform) ->
     Inform("Clearing password for user \"~s\"", [Username]),
     call(Node, {rabbit_auth_backend_internal, clear_password, Args});
 
+action(authenticate_user, Node, Args = [Username, _Password], _Opts, Inform) ->
+    Inform("Authenticating user \"~s\"", [Username]),
+    call(Node, {rabbit_access_control, check_user_pass_login, Args});
+
 action(set_user_tags, Node, [Username | TagsStr], _Opts, Inform) ->
     Tags = [list_to_atom(T) || T <- TagsStr],
     Inform("Setting tags for user \"~s\" to ~p", [Username, Tags]),
@@ -410,6 +465,39 @@ action(set_vm_memory_high_watermark, Node, [Arg], _Opts, Inform) ->
     Inform("Setting memory threshold on ~p to ~p", [Node, Frac]),
     rpc_call(Node, vm_memory_monitor, set_vm_memory_high_watermark, [Frac]);
 
+action(set_vm_memory_high_watermark, Node, ["absolute", Arg], _Opts, Inform) ->
+    case rabbit_resource_monitor_misc:parse_information_unit(Arg) of
+        {ok, Limit} ->
+            Inform("Setting memory threshold on ~p to ~p bytes", [Node, Limit]),
+            rpc_call(Node, vm_memory_monitor, set_vm_memory_high_watermark,
+                 [{absolute, Limit}]);
+        {error, parse_error} ->
+            {error_string, rabbit_misc:format(
+                "Unable to parse absolute memory limit value ~p", [Arg])}
+    end;
+
+action(set_disk_free_limit, Node, [Arg], _Opts, Inform) ->
+    case rabbit_resource_monitor_misc:parse_information_unit(Arg) of
+        {ok, Limit} ->
+            Inform("Setting disk free limit on ~p to ~p bytes", [Node, Limit]),
+            rpc_call(Node, rabbit_disk_monitor, set_disk_free_limit, [Limit]);
+        {error, parse_error} ->
+            {error_string, rabbit_misc:format(
+                "Unable to parse disk free limit value ~p", [Arg])}
+    end;
+
+action(set_disk_free_limit, Node, ["mem_relative", Arg], _Opts, Inform) ->
+    Frac = list_to_float(case string:chr(Arg, $.) of
+                             0 -> Arg ++ ".0";
+                             _ -> Arg
+                         end),
+    Inform("Setting disk free limit on ~p to ~p of total RAM", [Node, Frac]),
+    rpc_call(Node, 
+             rabbit_disk_monitor, 
+             set_disk_free_limit, 
+             [{mem_relative, Frac}]);
+
+
 action(set_permissions, Node, [Username, CPerm, WPerm, RPerm], Opts, Inform) ->
     VHost = proplists:get_value(?VHOST_OPT, Opts),
     Inform("Setting permissions for user \"~s\" in vhost \"~s\"",
@@ -445,9 +533,15 @@ action(set_policy, Node, [Key, Pattern, Defn], Opts, Inform) ->
     PriorityArg = proplists:get_value(?PRIORITY_OPT, Opts),
     ApplyToArg = list_to_binary(proplists:get_value(?APPLY_TO_OPT, Opts)),
     Inform(Msg, [Key, Pattern, Defn, PriorityArg]),
-    rpc_call(
+    Res = rpc_call(
       Node, rabbit_policy, parse_set,
-      [VHostArg, list_to_binary(Key), Pattern, Defn, PriorityArg, ApplyToArg]);
+      [VHostArg, list_to_binary(Key), Pattern, Defn, PriorityArg, ApplyToArg]),
+    case Res of
+        {error, Format, Args} when is_list(Format) andalso is_list(Args) ->
+            {error_string, rabbit_misc:format(Format, Args)};
+        _ ->
+            Res
+    end;
 
 action(clear_policy, Node, [Key], Opts, Inform) ->
     VHostArg = list_to_binary(proplists:get_value(?VHOST_OPT, Opts)),
@@ -483,6 +577,20 @@ action(eval, Node, [Expr], _Opts, _Inform) ->
             {error_string, format_parse_error(E)}
     end;
 
+action(help, _Node, _Args, _Opts, _Inform) ->
+    io:format("~s", [rabbit_ctl_usage:usage()]);
+
+action(encode, _Node, Args, Opts, _Inform) ->
+    ListCiphers = lists:member({?LIST_CIPHERS_OPT, true}, Opts),
+    ListHashes = lists:member({?LIST_HASHES_OPT, true}, Opts),
+    Decode = lists:member({?DECODE_OPT, true}, Opts),
+    Cipher = list_to_atom(proplists:get_value(?CIPHER_OPT, Opts)),
+    Hash = list_to_atom(proplists:get_value(?HASH_OPT, Opts)),
+    Iterations = list_to_integer(proplists:get_value(?ITERATIONS_OPT, Opts)),
+
+    {_, Msg} = rabbit_control_pbe:encode(ListCiphers, ListHashes, Decode, Cipher, Hash, Iterations, Args),
+    io:format(Msg ++ "~n");
+
 action(Command, Node, Args, Opts, Inform) ->
     %% For backward compatibility, run commands accepting a timeout with
     %% the default timeout.
@@ -499,62 +607,63 @@ action(purge_queue, Node, [Q], Opts, Inform, Timeout) ->
 
 action(list_users, Node, [], _Opts, Inform, Timeout) ->
     Inform("Listing users", []),
-    display_info_list(
-      call(Node, {rabbit_auth_backend_internal, list_users, []}, Timeout),
-      rabbit_auth_backend_internal:user_info_keys());
+    call(Node, {rabbit_auth_backend_internal, list_users, []},
+         rabbit_auth_backend_internal:user_info_keys(), true, Timeout);
 
 action(list_permissions, Node, [], Opts, Inform, Timeout) ->
     VHost = proplists:get_value(?VHOST_OPT, Opts),
     Inform("Listing permissions in vhost \"~s\"", [VHost]),
-    display_info_list(call(Node, {rabbit_auth_backend_internal,
-                             list_vhost_permissions, [VHost]}, Timeout),
-                      rabbit_auth_backend_internal:vhost_perms_info_keys());
+    call(Node, {rabbit_auth_backend_internal, list_vhost_permissions, [VHost]},
+         rabbit_auth_backend_internal:vhost_perms_info_keys(), true, Timeout,
+         true);
 
 action(list_parameters, Node, [], Opts, Inform, Timeout) ->
     VHostArg = list_to_binary(proplists:get_value(?VHOST_OPT, Opts)),
     Inform("Listing runtime parameters", []),
-    display_info_list(
-      rpc_call(Node, rabbit_runtime_parameters, list_formatted, [VHostArg],
-               Timeout),
-      rabbit_runtime_parameters:info_keys());
+    call(Node, {rabbit_runtime_parameters, list_formatted, [VHostArg]},
+         rabbit_runtime_parameters:info_keys(), Timeout);
 
 action(list_policies, Node, [], Opts, Inform, Timeout) ->
     VHostArg = list_to_binary(proplists:get_value(?VHOST_OPT, Opts)),
     Inform("Listing policies", []),
-    display_info_list(rpc_call(Node, rabbit_policy, list_formatted, [VHostArg],
-                              Timeout),
-                      rabbit_policy:info_keys());
+    call(Node, {rabbit_policy, list_formatted, [VHostArg]},
+         rabbit_policy:info_keys(), Timeout);
 
 action(list_vhosts, Node, Args, _Opts, Inform, Timeout) ->
     Inform("Listing vhosts", []),
     ArgAtoms = default_if_empty(Args, [name]),
-    display_info_list(call(Node, {rabbit_vhost, info_all, []}, Timeout),
-                      ArgAtoms);
+    call(Node, {rabbit_vhost, info_all, []}, ArgAtoms, true, Timeout);
 
 action(list_user_permissions, _Node, _Args = [], _Opts, _Inform, _Timeout) ->
     {error_string,
      "list_user_permissions expects a username argument, but none provided."};
 action(list_user_permissions, Node, Args = [_Username], _Opts, Inform, Timeout) ->
     Inform("Listing permissions for user ~p", Args),
-    display_info_list(call(Node, {rabbit_auth_backend_internal,
-                                  list_user_permissions, Args}, Timeout),
-                      rabbit_auth_backend_internal:user_perms_info_keys());
+    call(Node, {rabbit_auth_backend_internal, list_user_permissions, Args},
+         rabbit_auth_backend_internal:user_perms_info_keys(), true, Timeout,
+         true);
 
 action(list_queues, Node, Args, Opts, Inform, Timeout) ->
-    Inform("Listing queues", []),
-    VHostArg = list_to_binary(proplists:get_value(?VHOST_OPT, Opts)),
-    ArgAtoms = default_if_empty(Args, [name, messages]),
-    display_info_list(rpc_call(Node, rabbit_amqqueue, info_all,
-                               [VHostArg, ArgAtoms], Timeout),
-                      ArgAtoms);
+    case rabbit_cli:mutually_exclusive_flags(
+           Opts, all, [{?ONLINE_OPT, online}
+                      ,{?OFFLINE_OPT, offline}
+                      ,{?LOCAL_OPT, local}]) of
+        {ok, Filter} ->
+            Inform("Listing queues", []),
+            VHostArg = list_to_binary(proplists:get_value(?VHOST_OPT, Opts)),
+            ArgAtoms = default_if_empty(Args, [name, messages]),
+            call(Node, {rabbit_amqqueue, info_all, [VHostArg, ArgAtoms, Filter]},
+                 ArgAtoms, Timeout);
+        {error, ErrStr} ->
+            {error_string, ErrStr}
+    end;
 
 action(list_exchanges, Node, Args, Opts, Inform, Timeout) ->
     Inform("Listing exchanges", []),
     VHostArg = list_to_binary(proplists:get_value(?VHOST_OPT, Opts)),
     ArgAtoms = default_if_empty(Args, [name, type]),
-    display_info_list(rpc_call(Node, rabbit_exchange, info_all,
-                               [VHostArg, ArgAtoms], Timeout),
-                      ArgAtoms);
+    call(Node, {rabbit_exchange, info_all, [VHostArg, ArgAtoms]},
+         ArgAtoms, Timeout);
 
 action(list_bindings, Node, Args, Opts, Inform, Timeout) ->
     Inform("Listing bindings", []),
@@ -562,44 +671,45 @@ action(list_bindings, Node, Args, Opts, Inform, Timeout) ->
     ArgAtoms = default_if_empty(Args, [source_name, source_kind,
                                        destination_name, destination_kind,
                                        routing_key, arguments]),
-    display_info_list(rpc_call(Node, rabbit_binding, info_all,
-                               [VHostArg, ArgAtoms], Timeout),
-                      ArgAtoms);
+    call(Node, {rabbit_binding, info_all, [VHostArg, ArgAtoms]},
+         ArgAtoms, Timeout);
 
 action(list_connections, Node, Args, _Opts, Inform, Timeout) ->
     Inform("Listing connections", []),
     ArgAtoms = default_if_empty(Args, [user, peer_host, peer_port, state]),
-    display_info_list(rpc_call(Node, rabbit_networking, connection_info_all,
-                               [ArgAtoms], Timeout),
-                      ArgAtoms);
+    call(Node, {rabbit_networking, connection_info_all, [ArgAtoms]},
+         ArgAtoms, Timeout);
 
 action(list_channels, Node, Args, _Opts, Inform, Timeout) ->
     Inform("Listing channels", []),
     ArgAtoms = default_if_empty(Args, [pid, user, consumer_count,
                                        messages_unacknowledged]),
-    display_info_list(rpc_call(Node, rabbit_channel, info_all, [ArgAtoms],
-                               Timeout),
-                      ArgAtoms);
+    call(Node, {rabbit_channel, info_all, [ArgAtoms]},
+         ArgAtoms, Timeout);
 
 action(list_consumers, Node, _Args, Opts, Inform, Timeout) ->
     Inform("Listing consumers", []),
     VHostArg = list_to_binary(proplists:get_value(?VHOST_OPT, Opts)),
-    display_info_list(rpc_call(Node, rabbit_amqqueue, consumers_all, [VHostArg],
-                               Timeout),
-                      rabbit_amqqueue:consumer_info_keys()).
-
+    call(Node, {rabbit_amqqueue, consumers_all, [VHostArg]},
+         rabbit_amqqueue:consumer_info_keys(), Timeout);
+
+action(node_health_check, Node, _Args, _Opts, Inform, Timeout) ->
+    Inform("Checking health of node ~p", [Node]),
+    case rabbit_health_check:node(Node, Timeout) of
+        ok ->
+            io:format("Health check passed~n"),
+            ok;
+        Other ->
+            Other
+    end.
 
 format_parse_error({_Line, Mod, Err}) -> lists:flatten(Mod:format_error(Err)).
 
 sync_queue(Q) ->
-    rabbit_amqqueue:with(
-      Q, fun(#amqqueue{pid = QPid}) -> rabbit_amqqueue:sync_mirrors(QPid) end).
+    rabbit_mirror_queue_misc:sync_queue(Q).
 
 cancel_sync_queue(Q) ->
-    rabbit_amqqueue:with(
-      Q, fun(#amqqueue{pid = QPid}) ->
-                 rabbit_amqqueue:cancel_sync_mirrors(QPid)
-         end).
+    rabbit_mirror_queue_misc:cancel_sync_queue(Q).
 
 purge_queue(Q) ->
     rabbit_amqqueue:with(
@@ -610,6 +720,15 @@ purge_queue(Q) ->
 
 %%----------------------------------------------------------------------------
 
+require_mnesia_stopped(Node, Fun) ->
+    case Fun() of
+        {error, mnesia_unexpectedly_running} ->
+            {error_string, rabbit_misc:format(
+                             " Mnesia is still running on node ~p.
+        Please stop the node with rabbitmqctl stop_app first.", [Node])};
+        Other -> Other
+    end.
+
 wait_for_application(Node, PidFile, Application, Inform) ->
     Pid = read_pid_file(PidFile, true),
     Inform("pid is ~s", [Pid]),
@@ -662,10 +781,10 @@ read_pid_file(PidFile, Wait) ->
 
 become(BecomeNode) ->
     error_logger:tty(false),
-    ok = net_kernel:stop(),
     case net_adm:ping(BecomeNode) of
         pong -> exit({node_running, BecomeNode});
-        pang -> io:format("  * Impersonating node: ~s...", [BecomeNode]),
+        pang -> ok = net_kernel:stop(),
+                io:format("  * Impersonating node: ~s...", [BecomeNode]),
                 {ok, _} = rabbit_cli:start_distribution(BecomeNode),
                 io:format(" done~n", []),
                 Dir = mnesia:system_info(directory),
@@ -679,11 +798,33 @@ default_if_empty(List, Default) when is_list(List) ->
        true       -> [list_to_atom(X) || X <- List]
     end.
 
+display_info_message_row(IsEscaped, Result, InfoItemKeys) ->
+    display_row([format_info_item(
+                   case proplists:lookup(X, Result) of
+                       none when is_list(Result), length(Result) > 0 ->
+                           exit({error, {bad_info_key, X}});
+                       none -> Result;
+                       {X, Value} -> Value
+                   end, IsEscaped) || X <- InfoItemKeys]).
+
+display_info_message(IsEscaped) ->
+    fun ([], _) ->
+            ok;
+        ([FirstResult|_] = List, InfoItemKeys) when is_list(FirstResult) ->
+            lists:foreach(fun(Result) ->
+                                  display_info_message_row(IsEscaped, Result, InfoItemKeys)
+                          end,
+                          List),
+            ok;
+        (Result, InfoItemKeys) ->
+            display_info_message_row(IsEscaped, Result, InfoItemKeys)
+    end.
+
 display_info_list(Results, InfoItemKeys) when is_list(Results) ->
     lists:foreach(
       fun (Result) -> display_row(
-                        [format_info_item(proplists:get_value(X, Result)) ||
-                            X <- InfoItemKeys])
+                        [format_info_item(proplists:get_value(X, Result), true)
+                         || X <- InfoItemKeys])
       end, lists:sort(Results)),
     ok;
 display_info_list(Other, _) ->
@@ -696,32 +837,33 @@ display_row(Row) ->
 -define(IS_U8(X),  (X >= 0 andalso X =< 255)).
 -define(IS_U16(X), (X >= 0 andalso X =< 65535)).
 
-format_info_item(#resource{name = Name}) ->
-    escape(Name);
-format_info_item({N1, N2, N3, N4} = Value) when
+format_info_item(#resource{name = Name}, IsEscaped) ->
+    escape(Name, IsEscaped);
+format_info_item({N1, N2, N3, N4} = Value, _IsEscaped) when
       ?IS_U8(N1), ?IS_U8(N2), ?IS_U8(N3), ?IS_U8(N4) ->
     rabbit_misc:ntoa(Value);
-format_info_item({K1, K2, K3, K4, K5, K6, K7, K8} = Value) when
+format_info_item({K1, K2, K3, K4, K5, K6, K7, K8} = Value, _IsEscaped) when
       ?IS_U16(K1), ?IS_U16(K2), ?IS_U16(K3), ?IS_U16(K4),
       ?IS_U16(K5), ?IS_U16(K6), ?IS_U16(K7), ?IS_U16(K8) ->
     rabbit_misc:ntoa(Value);
-format_info_item(Value) when is_pid(Value) ->
+format_info_item(Value, _IsEscaped) when is_pid(Value) ->
     rabbit_misc:pid_to_string(Value);
-format_info_item(Value) when is_binary(Value) ->
-    escape(Value);
-format_info_item(Value) when is_atom(Value) ->
-    escape(atom_to_list(Value));
+format_info_item(Value, IsEscaped) when is_binary(Value) ->
+    escape(Value, IsEscaped);
+format_info_item(Value, IsEscaped) when is_atom(Value) ->
+    escape(atom_to_list(Value), IsEscaped);
 format_info_item([{TableEntryKey, TableEntryType, _TableEntryValue} | _] =
-                     Value) when is_binary(TableEntryKey) andalso
-                                 is_atom(TableEntryType) ->
-    io_lib:format("~1000000000000p", [prettify_amqp_table(Value)]);
-format_info_item([T | _] = Value)
+                     Value, IsEscaped) when is_binary(TableEntryKey) andalso
+                                              is_atom(TableEntryType) ->
+    io_lib:format("~1000000000000p", [prettify_amqp_table(Value, IsEscaped)]);
+format_info_item([T | _] = Value, IsEscaped)
   when is_tuple(T) orelse is_pid(T) orelse is_binary(T) orelse is_atom(T) orelse
        is_list(T) ->
     "[" ++
         lists:nthtail(2, lists:append(
-                           [", " ++ format_info_item(E) || E <- Value])) ++ "]";
-format_info_item(Value) ->
+                           [", " ++ format_info_item(E, IsEscaped)
+                            || E <- Value])) ++ "]";
+format_info_item(Value, _IsEscaped) ->
     io_lib:format("~w", [Value]).
 
 display_call_result(Node, MFA) ->
@@ -751,8 +893,33 @@ ensure_app_running(Node) ->
 call(Node, {Mod, Fun, Args}) ->
     rpc_call(Node, Mod, Fun, lists:map(fun list_to_binary_utf8/1, Args)).
 
-call(Node, {Mod, Fun, Args}, Timeout) ->
-    rpc_call(Node, Mod, Fun, lists:map(fun list_to_binary_utf8/1, Args), Timeout).
+call(Node, {Mod, Fun, Args}, InfoKeys, Timeout) ->
+    call(Node, {Mod, Fun, Args}, InfoKeys, false, Timeout, false).
+
+call(Node, {Mod, Fun, Args}, InfoKeys, ToBinUtf8, Timeout) ->
+    call(Node, {Mod, Fun, Args}, InfoKeys, ToBinUtf8, Timeout, false).
+
+call(Node, {Mod, Fun, Args}, InfoKeys, ToBinUtf8, Timeout, IsEscaped) ->
+    Args0 = case ToBinUtf8 of
+                true  -> lists:map(fun list_to_binary_utf8/1, Args);
+                false -> Args
+            end,
+    Ref = make_ref(),
+    Pid = self(),
+    spawn_link(
+      fun () ->
+              case rabbit_cli:rpc_call(Node, Mod, Fun, Args0,
+                                       Ref, Pid, Timeout) of
+                  {error, _} = Error        ->
+                      Pid ! {error, Error};
+                  {bad_argument, _} = Error ->
+                      Pid ! {error, Error};
+                  _                         ->
+                      ok
+              end
+      end),
+    rabbit_control_misc:wait_for_info_messages(
+      Pid, Ref, InfoKeys, display_info_message(IsEscaped), Timeout).
 
 list_to_binary_utf8(L) ->
     B = list_to_binary(L),
@@ -765,9 +932,14 @@ list_to_binary_utf8(L) ->
 %% characters.  We don't escape characters above 127, since they may
 %% form part of UTF-8 strings.
 
-escape(Atom) when is_atom(Atom)  -> escape(atom_to_list(Atom));
-escape(Bin)  when is_binary(Bin) -> escape(binary_to_list(Bin));
-escape(L)    when is_list(L)     -> escape_char(lists:reverse(L), []).
+escape(Atom, IsEscaped) when is_atom(Atom) ->
+    escape(atom_to_list(Atom), IsEscaped);
+escape(Bin, IsEscaped)  when is_binary(Bin) ->
+    escape(binary_to_list(Bin), IsEscaped);
+escape(L, false) when is_list(L) ->
+    escape_char(lists:reverse(L), []);
+escape(L, true) when is_list(L) ->
+    L. 
 
 escape_char([$\\ | T], Acc) ->
     escape_char(T, [$\\, $\\ | Acc]);
@@ -779,15 +951,30 @@ escape_char([X | T], Acc) ->
 escape_char([], Acc) ->
     Acc.
 
-prettify_amqp_table(Table) ->
-    [{escape(K), prettify_typed_amqp_value(T, V)} || {K, T, V} <- Table].
+prettify_amqp_table(Table, IsEscaped) ->
+    [{escape(K, IsEscaped), prettify_typed_amqp_value(T, V, IsEscaped)}
+     || {K, T, V} <- Table].
 
-prettify_typed_amqp_value(longstr, Value) -> escape(Value);
-prettify_typed_amqp_value(table,   Value) -> prettify_amqp_table(Value);
-prettify_typed_amqp_value(array,   Value) -> [prettify_typed_amqp_value(T, V) ||
-                                                 {T, V} <- Value];
-prettify_typed_amqp_value(_Type,   Value) -> Value.
+prettify_typed_amqp_value(longstr, Value, IsEscaped) ->
+    escape(Value, IsEscaped);
+prettify_typed_amqp_value(table, Value, IsEscaped) ->
+    prettify_amqp_table(Value, IsEscaped);
+prettify_typed_amqp_value(array, Value, IsEscaped) ->
+    [prettify_typed_amqp_value(T, V, IsEscaped) || {T, V} <- Value];
+prettify_typed_amqp_value(_Type, Value, _IsEscaped) ->
+    Value.
 
 split_list([])         -> [];
 split_list([_])        -> exit(even_list_needed);
 split_list([A, B | T]) -> [{A, B} | split_list(T)].
+
+nodes_in_cluster(Node) ->
+    unsafe_rpc(Node, rabbit_mnesia, cluster_nodes, [running]).
+
+alarms_by_node(Name) ->
+    case rpc_call(Name, rabbit, status, []) of
+        {badrpc,nodedown} -> {Name, [nodedown]};
+        Status ->
+            {_, As} = lists:keyfind(alarms, 1, Status),
+            {Name, As}
+    end.
diff --git a/deps/rabbit/src/rabbit_control_pbe.erl b/deps/rabbit/src/rabbit_control_pbe.erl
new file mode 100644 (file)
index 0000000..2fa2c90
--- /dev/null
@@ -0,0 +1,79 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_control_pbe).
+
+-export([encode/7]).
+
+% for testing purposes
+-export([evaluate_input_as_term/1]).
+
+encode(ListCiphers, _ListHashes, _Decode, _Cipher, _Hash, _Iterations, _Args) when ListCiphers ->
+    {ok, io_lib:format("~p", [rabbit_pbe:supported_ciphers()])};
+
+encode(_ListCiphers, ListHashes, _Decode, _Cipher, _Hash, _Iterations, _Args) when ListHashes ->
+    {ok, io_lib:format("~p", [rabbit_pbe:supported_hashes()])};
+
+encode(_ListCiphers, _ListHashes, Decode, Cipher, Hash, Iterations, Args) ->
+    CipherExists = lists:member(Cipher, rabbit_pbe:supported_ciphers()),
+    HashExists = lists:member(Hash, rabbit_pbe:supported_hashes()),
+    encode_encrypt_decrypt(CipherExists, HashExists, Decode, Cipher, Hash, Iterations, Args).
+
+encode_encrypt_decrypt(CipherExists, _HashExists, _Decode, _Cipher, _Hash, _Iterations, _Args) when CipherExists =:= false ->
+    {error, io_lib:format("The requested cipher is not supported", [])};
+
+encode_encrypt_decrypt(_CipherExists, HashExists, _Decode, _Cipher, _Hash, _Iterations, _Args) when HashExists =:= false ->
+    {error, io_lib:format("The requested hash is not supported", [])};
+
+encode_encrypt_decrypt(_CipherExists, _HashExists, _Decode, _Cipher, _Hash, Iterations, _Args) when Iterations =< 0 ->
+    {error, io_lib:format("The requested number of iterations is incorrect", [])};
+
+encode_encrypt_decrypt(_CipherExists, _HashExists, Decode, Cipher, Hash, Iterations, Args) when length(Args) == 2, Decode =:= false ->
+    [Value, PassPhrase] = Args,
+    try begin
+            TermValue = evaluate_input_as_term(Value),
+            Result = rabbit_pbe:encrypt_term(Cipher, Hash, Iterations, list_to_binary(PassPhrase), TermValue),
+            {ok, io_lib:format("~p", [{encrypted, Result}])}
+        end
+    catch
+        _:Msg -> {error, io_lib:format("Error during cipher operation: ~p", [Msg])}
+    end;
+
+encode_encrypt_decrypt(_CipherExists, _HashExists, Decode, Cipher, Hash, Iterations, Args) when length(Args) == 2, Decode ->
+    [Value, PassPhrase] = Args,
+    try begin
+            TermValue = evaluate_input_as_term(Value),
+            TermToDecrypt = case TermValue of
+                {encrypted, EncryptedTerm} ->
+                    EncryptedTerm;
+                _ ->
+                    TermValue
+            end,
+            Result = rabbit_pbe:decrypt_term(Cipher, Hash, Iterations, list_to_binary(PassPhrase), TermToDecrypt),
+            {ok, io_lib:format("~p", [Result])}
+        end
+    catch
+        _:Msg -> {error, io_lib:format("Error during cipher operation: ~p", [Msg])}
+    end;
+
+encode_encrypt_decrypt(_CipherExists, _HashExists, _Decode, _Cipher, _Hash, _Iterations, _Args) ->
+    {error, io_lib:format("Please provide a value to encode/decode and a passphrase", [])}.
+
+evaluate_input_as_term(Input) ->
+    {ok,Tokens,_EndLine} = erl_scan:string(Input ++ "."),
+    {ok,AbsForm} = erl_parse:parse_exprs(Tokens),
+    {value,TermValue,_Bs} = erl_eval:exprs(AbsForm, erl_eval:new_bindings()),
+    TermValue.
similarity index 91%
rename from rabbitmq-server/src/rabbit_dead_letter.erl
rename to deps/rabbit/src/rabbit_dead_letter.erl
index 29032df856c3644a1910b184d4b1fcc533b3224f..91d23c83a4d6c7ba32b97996b37f82b5ec6db4a1 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_dead_letter).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -type reason() :: 'expired' | 'rejected' | 'maxlen'.
 
 -spec publish(rabbit_types:message(), reason(), rabbit_types:exchange(),
               'undefined' | binary(), rabbit_amqqueue:name()) -> 'ok'.
 
--endif.
-
 %%----------------------------------------------------------------------------
 
 publish(Msg, Reason, X, RK, QName) ->
@@ -53,7 +49,7 @@ make_msg(Msg = #basic_message{content       = Content,
             _         -> {[RK], fun (H) -> lists:keydelete(<<"CC">>, 1, H) end}
         end,
     ReasonBin = list_to_binary(atom_to_list(Reason)),
-    TimeSec = rabbit_misc:now_ms() div 1000,
+    TimeSec = time_compat:os_system_time(seconds),
     PerMsgTTL = per_msg_ttl_header(Content#content.properties),
     HeadersFun2 =
         fun (Headers) ->
@@ -139,7 +135,19 @@ update_x_death_header(Info, Headers) ->
                     end,
             rabbit_misc:set_table_value(
               Headers, <<"x-death">>, array,
-              [{table, rabbit_misc:sort_field_table(Info1)} | Others])
+              [{table, rabbit_misc:sort_field_table(Info1)} | Others]);
+        {<<"x-death">>, InvalidType, Header} ->
+            rabbit_log:warning("Message has invalid x-death header (type: ~p)."
+                               " Resetting header ~p~n",
+                               [InvalidType, Header]),
+            %% if x-death is something other than an array (list)
+            %% then we reset it: this happens when some clients consume
+            %% a message and re-publish is, converting header values
+            %% to strings, intentionally or not.
+            %% See rabbitmq/rabbitmq-server#767 for details.
+            rabbit_misc:set_table_value(
+              Headers, <<"x-death">>, array,
+              [{table, [{<<"count">>, long, 1} | Info]}])
     end.
 
 ensure_xdeath_event_count({table, Info}, InitialVal) when InitialVal >= 1 ->
similarity index 74%
rename from rabbitmq-server/src/rabbit_diagnostics.erl
rename to deps/rabbit/src/rabbit_diagnostics.erl
index 531f3f922ebc79a66a5a02704807980739fe8c12..d28bb9ffd732437bbe087384914d9b4da16aaaa9 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_diagnostics).
 
 -define(PROCESS_INFO,
-        [registered_name, current_stacktrace, initial_call, dictionary,
-         message_queue_len, links, monitors, monitored_by, heap_size]).
+        [registered_name, current_stacktrace, initial_call, message_queue_len,
+         links, monitors, monitored_by, heap_size]).
 
 -export([maybe_stuck/0, maybe_stuck/1, top_memory_use/0, top_memory_use/1,
          top_binary_refs/0, top_binary_refs/1]).
@@ -27,17 +27,17 @@ maybe_stuck() -> maybe_stuck(5000).
 
 maybe_stuck(Timeout) ->
     Pids = processes(),
-    io:format("There are ~p processes.~n", [length(Pids)]),
+    io:format("~s There are ~p processes.~n", [get_time(), length(Pids)]),
     maybe_stuck(Pids, Timeout).
 
 maybe_stuck(Pids, Timeout) when Timeout =< 0 ->
-    io:format("Found ~p suspicious processes.~n", [length(Pids)]),
-    [io:format("~p~n", [info(Pid)]) || Pid <- Pids],
+    io:format("~s Found ~p suspicious processes.~n", [get_time(), length(Pids)]),
+    [io:format("~s ~p~n", [get_time(), info(Pid)]) || Pid <- Pids],
     ok;
 maybe_stuck(Pids, Timeout) ->
     Pids2 = [P || P  <- Pids, looks_stuck(P)],
-    io:format("Investigated ~p processes this round, ~pms to go.~n",
-              [length(Pids2), Timeout]),
+    io:format("~s Investigated ~p processes this round, ~pms to go.~n",
+              [get_time(), length(Pids2), Timeout]),
     timer:sleep(500),
     maybe_stuck(Pids2, Timeout - 500).
 
@@ -80,19 +80,19 @@ top_memory_use() -> top_memory_use(30).
 
 top_memory_use(Count) ->
     Pids = processes(),
-    io:format("Memory use: top ~p of ~p processes.~n", [Count, length(Pids)]),
+    io:format("~s Memory use: top ~p of ~p processes.~n", [get_time(), Count, length(Pids)]),
     Procs = [{info(Pid, memory, 0), info(Pid)} || Pid <- Pids],
     Sorted = lists:sublist(lists:reverse(lists:sort(Procs)), Count),
-    io:format("~p~n", [Sorted]).
+    io:format("~s ~p~n", [get_time(), Sorted]).
 
 top_binary_refs() -> top_binary_refs(30).
 
 top_binary_refs(Count) ->
     Pids = processes(),
-    io:format("Binary refs: top ~p of ~p processes.~n", [Count, length(Pids)]),
+    io:format("~s Binary refs: top ~p of ~p processes.~n", [get_time(), Count, length(Pids)]),
     Procs = [{{binary_refs, binary_refs(Pid)}, info(Pid)} || Pid <- Pids],
     Sorted = lists:sublist(lists:reverse(lists:sort(Procs)), Count),
-    io:format("~p~n", [Sorted]).
+    io:format("~s ~p~n", [get_time(), Sorted]).
 
 binary_refs(Pid) ->
     {binary, Refs} = info(Pid, binary, []),
@@ -111,3 +111,16 @@ info(Pid, Infos, Default) ->
                    false -> Default
                end
     end.
+
+get_time() ->
+    {{Y,M,D}, {H,Min,Sec}} = calendar:local_time(),
+    [ integer_to_list(Y), "-", 
+      prefix_zero(integer_to_list(M)), "-", 
+      prefix_zero(integer_to_list(D)), " ",
+      prefix_zero(integer_to_list(H)), ":", 
+      prefix_zero(integer_to_list(Min)), ":", 
+      prefix_zero(integer_to_list(Sec)) 
+      ].
+
+prefix_zero([C]) -> [$0, C];
+prefix_zero([_,_] = Full) -> Full.
similarity index 73%
rename from rabbitmq-server/src/rabbit_direct.erl
rename to deps/rabbit/src/rabbit_direct.erl
index d79ef4aeb9a3f4ec50f3a3f5e0aee5fe15fca010..061105c150126d58996496049877d5636fec7681 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_direct).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(boot/0 :: () -> 'ok').
--spec(force_event_refresh/1 :: (reference()) -> 'ok').
--spec(list/0 :: () -> [pid()]).
--spec(list_local/0 :: () -> [pid()]).
--spec(connect/5 :: (({'none', 'none'} | {rabbit_types:username(), 'none'} |
-                     {rabbit_types:username(), rabbit_types:password()}),
-                    rabbit_types:vhost(), rabbit_types:protocol(), pid(),
-                    rabbit_event:event_props()) ->
-                        rabbit_types:ok_or_error2(
-                          {rabbit_types:user(), rabbit_framing:amqp_table()},
-                          'broker_not_found_on_node' |
-                          {'auth_failure', string()} | 'access_refused')).
--spec(start_channel/9 ::
+-spec boot() -> 'ok'.
+-spec force_event_refresh(reference()) -> 'ok'.
+-spec list() -> [pid()].
+-spec list_local() -> [pid()].
+-spec connect
+        (({'none', 'none'} | {rabbit_types:username(), 'none'} |
+          {rabbit_types:username(), rabbit_types:password()}),
+         rabbit_types:vhost(), rabbit_types:protocol(), pid(),
+         rabbit_event:event_props()) ->
+            rabbit_types:ok_or_error2(
+              {rabbit_types:user(), rabbit_framing:amqp_table()},
+              'broker_not_found_on_node' |
+              {'auth_failure', string()} | 'access_refused').
+-spec start_channel
         (rabbit_channel:channel_number(), pid(), pid(), string(),
          rabbit_types:protocol(), rabbit_types:user(), rabbit_types:vhost(),
-         rabbit_framing:amqp_table(), pid()) -> {'ok', pid()}).
--spec(disconnect/2 :: (pid(), rabbit_event:event_props()) -> 'ok').
-
--endif.
+         rabbit_framing:amqp_table(), pid()) ->
+            {'ok', pid()}.
+-spec disconnect(pid(), rabbit_event:event_props()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -76,8 +74,8 @@ connect({Username, none}, VHost, Protocol, Pid, Infos) ->
              VHost, Protocol, Pid, Infos);
 
 connect({Username, Password}, VHost, Protocol, Pid, Infos) ->
-    connect0(fun () -> rabbit_access_control:check_user_pass_login(
-                         Username, Password) end,
+    connect0(fun () -> rabbit_access_control:check_user_login(
+                         Username, [{password, Password}, {vhost, VHost}]) end,
              VHost, Protocol, Pid, Infos).
 
 connect0(AuthFun, VHost, Protocol, Pid, Infos) ->
@@ -102,14 +100,20 @@ notify_auth_result(Username, AuthResult, ExtraProps) ->
                  ExtraProps,
     rabbit_event:notify(AuthResult, [P || {_, V} = P <- EventProps, V =/= '']).
 
+authz_socket_info_direct(Infos) ->
+    #authz_socket_info{sockname={proplists:get_value(host, Infos),
+                                 proplists:get_value(port, Infos)},
+                       peername={proplists:get_value(peer_host, Infos),
+                                 proplists:get_value(peer_port, Infos)}}.
+
 connect1(User, VHost, Protocol, Pid, Infos) ->
-    try rabbit_access_control:check_vhost_access(User, VHost, undefined) of
+    try rabbit_access_control:check_vhost_access(User, VHost, authz_socket_info_direct(Infos)) of
         ok -> ok = pg_local:join(rabbit_direct, Pid),
               rabbit_event:notify(connection_created, Infos),
               {ok, {User, rabbit_reader:server_properties(Protocol)}}
     catch
-        exit:#amqp_error{name = access_refused} ->
-            {error, access_refused}
+        exit:#amqp_error{name = Reason = not_allowed} ->
+            {error, Reason}
     end.
 
 start_channel(Number, ClientChannelPid, ConnPid, ConnName, Protocol, User,
similarity index 70%
rename from rabbitmq-server/src/rabbit_disk_monitor.erl
rename to deps/rabbit/src/rabbit_disk_monitor.erl
index 518000eb645ba21b6da7fbe0f1153bf4afadb7a6..4c1ff0248621b9c911d1ae86d4c552a9e0913798 100644 (file)
@@ -1,4 +1,4 @@
-%% The contents of this file are subject to the Mozilla Public License
+% The contents of this file are subject to the Mozilla Public License
 %% Version 1.1 (the "License"); you may not use this file except in
 %% compliance with the License. You may obtain a copy of the License
 %% at http://www.mozilla.org/MPL/
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_disk_monitor).
 
+%% Disk monitoring server. Monitors free disk space
+%% periodically and sets alarms when it is below a certain
+%% watermark (configurable either as an absolute value or
+%% relative to the memory limit).
+%%
+%% Disk monitoring is done by shelling out to /usr/bin/df
+%% instead of related built-in OTP functions because currently
+%% this is the most reliable way of determining free disk space
+%% for the partition our internal database is on.
+%%
+%% Update interval is dynamically calculated assuming disk
+%% space is being filled at FAST_RATE.
+
 -behaviour(gen_server).
 
 -export([start_link/1]).
 -define(SERVER, ?MODULE).
 -define(DEFAULT_MIN_DISK_CHECK_INTERVAL, 100).
 -define(DEFAULT_MAX_DISK_CHECK_INTERVAL, 10000).
+-define(DEFAULT_DISK_FREE_LIMIT, 50000000).
 %% 250MB/s i.e. 250kB/ms
 -define(FAST_RATE, (250 * 1000)).
 
--record(state, {dir,
-                limit,
-                actual,
-                min_interval,
-                max_interval,
-                timer,
-                alarmed
-               }).
+-record(state, {
+          %% monitor partition on which this directory resides
+          dir,
+          %% configured limit in bytes
+          limit,
+          %% last known free disk space amount in bytes
+          actual,
+          %% minimum check interval
+          min_interval,
+          %% maximum check interval
+          max_interval,
+          %% timer that drives periodic checks
+          timer,
+          %% is free disk space alarm currently in effect?
+          alarmed,
+          %% is monitoring enabled? false on unsupported
+          %% platforms
+          enabled
+}).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(disk_free_limit() :: (integer() | {'mem_relative', float()})).
--spec(start_link/1 :: (disk_free_limit()) -> rabbit_types:ok_pid_or_error()).
--spec(get_disk_free_limit/0 :: () -> integer()).
--spec(set_disk_free_limit/1 :: (disk_free_limit()) -> 'ok').
--spec(get_min_check_interval/0 :: () -> integer()).
--spec(set_min_check_interval/1 :: (integer()) -> 'ok').
--spec(get_max_check_interval/0 :: () -> integer()).
--spec(set_max_check_interval/1 :: (integer()) -> 'ok').
--spec(get_disk_free/0 :: () -> (integer() | 'unknown')).
-
--endif.
+-type disk_free_limit() :: (integer() | string() | {'mem_relative', float()}).
+-spec start_link(disk_free_limit()) -> rabbit_types:ok_pid_or_error().
+-spec get_disk_free_limit() -> integer().
+-spec set_disk_free_limit(disk_free_limit()) -> 'ok'.
+-spec get_min_check_interval() -> integer().
+-spec set_min_check_interval(integer()) -> 'ok'.
+-spec get_max_check_interval() -> integer().
+-spec set_max_check_interval(integer()) -> 'ok'.
+-spec get_disk_free() -> (integer() | 'unknown').
 
 %%----------------------------------------------------------------------------
 %% Public API
@@ -96,7 +117,8 @@ init([Limit]) ->
     State = #state{dir          = Dir,
                    min_interval = ?DEFAULT_MIN_DISK_CHECK_INTERVAL,
                    max_interval = ?DEFAULT_MAX_DISK_CHECK_INTERVAL,
-                   alarmed      = false},
+                   alarmed      = false,
+                   enabled      = true},
     case {catch get_disk_free(Dir),
           vm_memory_monitor:get_total_memory()} of
         {N1, N2} when is_integer(N1), is_integer(N2) ->
@@ -104,12 +126,17 @@ init([Limit]) ->
         Err ->
             rabbit_log:info("Disabling disk free space monitoring "
                             "on unsupported platform:~n~p~n", [Err]),
-            {stop, unsupported_platform}
+            {ok, State#state{enabled = false}}
     end.
 
 handle_call(get_disk_free_limit, _From, State = #state{limit = Limit}) ->
     {reply, Limit, State};
 
+handle_call({set_disk_free_limit, _}, _From, #state{enabled = false} = State) ->
+    rabbit_log:info("Cannot set disk free limit: "
+                   "disabled disk free space monitoring", []),
+    {reply, ok, State};
+
 handle_call({set_disk_free_limit, Limit}, _From, State) ->
     {reply, ok, set_disk_limits(State, Limit)};
 
@@ -182,9 +209,11 @@ get_disk_free(Dir) ->
 
 get_disk_free(Dir, {unix, Sun})
   when Sun =:= sunos; Sun =:= sunos4; Sun =:= solaris ->
-    parse_free_unix(rabbit_misc:os_cmd("/usr/bin/df -k " ++ Dir));
+    Df = os:find_executable("df"),
+    parse_free_unix(rabbit_misc:os_cmd(Df ++ " -k " ++ Dir));
 get_disk_free(Dir, {unix, _}) ->
-    parse_free_unix(rabbit_misc:os_cmd("/bin/df -kP " ++ Dir));
+    Df = os:find_executable("df"),
+    parse_free_unix(rabbit_misc:os_cmd(Df ++ " -kP " ++ Dir));
 get_disk_free(Dir, {win32, _}) ->
     parse_free_win32(rabbit_misc:os_cmd("dir /-C /W \"" ++ Dir ++ "\"")).
 
@@ -203,10 +232,17 @@ parse_free_win32(CommandResult) ->
                              [{capture, all_but_first, list}]),
     list_to_integer(lists:reverse(Free)).
 
-interpret_limit({mem_relative, R}) ->
-    round(R * vm_memory_monitor:get_total_memory());
-interpret_limit(L) ->
-    L.
+interpret_limit({mem_relative, Relative}) 
+    when is_float(Relative) ->
+    round(Relative * vm_memory_monitor:get_total_memory());
+interpret_limit(Absolute) -> 
+    case rabbit_resource_monitor_misc:parse_information_unit(Absolute) of
+        {ok, ParsedAbsolute} -> ParsedAbsolute;
+        {error, parse_error} ->
+            rabbit_log:error("Unable to parse disk_free_limit value ~p", 
+                             [Absolute]),
+            ?DEFAULT_DISK_FREE_LIMIT
+    end.
 
 emit_update_info(StateStr, CurrentFree, Limit) ->
     rabbit_log:info(
similarity index 95%
rename from rabbitmq-server/src/rabbit_epmd_monitor.erl
rename to deps/rabbit/src/rabbit_epmd_monitor.erl
index 5b06237e56ea0610a15f80fa191219509303e33a..7f01a7183889755fa7e7c73375f47207967316fc 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_epmd_monitor).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
 
 %%----------------------------------------------------------------------------
 %% It's possible for epmd to be killed out from underneath us. If that
similarity index 81%
rename from rabbitmq-server/src/rabbit_error_logger.erl
rename to deps/rabbit/src/rabbit_error_logger.erl
index eecb2d64d981a8b90df5f763aef01d4f4f54392b..5ba3ce7a4f3d408821b670f4cbd5d7db932949b1 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_error_logger).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start/0 :: () -> 'ok').
--spec(stop/0  :: () -> 'ok').
-
--endif.
+-spec start() -> 'ok'.
+-spec stop() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -54,7 +50,7 @@ start() ->
 
 stop() ->
     case error_logger:delete_report_handler(rabbit_error_logger) of
-        terminated_ok             -> ok;
+        ok                        -> ok;
         {error, module_not_found} -> ok
     end.
 
@@ -69,7 +65,7 @@ init([DefaultVHost]) ->
                    name = ?LOG_EXCH_NAME}}.
 
 terminate(_Arg, _State) ->
-    terminated_ok.
+    ok.
 
 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.
@@ -101,12 +97,15 @@ publish(_Other, _Format, _Data, _State) ->
 publish1(RoutingKey, Format, Data, LogExch) ->
     %% 0-9-1 says the timestamp is a "64 bit POSIX timestamp". That's
     %% second resolution, not millisecond.
-    Timestamp = rabbit_misc:now_ms() div 1000,
+    Timestamp = time_compat:os_system_time(seconds),
 
     Args = [truncate:term(A, ?LOG_TRUNC) || A <- Data],
-    {ok, _DeliveredQPids} =
-        rabbit_basic:publish(LogExch, RoutingKey,
-                             #'P_basic'{content_type = <<"text/plain">>,
-                                        timestamp    = Timestamp},
-                             list_to_binary(io_lib:format(Format, Args))),
-    ok.
+    Headers = [{<<"node">>, longstr, list_to_binary(atom_to_list(node()))}],
+    case rabbit_basic:publish(LogExch, RoutingKey,
+                              #'P_basic'{content_type = <<"text/plain">>,
+                                         timestamp    = Timestamp,
+                                         headers      = Headers},
+                              list_to_binary(io_lib:format(Format, Args))) of
+        {ok, _QPids}  -> ok;
+        {error, _Err} -> ok
+    end.
similarity index 97%
rename from rabbitmq-server/src/rabbit_error_logger_file_h.erl
rename to deps/rabbit/src/rabbit_error_logger_file_h.erl
index f8166bfcd13be4462d2d895b5c059fdb3193e56f..930aead392fd3d8dcb042bd2ae829e59aad4ef87 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_error_logger_file_h).
@@ -55,6 +55,9 @@ get_depth() ->
 %% lib/stdlib/src/error_logger_file_h.erl from R14B3 was copied as
 %% init_file/2 and changed so that it opens the file in 'append' mode.
 
+%% Log rotation with empty suffix should result only in file re-opening.
+init({{File, ""}, _}) ->
+    init(File);
 %% Used only when swapping handlers in log rotation, pre OTP 18.1
 init({{File, Suffix}, []}) ->
     rotate_logs(File, Suffix),
similarity index 78%
rename from rabbitmq-server/src/rabbit_exchange.erl
rename to deps/rabbit/src/rabbit_exchange.erl
index 459334455f749709fa44e8d77603d75d28c9e7cb..aaea27f91ac93eafc140df10527e7602d35a92f4 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange).
          assert_equivalence/6, assert_args_equivalence/2, check_type/1,
          lookup/1, lookup_or_die/1, list/0, list/1, lookup_scratch/2,
          update_scratch/3, update_decorators/1, immutable/1,
-         info_keys/0, info/1, info/2, info_all/1, info_all/2,
+         info_keys/0, info/1, info/2, info_all/1, info_all/2, info_all/4,
          route/2, delete/2, validate_binding/2]).
 %% these must be run inside a mnesia tx
 -export([maybe_auto_delete/2, serial/1, peek_serial/1, update/2]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([name/0, type/0]).
 
--type(name() :: rabbit_types:r('exchange')).
--type(type() :: atom()).
--type(fun_name() :: atom()).
+-type name() :: rabbit_types:r('exchange').
+-type type() :: atom().
+-type fun_name() :: atom().
 
--spec(recover/0 :: () -> [name()]).
--spec(callback/4::
+-spec recover() -> [name()].
+-spec callback
         (rabbit_types:exchange(), fun_name(),
-         fun((boolean()) -> non_neg_integer()) | atom(), [any()]) -> 'ok').
--spec(policy_changed/2 ::
-        (rabbit_types:exchange(), rabbit_types:exchange()) -> 'ok').
--spec(declare/6 ::
+         fun((boolean()) -> non_neg_integer()) | atom(), [any()]) -> 'ok'.
+-spec policy_changed
+        (rabbit_types:exchange(), rabbit_types:exchange()) -> 'ok'.
+-spec declare
         (name(), type(), boolean(), boolean(), boolean(),
          rabbit_framing:amqp_table())
-        -> rabbit_types:exchange()).
--spec(check_type/1 ::
-        (binary()) -> atom() | rabbit_types:connection_exit()).
--spec(assert_equivalence/6 ::
+        -> rabbit_types:exchange().
+-spec check_type
+        (binary()) -> atom() | rabbit_types:connection_exit().
+-spec assert_equivalence
         (rabbit_types:exchange(), atom(), boolean(), boolean(), boolean(),
          rabbit_framing:amqp_table())
-        -> 'ok' | rabbit_types:connection_exit()).
--spec(assert_args_equivalence/2 ::
+        -> 'ok' | rabbit_types:connection_exit().
+-spec assert_args_equivalence
         (rabbit_types:exchange(), rabbit_framing:amqp_table())
-        -> 'ok' | rabbit_types:connection_exit()).
--spec(lookup/1 ::
+        -> 'ok' | rabbit_types:connection_exit().
+-spec lookup
         (name()) -> rabbit_types:ok(rabbit_types:exchange()) |
-                    rabbit_types:error('not_found')).
--spec(lookup_or_die/1 ::
+                    rabbit_types:error('not_found').
+-spec lookup_or_die
         (name()) -> rabbit_types:exchange() |
-                    rabbit_types:channel_exit()).
--spec(list/0 :: () -> [rabbit_types:exchange()]).
--spec(list/1 :: (rabbit_types:vhost()) -> [rabbit_types:exchange()]).
--spec(lookup_scratch/2 :: (name(), atom()) ->
+                    rabbit_types:channel_exit().
+-spec list() -> [rabbit_types:exchange()].
+-spec list(rabbit_types:vhost()) -> [rabbit_types:exchange()].
+-spec lookup_scratch(name(), atom()) ->
                                rabbit_types:ok(term()) |
-                               rabbit_types:error('not_found')).
--spec(update_scratch/3 :: (name(), atom(), fun((any()) -> any())) -> 'ok').
--spec(update/2 ::
+                               rabbit_types:error('not_found').
+-spec update_scratch(name(), atom(), fun((any()) -> any())) -> 'ok'.
+-spec update
         (name(),
          fun((rabbit_types:exchange()) -> rabbit_types:exchange()))
-         -> not_found | rabbit_types:exchange()).
--spec(update_decorators/1 :: (name()) -> 'ok').
--spec(immutable/1 :: (rabbit_types:exchange()) -> rabbit_types:exchange()).
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(info/1 :: (rabbit_types:exchange()) -> rabbit_types:infos()).
--spec(info/2 ::
+         -> not_found | rabbit_types:exchange().
+-spec update_decorators(name()) -> 'ok'.
+-spec immutable(rabbit_types:exchange()) -> rabbit_types:exchange().
+-spec info_keys() -> rabbit_types:info_keys().
+-spec info(rabbit_types:exchange()) -> rabbit_types:infos().
+-spec info
         (rabbit_types:exchange(), rabbit_types:info_keys())
-        -> rabbit_types:infos()).
--spec(info_all/1 :: (rabbit_types:vhost()) -> [rabbit_types:infos()]).
--spec(info_all/2 ::(rabbit_types:vhost(), rabbit_types:info_keys())
-                   -> [rabbit_types:infos()]).
--spec(route/2 :: (rabbit_types:exchange(), rabbit_types:delivery())
-                 -> [rabbit_amqqueue:name()]).
--spec(delete/2 ::
+        -> rabbit_types:infos().
+-spec info_all(rabbit_types:vhost()) -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:vhost(), rabbit_types:info_keys())
+                   -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:vhost(), rabbit_types:info_keys(),
+                    reference(), pid())
+                   -> 'ok'.
+-spec route(rabbit_types:exchange(), rabbit_types:delivery())
+                 -> [rabbit_amqqueue:name()].
+-spec delete
         (name(),  'true') -> 'ok' | rabbit_types:error('not_found' | 'in_use');
-        (name(), 'false') -> 'ok' | rabbit_types:error('not_found')).
--spec(validate_binding/2 ::
+        (name(), 'false') -> 'ok' | rabbit_types:error('not_found').
+-spec validate_binding
         (rabbit_types:exchange(), rabbit_types:binding())
-        -> rabbit_types:ok_or_error({'binding_invalid', string(), [any()]})).
--spec(maybe_auto_delete/2::
+        -> rabbit_types:ok_or_error({'binding_invalid', string(), [any()]}).
+-spec maybe_auto_delete
         (rabbit_types:exchange(), boolean())
-        -> 'not_deleted' | {'deleted', rabbit_binding:deletions()}).
--spec(serial/1 :: (rabbit_types:exchange()) ->
-                       fun((boolean()) -> 'none' | pos_integer())).
--spec(peek_serial/1 :: (name()) -> pos_integer() | 'undefined').
-
--endif.
+        -> 'not_deleted' | {'deleted', rabbit_binding:deletions()}.
+-spec serial(rabbit_types:exchange()) ->
+                       fun((boolean()) -> 'none' | pos_integer()).
+-spec peek_serial(name()) -> pos_integer() | 'undefined'.
 
 %%----------------------------------------------------------------------------
 
@@ -163,24 +162,37 @@ declare(XName, Type, Durable, AutoDelete, Internal, Args) ->
     XT = type_to_module(Type),
     %% We want to upset things if it isn't ok
     ok = XT:validate(X),
-    rabbit_misc:execute_mnesia_transaction(
-      fun () ->
-              case mnesia:wread({rabbit_exchange, XName}) of
-                  [] ->
-                      {new, store(X)};
-                  [ExistingX] ->
-                      {existing, ExistingX}
-              end
-      end,
-      fun ({new, Exchange}, Tx) ->
-              ok = callback(X, create, map_create_tx(Tx), [Exchange]),
-              rabbit_event:notify_if(not Tx, exchange_created, info(Exchange)),
-              Exchange;
-          ({existing, Exchange}, _Tx) ->
-              Exchange;
-          (Err, _Tx) ->
-              Err
-      end).
+    %% Avoid a channel exception if there's a race condition
+    %% with an exchange.delete operation.
+    %%
+    %% See rabbitmq/rabbitmq-federation#7.
+    case rabbit_runtime_parameters:lookup(XName#resource.virtual_host,
+                                          ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT,
+                                          XName#resource.name) of
+        not_found ->
+            rabbit_misc:execute_mnesia_transaction(
+              fun () ->
+                      case mnesia:wread({rabbit_exchange, XName}) of
+                          [] ->
+                              {new, store(X)};
+                          [ExistingX] ->
+                              {existing, ExistingX}
+                      end
+              end,
+              fun ({new, Exchange}, Tx) ->
+                      ok = callback(X, create, map_create_tx(Tx), [Exchange]),
+                      rabbit_event:notify_if(not Tx, exchange_created, info(Exchange)),
+                      Exchange;
+                  ({existing, Exchange}, _Tx) ->
+                      Exchange;
+                  (Err, _Tx) ->
+                      Err
+              end);
+        _ ->
+            rabbit_log:warning("ignoring exchange.declare for exchange ~p,
+                                exchange.delete in progress~n.", [XName]),
+            X
+    end.
 
 map_create_tx(true)  -> transaction;
 map_create_tx(false) -> none.
@@ -340,6 +352,10 @@ info_all(VHostPath) -> map(VHostPath, fun (X) -> info(X) end).
 
 info_all(VHostPath, Items) -> map(VHostPath, fun (X) -> info(X, Items) end).
 
+info_all(VHostPath, Items, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map(
+      AggregatorPid, Ref, fun(X) -> info(X, Items) end, list(VHostPath)).
+
 route(#exchange{name = #resource{virtual_host = VHost, name = RName} = XName,
                 decorators = Decorators} = X,
       #delivery{message = #basic_message{routing_keys = RKs}} = Delivery) ->
@@ -420,18 +436,31 @@ delete(XName, IfUnused) ->
               true  -> fun conditional_delete/2;
               false -> fun unconditional_delete/2
           end,
-    call_with_exchange(
-      XName,
-      fun (X) ->
-              case Fun(X, false) of
-                  {deleted, X, Bs, Deletions} ->
-                      rabbit_binding:process_deletions(
-                        rabbit_binding:add_deletion(
-                          XName, {X, deleted, Bs}, Deletions));
-                  {error, _InUseOrNotFound} = E ->
-                      rabbit_misc:const(E)
-              end
-      end).
+    try
+        %% guard exchange.declare operations from failing when there's
+        %% a race condition between it and an exchange.delete.
+        %%
+        %% see rabbitmq/rabbitmq-federation#7
+        rabbit_runtime_parameters:set(XName#resource.virtual_host,
+                                      ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT,
+                                      XName#resource.name, true, none),
+        call_with_exchange(
+          XName,
+          fun (X) ->
+                  case Fun(X, false) of
+                      {deleted, X, Bs, Deletions} ->
+                          rabbit_binding:process_deletions(
+                            rabbit_binding:add_deletion(
+                              XName, {X, deleted, Bs}, Deletions));
+                      {error, _InUseOrNotFound} = E ->
+                          rabbit_misc:const(E)
+                  end
+          end)
+    after
+        rabbit_runtime_parameters:clear(XName#resource.virtual_host,
+                                        ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT,
+                                        XName#resource.name)
+    end.
 
 validate_binding(X = #exchange{type = XType}, Binding) ->
     Module = type_to_module(XType),
diff --git a/deps/rabbit/src/rabbit_exchange_parameters.erl b/deps/rabbit/src/rabbit_exchange_parameters.erl
new file mode 100644 (file)
index 0000000..c0ca0a9
--- /dev/null
@@ -0,0 +1,49 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_exchange_parameters).
+
+-behaviour(rabbit_runtime_parameter).
+
+-include("rabbit.hrl").
+
+-export([register/0]).
+-export([validate/5, notify/4, notify_clear/3]).
+
+-import(rabbit_misc, [pget/2]).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "exchange parameters"},
+                    {mfa, {rabbit_exchange_parameters, register, []}},
+                    {requires, rabbit_registry},
+                    {enables, recovery}]}).
+
+register() ->
+    rabbit_registry:register(runtime_parameter,
+                             ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT, ?MODULE),
+    %% ensure there are no leftovers from before node restart/crash
+    rabbit_runtime_parameters:clear_component(
+      ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT),
+    ok.
+
+validate(_VHost, ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT, _Name, _Term, _User) ->
+    ok.
+
+notify(_VHost, ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT, _Name, _Term) ->
+    ok.
+
+notify_clear(_VHost, ?EXCHANGE_DELETE_IN_PROGRESS_COMPONENT, _Name) ->
+    ok.
similarity index 96%
rename from rabbitmq-server/src/rabbit_exchange_type_direct.erl
rename to deps/rabbit/src/rabbit_exchange_type_direct.erl
index 5c4ab3cf464bb62c85ed6e4bb9c0d2edf3b70bc3..8a6886e37690bd23c5f9285cc4dc5a4bf82d8027 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_type_direct).
similarity index 96%
rename from rabbitmq-server/src/rabbit_exchange_type_fanout.erl
rename to deps/rabbit/src/rabbit_exchange_type_fanout.erl
index 67d88d61af94c978e282632d74ed1a8ac67d95f3..d81e407f8f571b2216a17db84270560a771e21b8 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_type_fanout).
similarity index 73%
rename from rabbitmq-server/src/rabbit_exchange_type_headers.erl
rename to deps/rabbit/src/rabbit_exchange_type_headers.erl
index b56efdfaf6d56bf3ca8e0faeadace8f346e8befa..196873aa22dba2a2f69f8f2f3863a86c9e55e90b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_type_headers).
                     {requires,    rabbit_registry},
                     {enables,     kernel_ready}]}).
 
--ifdef(use_specs).
--spec(headers_match/2 :: (rabbit_framing:amqp_table(),
-                          rabbit_framing:amqp_table()) -> boolean()).
--endif.
+-spec headers_match
+        (rabbit_framing:amqp_table(), rabbit_framing:amqp_table()) ->
+            boolean().
 
 description() ->
     [{description, <<"AMQP headers exchange, as per the AMQP specification">>}].
@@ -85,35 +84,51 @@ headers_match(Args, Data) ->
     MK = parse_x_match(rabbit_misc:table_lookup(Args, <<"x-match">>)),
     headers_match(Args, Data, true, false, MK).
 
-headers_match([], _Data, AllMatch, _AnyMatch, all) ->
-    AllMatch;
-headers_match([], _Data, _AllMatch, AnyMatch, any) ->
-    AnyMatch;
+% A bit less horrendous algorithm :)
+headers_match(_, _, false, _, all) -> false;
+headers_match(_, _, _, true, any) -> true;
+
+% No more bindings, return current state
+headers_match([], _Data, AllMatch, _AnyMatch, all) -> AllMatch;
+headers_match([], _Data, _AllMatch, AnyMatch, any) -> AnyMatch;
+
+% Delete bindings starting with x-
 headers_match([{<<"x-", _/binary>>, _PT, _PV} | PRest], Data,
               AllMatch, AnyMatch, MatchKind) ->
     headers_match(PRest, Data, AllMatch, AnyMatch, MatchKind);
+
+% No more data, but still bindings, false with all
 headers_match(_Pattern, [], _AllMatch, AnyMatch, MatchKind) ->
     headers_match([], [], false, AnyMatch, MatchKind);
+
+% Data key header not in binding, go next data
 headers_match(Pattern = [{PK, _PT, _PV} | _], [{DK, _DT, _DV} | DRest],
               AllMatch, AnyMatch, MatchKind) when PK > DK ->
     headers_match(Pattern, DRest, AllMatch, AnyMatch, MatchKind);
+
+% Binding key header not in data, false with all, go next binding
 headers_match([{PK, _PT, _PV} | PRest], Data = [{DK, _DT, _DV} | _],
               _AllMatch, AnyMatch, MatchKind) when PK < DK ->
     headers_match(PRest, Data, false, AnyMatch, MatchKind);
-headers_match([{PK, PT, PV} | PRest], [{DK, DT, DV} | DRest],
-              AllMatch, AnyMatch, MatchKind) when PK == DK ->
-    {AllMatch1, AnyMatch1} =
-        case rabbit_misc:type_class(PT) == rabbit_misc:type_class(DT) of
-            %% It's not properly specified, but a "no value" in a
-            %% pattern field is supposed to mean simple presence of
-            %% the corresponding data field. I've interpreted that to
-            %% mean a type of "void" for the pattern field.
-            _ when PT == void -> {AllMatch, true};
-            false             -> {false, AnyMatch};
-            _ when PV == DV   -> {AllMatch, true};
-            _                 -> {false, AnyMatch}
-        end,
-    headers_match(PRest, DRest, AllMatch1, AnyMatch1, MatchKind).
+
+%% It's not properly specified, but a "no value" in a
+%% pattern field is supposed to mean simple presence of
+%% the corresponding data field. I've interpreted that to
+%% mean a type of "void" for the pattern field.
+headers_match([{PK, void, _PV} | PRest], [{DK, _DT, _DV} | DRest],
+              AllMatch, _AnyMatch, MatchKind) when PK == DK ->
+    headers_match(PRest, DRest, AllMatch, true, MatchKind);
+
+% Complete match, true with any, go next
+headers_match([{PK, _PT, PV} | PRest], [{DK, _DT, DV} | DRest],
+              AllMatch, _AnyMatch, MatchKind) when PK == DK andalso PV == DV ->
+    headers_match(PRest, DRest, AllMatch, true, MatchKind);
+
+% Value does not match, false with all, go next
+headers_match([{PK, _PT, _PV} | PRest], [{DK, _DT, _DV} | DRest],
+              _AllMatch, AnyMatch, MatchKind) when PK == DK ->
+    headers_match(PRest, DRest, false, AnyMatch, MatchKind).
+
 
 validate(_X) -> ok.
 create(_Tx, _X) -> ok.
similarity index 89%
rename from rabbitmq-server/src/rabbit_exchange_type_invalid.erl
rename to deps/rabbit/src/rabbit_exchange_type_invalid.erl
index 283bd494ed58f1c29dd31aa76c2074e4bf828619..2510c8a241c37701b22c58ca3fcc6ac6a24e10a7 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_type_invalid).
@@ -31,10 +31,8 @@ description() ->
 
 serialise_events() -> false.
 
--ifdef(use_specs).
--spec(route/2 :: (rabbit_types:exchange(), rabbit_types:delivery())
-                 -> no_return()).
--endif.
+-spec route(rabbit_types:exchange(), rabbit_types:delivery()) -> no_return().
+
 route(#exchange{name = Name, type = Type}, _) ->
     rabbit_misc:protocol_error(
       precondition_failed,
similarity index 99%
rename from rabbitmq-server/src/rabbit_exchange_type_topic.erl
rename to deps/rabbit/src/rabbit_exchange_type_topic.erl
index 53fb7625393f27f62dbaed793326142f4bd4bef2..0eccb66cfd164cdfadeea39c424751014b42f98a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_type_topic).
similarity index 87%
rename from rabbitmq-server/src/rabbit_file.erl
rename to deps/rabbit/src/rabbit_file.erl
index 6c4f0e5ccde07e8190908ad124d351a5c619f118..878b9da7a73b4be7c94f36801635276c84a3ac92 100644 (file)
@@ -23,6 +23,7 @@
 -export([append_file/2, ensure_parent_dirs_exist/1]).
 -export([rename/2, delete/1, recursive_delete/1, recursive_copy/2]).
 -export([lock_file/1]).
+-export([filename_as_a_directory/1]).
 
 -import(file_handle_cache, [with_handle/1, with_handle/2]).
 
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(ok_or_error() :: rabbit_types:ok_or_error(any())).
-
--spec(is_file/1 :: ((file:filename())) -> boolean()).
--spec(is_dir/1 :: ((file:filename())) -> boolean()).
--spec(file_size/1 :: ((file:filename())) -> non_neg_integer()).
--spec(ensure_dir/1 :: ((file:filename())) -> ok_or_error()).
--spec(wildcard/2 :: (string(), file:filename()) -> [file:filename()]).
--spec(list_dir/1 :: (file:filename()) -> rabbit_types:ok_or_error2(
-                                           [file:filename()], any())).
--spec(read_term_file/1 ::
-        (file:filename()) -> {'ok', [any()]} | rabbit_types:error(any())).
--spec(write_term_file/2 :: (file:filename(), [any()]) -> ok_or_error()).
--spec(write_file/2 :: (file:filename(), iodata()) -> ok_or_error()).
--spec(write_file/3 :: (file:filename(), iodata(), [any()]) -> ok_or_error()).
--spec(append_file/2 :: (file:filename(), string()) -> ok_or_error()).
--spec(ensure_parent_dirs_exist/1 :: (string()) -> 'ok').
--spec(rename/2 ::
-        (file:filename(), file:filename()) -> ok_or_error()).
--spec(delete/1 :: ([file:filename()]) -> ok_or_error()).
--spec(recursive_delete/1 ::
-        ([file:filename()])
-        -> rabbit_types:ok_or_error({file:filename(), any()})).
--spec(recursive_copy/2 ::
-        (file:filename(), file:filename())
-        -> rabbit_types:ok_or_error({file:filename(), file:filename(), any()})).
--spec(lock_file/1 :: (file:filename()) -> rabbit_types:ok_or_error('eexist')).
-
--endif.
+-type ok_or_error() :: rabbit_types:ok_or_error(any()).
+
+-spec is_file((file:filename())) -> boolean().
+-spec is_dir((file:filename())) -> boolean().
+-spec file_size((file:filename())) -> non_neg_integer().
+-spec ensure_dir((file:filename())) -> ok_or_error().
+-spec wildcard(string(), file:filename()) -> [file:filename()].
+-spec list_dir(file:filename()) ->
+          rabbit_types:ok_or_error2([file:filename()], any()).
+-spec read_term_file
+        (file:filename()) -> {'ok', [any()]} | rabbit_types:error(any()).
+-spec write_term_file(file:filename(), [any()]) -> ok_or_error().
+-spec write_file(file:filename(), iodata()) -> ok_or_error().
+-spec write_file(file:filename(), iodata(), [any()]) -> ok_or_error().
+-spec append_file(file:filename(), string()) -> ok_or_error().
+-spec ensure_parent_dirs_exist(string()) -> 'ok'.
+-spec rename(file:filename(), file:filename()) -> ok_or_error().
+-spec delete([file:filename()]) -> ok_or_error().
+-spec recursive_delete([file:filename()]) ->
+          rabbit_types:ok_or_error({file:filename(), any()}).
+-spec recursive_copy(file:filename(), file:filename()) ->
+          rabbit_types:ok_or_error({file:filename(), file:filename(), any()}).
+-spec lock_file(file:filename()) -> rabbit_types:ok_or_error('eexist').
+-spec filename_as_a_directory(file:filename()) -> file:filename().
 
 %%----------------------------------------------------------------------------
 
@@ -305,3 +300,11 @@ lock_file(Path) ->
                              ok = prim_file:close(Lock)
                    end)
     end.
+
+filename_as_a_directory(FileName) ->
+    case lists:last(FileName) of
+        "/" ->
+            FileName;
+        _ ->
+            FileName ++ "/"
+    end.
similarity index 90%
rename from rabbitmq-server/src/rabbit_framing.erl
rename to deps/rabbit/src/rabbit_framing.erl
index d5f46e7034e1a4f9a26ee9435cd5392b4b4313c2..e4a5013003f247ad772c4b06e0067b71aedae12c 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% TODO auto-generate
 
 -module(rabbit_framing).
 
--ifdef(use_specs).
-
 -export_type([protocol/0,
               amqp_field_type/0, amqp_property_type/0,
               amqp_table/0, amqp_array/0, amqp_value/0,
@@ -27,7 +25,7 @@
               amqp_method_field_name/0, amqp_property_record/0,
               amqp_exception/0, amqp_exception_code/0, amqp_class_id/0]).
 
--type(protocol() :: 'rabbit_framing_amqp_0_8' | 'rabbit_framing_amqp_0_9_1').
+-type protocol() :: 'rabbit_framing_amqp_0_8' | 'rabbit_framing_amqp_0_9_1'.
 
 -define(protocol_type(T), type(T :: rabbit_framing_amqp_0_8:T |
                                     rabbit_framing_amqp_0_9_1:T)).
@@ -45,5 +43,3 @@
 -?protocol_type(amqp_exception()).
 -?protocol_type(amqp_exception_code()).
 -?protocol_type(amqp_class_id()).
-
--endif.
similarity index 93%
rename from rabbitmq-server/src/rabbit_guid.erl
rename to deps/rabbit/src/rabbit_guid.erl
index 0a9fe4d873282b820ae04d3b6c8ff212ca36859b..75f9df7b3faefe24a894c699d36fe73640680e9c 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_guid).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([guid/0]).
 
--type(guid() :: binary()).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(filename/0 :: () -> string()).
--spec(gen/0 :: () -> guid()).
--spec(gen_secure/0 :: () -> guid()).
--spec(string/2 :: (guid(), any()) -> string()).
--spec(binary/2 :: (guid(), any()) -> binary()).
+-type guid() :: binary().
 
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec filename() -> string().
+-spec gen() -> guid().
+-spec gen_secure() -> guid().
+-spec string(guid(), any()) -> string().
+-spec binary(guid(), any()) -> binary().
 
 %%----------------------------------------------------------------------------
 
diff --git a/deps/rabbit/src/rabbit_hipe.erl b/deps/rabbit/src/rabbit_hipe.erl
new file mode 100644 (file)
index 0000000..d4597d4
--- /dev/null
@@ -0,0 +1,146 @@
+-module(rabbit_hipe).
+
+%% HiPE compilation uses multiple cores anyway, but some bits are
+%% IO-bound so we can go faster if we parallelise a bit more. In
+%% practice 2 processes seems just as fast as any other number > 1,
+%% and keeps the progress bar realistic-ish.
+-define(HIPE_PROCESSES, 2).
+
+-export([maybe_hipe_compile/0, log_hipe_result/1]).
+-export([compile_to_directory/1]).
+-export([can_hipe_compile/0]).
+
+%% Compile and load during server startup sequence
+maybe_hipe_compile() ->
+    {ok, Want} = application:get_env(rabbit, hipe_compile),
+    case {Want, can_hipe_compile()} of
+        {true,  true}  -> hipe_compile();
+        {true,  false} -> false;
+        {false, _}     -> {ok, disabled}
+    end.
+
+log_hipe_result({ok, disabled}) ->
+    ok;
+log_hipe_result({ok, already_compiled}) ->
+    rabbit_log:info(
+      "HiPE in use: modules already natively compiled.~n", []);
+log_hipe_result({ok, Count, Duration}) ->
+    rabbit_log:info(
+      "HiPE in use: compiled ~B modules in ~Bs.~n", [Count, Duration]);
+log_hipe_result(false) ->
+    io:format(
+      "~nNot HiPE compiling: HiPE not found in this Erlang installation.~n"),
+    rabbit_log:warning(
+      "Not HiPE compiling: HiPE not found in this Erlang installation.~n").
+
+hipe_compile() ->
+    hipe_compile(fun compile_and_load/1, false).
+
+compile_to_directory(Dir0) ->
+    Dir = rabbit_file:filename_as_a_directory(Dir0),
+    ok = prepare_ebin_directory(Dir),
+    hipe_compile(fun (Mod) -> compile_and_save(Mod, Dir) end, true).
+
+needs_compilation(Mod, Force) ->
+    Exists = code:which(Mod) =/= non_existing,
+    %% We skip modules already natively compiled. This
+    %% happens when RabbitMQ is stopped (just the
+    %% application, not the entire node) and started
+    %% again.
+    NotYetCompiled = not already_hipe_compiled(Mod),
+    NotVersioned = not compiled_with_version_support(Mod),
+    Exists andalso (Force orelse (NotYetCompiled andalso NotVersioned)).
+
+%% HiPE compilation happens before we have log handlers and can take a
+%% long time, so make an exception to our no-stdout policy and display
+%% progress via stdout.
+hipe_compile(CompileFun, Force) ->
+    {ok, HipeModulesAll} = application:get_env(rabbit, hipe_modules),
+    HipeModules = lists:filter(fun(Mod) -> needs_compilation(Mod, Force) end, HipeModulesAll),
+    case HipeModules of
+        [] -> {ok, already_compiled};
+        _  -> do_hipe_compile(HipeModules, CompileFun)
+    end.
+
+already_hipe_compiled(Mod) ->
+    try
+    %% OTP 18.x or later
+        Mod:module_info(native) =:= true
+    %% OTP prior to 18.x
+    catch error:badarg ->
+        code:is_module_native(Mod) =:= true
+    end.
+
+compiled_with_version_support(Mod) ->
+    proplists:get_value(erlang_version_support, Mod:module_info(attributes))
+        =/= undefined.
+
+do_hipe_compile(HipeModules, CompileFun) ->
+    Count = length(HipeModules),
+    io:format("~nHiPE compiling:  |~s|~n                 |",
+              [string:copies("-", Count)]),
+    T1 = time_compat:monotonic_time(),
+    %% We use code:get_object_code/1 below to get the beam binary,
+    %% instead of letting hipe get it itself, because hipe:c/{1,2}
+    %% expects the given filename to actually exist on disk: it does not
+    %% work with an EZ archive (rabbit_common is one).
+    %%
+    %% Then we use the mode advanced hipe:compile/4 API because the
+    %% simpler hipe:c/3 is not exported (as of Erlang 18.1.4). This
+    %% advanced API does not load automatically the code, except if the
+    %% 'load' option is set.
+    PidMRefs = [spawn_monitor(fun () -> [begin
+                                             CompileFun(M),
+                                             io:format("#")
+                                         end || M <- Ms]
+                              end) ||
+                   Ms <- split(HipeModules, ?HIPE_PROCESSES)],
+    [receive
+         {'DOWN', MRef, process, _, normal} -> ok;
+         {'DOWN', MRef, process, _, Reason} -> exit(Reason)
+     end || {_Pid, MRef} <- PidMRefs],
+    T2 = time_compat:monotonic_time(),
+    Duration = time_compat:convert_time_unit(T2 - T1, native, seconds),
+    io:format("|~n~nCompiled ~B modules in ~Bs~n", [Count, Duration]),
+    {ok, Count, Duration}.
+
+split(L, N) -> split0(L, [[] || _ <- lists:seq(1, N)]).
+
+split0([],       Ls)       -> Ls;
+split0([I | Is], [L | Ls]) -> split0(Is, Ls ++ [[I | L]]).
+
+prepare_ebin_directory(Dir) ->
+    ok = rabbit_file:ensure_dir(Dir),
+    ok = delete_beam_files(Dir),
+    ok.
+
+delete_beam_files(Dir) ->
+    {ok, Files} = file:list_dir(Dir),
+    lists:foreach(fun(File) ->
+                          case filename:extension(File) of
+                              ".beam" ->
+                                  ok = file:delete(filename:join([Dir, File]));
+                              _ ->
+                                  ok
+                          end
+                  end,
+                  Files).
+
+compile_and_load(Mod) ->
+    {Mod, Beam, _} = code:get_object_code(Mod),
+    {ok, _} = hipe:compile(Mod, [], Beam, [o3, load]).
+
+compile_and_save(Module, Dir) ->
+    {Module, BeamCode, _} = code:get_object_code(Module),
+    BeamName = filename:join([Dir, atom_to_list(Module) ++ ".beam"]),
+    {ok, {Architecture, NativeCode}} = hipe:compile(Module, [], BeamCode, [o3]),
+    {ok, _, Chunks0} = beam_lib:all_chunks(BeamCode),
+    ChunkName = hipe_unified_loader:chunk_name(Architecture),
+    Chunks1 = lists:keydelete(ChunkName, 1, Chunks0),
+    Chunks = Chunks1 ++ [{ChunkName,NativeCode}],
+    {ok, BeamPlusNative} = beam_lib:build_module(Chunks),
+    ok = file:write_file(BeamName, BeamPlusNative),
+    BeamName.
+
+can_hipe_compile() ->
+    code:which(hipe) =/= non_existing.
similarity index 90%
rename from rabbitmq-server/src/rabbit_limiter.erl
rename to deps/rabbit/src/rabbit_limiter.erl
index 5f148a5284f74660e87de40a18033169099a379c..203e309b029504eb902822759b45b45ce5b91003 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% The purpose of the limiter is to stem the flow of messages from
 -record(lstate, {pid, prefetch_limited}).
 -record(qstate, {pid, state, credits}).
 
--ifdef(use_specs).
-
--type(lstate() :: #lstate{pid              :: pid(),
-                          prefetch_limited :: boolean()}).
--type(qstate() :: #qstate{pid :: pid(),
-                          state :: 'dormant' | 'active' | 'suspended'}).
-
--type(credit_mode() :: 'manual' | 'drain' | 'auto').
-
--spec(start_link/1 :: (rabbit_types:proc_name()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(new/1 :: (pid()) -> lstate()).
-
--spec(limit_prefetch/3      :: (lstate(), non_neg_integer(), non_neg_integer())
-                               -> lstate()).
--spec(unlimit_prefetch/1    :: (lstate()) -> lstate()).
--spec(is_active/1           :: (lstate()) -> boolean()).
--spec(get_prefetch_limit/1  :: (lstate()) -> non_neg_integer()).
--spec(ack/2                 :: (lstate(), non_neg_integer()) -> 'ok').
--spec(pid/1                 :: (lstate()) -> pid()).
-
--spec(client/1       :: (pid()) -> qstate()).
--spec(activate/1     :: (qstate()) -> qstate()).
--spec(can_send/3     :: (qstate(), boolean(), rabbit_types:ctag()) ->
-                             {'continue' | 'suspend', qstate()}).
--spec(resume/1       :: (qstate()) -> qstate()).
--spec(deactivate/1   :: (qstate()) -> qstate()).
--spec(is_suspended/1 :: (qstate()) -> boolean()).
--spec(is_consumer_blocked/2 :: (qstate(), rabbit_types:ctag()) -> boolean()).
--spec(credit/5 :: (qstate(), rabbit_types:ctag(), non_neg_integer(),
-                   credit_mode(), boolean()) -> {boolean(), qstate()}).
--spec(ack_from_queue/3 :: (qstate(), rabbit_types:ctag(), non_neg_integer())
-                          -> {boolean(), qstate()}).
--spec(drained/1 :: (qstate())
-                   -> {[{rabbit_types:ctag(), non_neg_integer()}], qstate()}).
--spec(forget_consumer/2 :: (qstate(), rabbit_types:ctag()) -> qstate()).
-
--endif.
+-type lstate() :: #lstate{pid              :: pid(),
+                          prefetch_limited :: boolean()}.
+-type qstate() :: #qstate{pid :: pid(),
+                          state :: 'dormant' | 'active' | 'suspended'}.
+
+-type credit_mode() :: 'manual' | 'drain' | 'auto'.
+
+-spec start_link(rabbit_types:proc_name()) ->
+                           rabbit_types:ok_pid_or_error().
+-spec new(pid()) -> lstate().
+
+-spec limit_prefetch(lstate(), non_neg_integer(), non_neg_integer()) ->
+          lstate().
+-spec unlimit_prefetch(lstate()) -> lstate().
+-spec is_active(lstate()) -> boolean().
+-spec get_prefetch_limit(lstate()) -> non_neg_integer().
+-spec ack(lstate(), non_neg_integer()) -> 'ok'.
+-spec pid(lstate()) -> pid().
+
+-spec client(pid()) -> qstate().
+-spec activate(qstate()) -> qstate().
+-spec can_send(qstate(), boolean(), rabbit_types:ctag()) ->
+          {'continue' | 'suspend', qstate()}.
+-spec resume(qstate()) -> qstate().
+-spec deactivate(qstate()) -> qstate().
+-spec is_suspended(qstate()) -> boolean().
+-spec is_consumer_blocked(qstate(), rabbit_types:ctag()) -> boolean().
+-spec credit
+        (qstate(), rabbit_types:ctag(), non_neg_integer(), credit_mode(),
+         boolean()) ->
+            {boolean(), qstate()}.
+-spec ack_from_queue(qstate(), rabbit_types:ctag(), non_neg_integer()) ->
+          {boolean(), qstate()}.
+-spec drained(qstate()) ->
+          {[{rabbit_types:ctag(), non_neg_integer()}], qstate()}.
+-spec forget_consumer(qstate(), rabbit_types:ctag()) -> qstate().
 
 %%----------------------------------------------------------------------------
 
@@ -434,7 +432,7 @@ notify_queues(State = #lim{ch_pid = ChPid, queues = Queues}) ->
             %% We randomly vary the position of queues in the list,
             %% thus ensuring that each queue has an equal chance of
             %% being notified first.
-            {L1, L2} = lists:split(random:uniform(L), QList),
+            {L1, L2} = lists:split(rand_compat:uniform(L), QList),
             [[ok = rabbit_amqqueue:resume(Q, ChPid) || Q <- L3]
              || L3 <- [L2, L1]],
             ok
similarity index 75%
rename from rabbitmq-server/src/rabbit_log.erl
rename to deps/rabbit/src/rabbit_log.erl
index 083204df058da22347a1e2bcd52ab98d19f16b4b..337fb23f840b5ee617bc37e71837c37f9295ebdd 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_log).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([level/0]).
 
--type(category() :: atom()).
--type(level() :: 'debug' | 'info' | 'warning' | 'error').
-
--spec(log/3 :: (category(), level(), string()) -> 'ok').
--spec(log/4 :: (category(), level(), string(), [any()]) -> 'ok').
+-type category() :: atom().
+-type level() :: 'debug' | 'info' | 'warning' | 'error'.
 
--spec(debug/1   :: (string()) -> 'ok').
--spec(debug/2   :: (string(), [any()]) -> 'ok').
--spec(info/1    :: (string()) -> 'ok').
--spec(info/2    :: (string(), [any()]) -> 'ok').
--spec(warning/1 :: (string()) -> 'ok').
--spec(warning/2 :: (string(), [any()]) -> 'ok').
--spec(error/1   :: (string()) -> 'ok').
--spec(error/2   :: (string(), [any()]) -> 'ok').
+-spec log(category(), level(), string()) -> 'ok'.
+-spec log(category(), level(), string(), [any()]) -> 'ok'.
 
--spec(with_local_io/1 :: (fun (() -> A)) -> A).
+-spec debug(string()) -> 'ok'.
+-spec debug(string(), [any()]) -> 'ok'.
+-spec info(string()) -> 'ok'.
+-spec info(string(), [any()]) -> 'ok'.
+-spec warning(string()) -> 'ok'.
+-spec warning(string(), [any()]) -> 'ok'.
+-spec error(string()) -> 'ok'.
+-spec error(string(), [any()]) -> 'ok'.
 
--endif.
+-spec with_local_io(fun (() -> A)) -> A.
 
 %%----------------------------------------------------------------------------
 
@@ -96,10 +92,20 @@ with_local_io(Fun) ->
     Node = node(),
     case node(GL) of
         Node -> Fun();
-        _    -> group_leader(whereis(user), self()),
+        _    -> set_group_leader_to_user_safely(whereis(user)),
                 try
                     Fun()
                 after
                     group_leader(GL, self())
                 end
     end.
+
+set_group_leader_to_user_safely(undefined) ->
+    handle_damaged_io_system();
+set_group_leader_to_user_safely(User) when is_pid(User) ->
+    group_leader(User, self()).
+
+handle_damaged_io_system() ->
+    Msg = "Erlang VM I/O system is damaged, restart needed~n",
+    io:format(standard_error, Msg, []),
+    exit(erlang_vm_restart_needed).
similarity index 94%
rename from rabbitmq-server/src/rabbit_memory_monitor.erl
rename to deps/rabbit/src/rabbit_memory_monitor.erl
index 7aa29fc42332d973adcf5cd28e648c70fba734ef..6fd12b30ff74fe20df7c5bdd80af83daf46f3800 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 
@@ -41,7 +41,6 @@
                }).
 
 -define(SERVER, ?MODULE).
--define(DEFAULT_UPDATE_INTERVAL, 2500).
 -define(TABLE_NAME, ?MODULE).
 
 %% If all queues are pushed to disk (duration 0), then the sum of
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(register/2 :: (pid(), {atom(),atom(),[any()]}) -> 'ok').
--spec(deregister/1 :: (pid()) -> 'ok').
--spec(report_ram_duration/2 ::
-        (pid(), float() | 'infinity') -> number() | 'infinity').
--spec(stop/0 :: () -> 'ok').
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec register(pid(), {atom(),atom(),[any()]}) -> 'ok'.
+-spec deregister(pid()) -> 'ok'.
+-spec report_ram_duration
+        (pid(), float() | 'infinity') -> number() | 'infinity'.
+-spec stop() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 %% Public API
@@ -87,7 +82,9 @@ report_ram_duration(Pid, QueueDuration) ->
 stop() ->
     gen_server2:cast(?SERVER, stop).
 
-conserve_resources(Pid, disk, Conserve) ->
+%% Paging should be enabled/disabled only in response to disk resource alarms
+%% for the current node.
+conserve_resources(Pid, disk, {_, Conserve, Node}) when node(Pid) =:= Node ->
     gen_server2:cast(Pid, {disk_alarm, Conserve});
 conserve_resources(_Pid, _Source, _Conserve) ->
     ok.
@@ -110,7 +107,8 @@ memory_use(ratio) ->
 %%----------------------------------------------------------------------------
 
 init([]) ->
-    {ok, TRef} = timer:send_interval(?DEFAULT_UPDATE_INTERVAL, update),
+    {ok, Interval} = application:get_env(rabbit, memory_monitor_interval),
+    {ok, TRef} = timer:send_interval(Interval, update),
 
     Ets = ets:new(?TABLE_NAME, [set, private, {keypos, #process.pid}]),
     Alarms = rabbit_alarm:register(self(), {?MODULE, conserve_resources, []}),
similarity index 91%
rename from rabbitmq-server/src/rabbit_mirror_queue_coordinator.erl
rename to deps/rabbit/src/rabbit_mirror_queue_coordinator.erl
index 77a145a9cfdcbbdf688f83a1c60f27b8fb91ee12..562f0f0fcfbef67e5f2c95123b9ed30833e0c97b 100644 (file)
                  depth_fun
                }).
 
--ifdef(use_specs).
-
--spec(start_link/4 :: (rabbit_types:amqqueue(), pid() | 'undefined',
-                       rabbit_mirror_queue_master:death_fun(),
-                       rabbit_mirror_queue_master:depth_fun()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(get_gm/1 :: (pid()) -> pid()).
--spec(ensure_monitoring/2 :: (pid(), [pid()]) -> 'ok').
-
--endif.
+-spec start_link
+        (rabbit_types:amqqueue(), pid() | 'undefined',
+         rabbit_mirror_queue_master:death_fun(),
+         rabbit_mirror_queue_master:depth_fun()) ->
+            rabbit_types:ok_pid_or_error().
+-spec get_gm(pid()) -> pid().
+-spec ensure_monitoring(pid(), [pid()]) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 %%
@@ -358,17 +355,34 @@ handle_cast({gm_deaths, DeadGMPids},
                                                    DeadPids),
             rabbit_mirror_queue_misc:add_mirrors(QueueName, ExtraNodes, async),
             noreply(State);
+        {ok, _MPid0, DeadPids, _ExtraNodes} ->
+            %% see rabbitmq-server#914;
+            %% Different slave is now master, stop current coordinator normally.
+            %% Initiating queue is now slave and the least we could do is report
+            %% deaths which we 'think' we saw.
+            %% NOTE: Reported deaths here could be inconsistent.
+            rabbit_mirror_queue_misc:report_deaths(MPid, false, QueueName,
+                                                   DeadPids),
+            {stop, shutdown, State};
         {error, not_found} ->
             {stop, normal, State}
     end;
 
-handle_cast(request_depth, State = #state { depth_fun = DepthFun }) ->
-    ok = DepthFun(),
-    noreply(State);
+handle_cast(request_depth, State = #state { depth_fun = DepthFun,
+                                           q  = #amqqueue { name = QName, pid = MPid }}) ->
+    case rabbit_amqqueue:lookup(QName) of
+       {ok, #amqqueue{ pid = MPid }} ->
+           ok = DepthFun(),
+           noreply(State);
+       _ ->
+           {stop, shutdown, State}
+    end;
 
 handle_cast({ensure_monitoring, Pids}, State = #state { monitors = Mons }) ->
     noreply(State #state { monitors = pmon:monitor_all(Pids, Mons) });
 
+handle_cast({delete_and_terminate, {shutdown, ring_shutdown}}, State) ->
+    {stop, normal, State};
 handle_cast({delete_and_terminate, Reason}, State) ->
     {stop, Reason, State}.
 
@@ -416,13 +430,22 @@ handle_msg([CPid], _From, request_depth = Msg) ->
     ok = gen_server2:cast(CPid, Msg);
 handle_msg([CPid], _From, {ensure_monitoring, _Pids} = Msg) ->
     ok = gen_server2:cast(CPid, Msg);
-handle_msg([CPid], _From, {delete_and_terminate, _Reason} = Msg) ->
-    ok = gen_server2:cast(CPid, Msg),
+handle_msg([_CPid], _From, {delete_and_terminate, _Reason}) ->
+    %% We tell GM to stop, but we don't instruct the coordinator to
+    %% stop yet. The GM will first make sure all pending messages were
+    %% actually delivered. Then it calls handle_terminate/2 below so the
+    %% coordinator is stopped.
+    %%
+    %% If we stop the coordinator right now, remote slaves could see the
+    %% coordinator DOWN before delete_and_terminate was delivered to all
+    %% GMs. One of those GM would be promoted as the master, and this GM
+    %% would hang forever, waiting for other GMs to stop.
     {stop, {shutdown, ring_shutdown}};
 handle_msg([_CPid], _From, _Msg) ->
     ok.
 
-handle_terminate([_CPid], _Reason) ->
+handle_terminate([CPid], Reason) ->
+    ok = gen_server2:cast(CPid, {delete_and_terminate, Reason}),
     ok.
 
 %% ---------------------------------------------------------------------------
similarity index 75%
rename from rabbitmq-server/src/rabbit_mirror_queue_master.erl
rename to deps/rabbit/src/rabbit_mirror_queue_master.erl
index 7890128872452635b2ee4a97c1330176edd18347..d82cdf336abf4f200ae9e8cfad9c131fb87016b8 100644 (file)
 
 -export([init/3, terminate/2, delete_and_terminate/2,
          purge/1, purge_acks/1, publish/6, publish_delivered/5,
+         batch_publish/4, batch_publish_delivered/4,
          discard/4, fetch/2, drop/2, ack/2, requeue/2, ackfold/4, fold/3,
          len/1, is_empty/1, depth/1, drain_confirmed/1,
          dropwhile/2, fetchwhile/4, set_ram_duration_target/2, ram_duration/1,
          needs_timeout/1, timeout/1, handle_pre_hibernate/1, resume/1,
-         msg_rates/1, info/2, invoke/3, is_duplicate/2]).
+         msg_rates/1, info/2, invoke/3, is_duplicate/2, set_queue_mode/2,
+         zip_msgs_and_acks/4]).
 
 -export([start/1, stop/0, delete_crashed/1]).
 
                  backing_queue_state,
                  seen_status,
                  confirmed,
-                 known_senders
+                 known_senders,
+                 wait_timeout
                }).
 
--ifdef(use_specs).
-
 -export_type([death_fun/0, depth_fun/0, stats_fun/0]).
 
--type(death_fun() :: fun ((pid()) -> 'ok')).
--type(depth_fun() :: fun (() -> 'ok')).
--type(stats_fun() :: fun ((any()) -> 'ok')).
--type(master_state() :: #state { name                :: rabbit_amqqueue:name(),
+-type death_fun() :: fun ((pid()) -> 'ok').
+-type depth_fun() :: fun (() -> 'ok').
+-type stats_fun() :: fun ((any()) -> 'ok').
+-type master_state() :: #state { name                :: rabbit_amqqueue:name(),
                                  gm                  :: pid(),
                                  coordinator         :: pid(),
                                  backing_queue       :: atom(),
                                  backing_queue_state :: any(),
-                                 seen_status         :: dict:dict(),
+                                 seen_status         :: ?DICT_TYPE(),
                                  confirmed           :: [rabbit_guid:guid()],
-                                 known_senders       :: sets:set()
-                               }).
-
--spec(promote_backing_queue_state/8 ::
+                                 known_senders       :: ?SET_TYPE()
+                               }.
+-spec promote_backing_queue_state
         (rabbit_amqqueue:name(), pid(), atom(), any(), pid(), [any()],
-         dict:dict(), [pid()]) -> master_state()).
--spec(sender_death_fun/0 :: () -> death_fun()).
--spec(depth_fun/0 :: () -> depth_fun()).
--spec(init_with_existing_bq/3 :: (rabbit_types:amqqueue(), atom(), any()) ->
-                                      master_state()).
--spec(stop_mirroring/1 :: (master_state()) -> {atom(), any()}).
--spec(sync_mirrors/3 :: (stats_fun(), stats_fun(), master_state()) ->
-    {'ok', master_state()} | {stop, any(), master_state()}).
+         ?DICT_TYPE(), [pid()]) ->
+            master_state().
 
--endif.
+-spec sender_death_fun() -> death_fun().
+-spec depth_fun() -> depth_fun().
+-spec init_with_existing_bq(rabbit_types:amqqueue(), atom(), any()) ->
+          master_state().
+-spec stop_mirroring(master_state()) -> {atom(), any()}.
+-spec sync_mirrors(stats_fun(), stats_fun(), master_state()) ->
+          {'ok', master_state()} | {stop, any(), master_state()}.
 
 %% For general documentation of HA design, see
 %% rabbit_mirror_queue_coordinator
@@ -101,34 +101,43 @@ init(Q, Recover, AsyncCallback) ->
     State.
 
 init_with_existing_bq(Q = #amqqueue{name = QName}, BQ, BQS) ->
-    {ok, CPid} = rabbit_mirror_queue_coordinator:start_link(
-                   Q, undefined, sender_death_fun(), depth_fun()),
-    GM = rabbit_mirror_queue_coordinator:get_gm(CPid),
-    Self = self(),
-    ok = rabbit_misc:execute_mnesia_transaction(
-           fun () ->
-                   [Q1 = #amqqueue{gm_pids = GMPids}]
-                       = mnesia:read({rabbit_queue, QName}),
-                   ok = rabbit_amqqueue:store_queue(
-                          Q1#amqqueue{gm_pids = [{GM, Self} | GMPids],
-                                      state   = live})
-           end),
-    {_MNode, SNodes} = rabbit_mirror_queue_misc:suggested_queue_nodes(Q),
-    %% We need synchronous add here (i.e. do not return until the
-    %% slave is running) so that when queue declaration is finished
-    %% all slaves are up; we don't want to end up with unsynced slaves
-    %% just by declaring a new queue. But add can't be synchronous all
-    %% the time as it can be called by slaves and that's
-    %% deadlock-prone.
-    rabbit_mirror_queue_misc:add_mirrors(QName, SNodes, sync),
-    #state { name                = QName,
-             gm                  = GM,
-             coordinator         = CPid,
-             backing_queue       = BQ,
-             backing_queue_state = BQS,
-             seen_status         = dict:new(),
-             confirmed           = [],
-             known_senders       = sets:new() }.
+    case rabbit_mirror_queue_coordinator:start_link(
+          Q, undefined, sender_death_fun(), depth_fun()) of
+       {ok, CPid} ->
+           GM = rabbit_mirror_queue_coordinator:get_gm(CPid),
+           Self = self(),
+           ok = rabbit_misc:execute_mnesia_transaction(
+                  fun () ->
+                          [Q1 = #amqqueue{gm_pids = GMPids}]
+                              = mnesia:read({rabbit_queue, QName}),
+                          ok = rabbit_amqqueue:store_queue(
+                                 Q1#amqqueue{gm_pids = [{GM, Self} | GMPids],
+                                             state   = live})
+                  end),
+           {_MNode, SNodes} = rabbit_mirror_queue_misc:suggested_queue_nodes(Q),
+           %% We need synchronous add here (i.e. do not return until the
+           %% slave is running) so that when queue declaration is finished
+           %% all slaves are up; we don't want to end up with unsynced slaves
+           %% just by declaring a new queue. But add can't be synchronous all
+           %% the time as it can be called by slaves and that's
+           %% deadlock-prone.
+           rabbit_mirror_queue_misc:add_mirrors(QName, SNodes, sync),
+           #state { name                = QName,
+                    gm                  = GM,
+                    coordinator         = CPid,
+                    backing_queue       = BQ,
+                    backing_queue_state = BQS,
+                    seen_status         = dict:new(),
+                    confirmed           = [],
+                    known_senders       = sets:new(),
+                    wait_timeout        = rabbit_misc:get_env(rabbit, slave_wait_timeout, 15000) };
+       {error, Reason} ->
+           %% The GM can shutdown before the coordinator has started up
+           %% (lost membership or missing group), thus the start_link of
+           %% the coordinator returns {error, shutdown} as rabbit_amqqueue_process
+           %% is trapping exits
+           throw({coordinator_not_started, Reason})
+    end.
 
 stop_mirroring(State = #state { coordinator         = CPid,
                                 backing_queue       = BQ,
@@ -147,13 +156,15 @@ sync_mirrors(HandleInfo, EmitStats,
                     QName, "Synchronising: " ++ Fmt ++ "~n", Params)
           end,
     Log("~p messages to synchronise", [BQ:len(BQS)]),
-    {ok, #amqqueue{slave_pids = SPids}} = rabbit_amqqueue:lookup(QName),
+    {ok, #amqqueue{slave_pids = SPids} = Q} = rabbit_amqqueue:lookup(QName),
+    SyncBatchSize = rabbit_mirror_queue_misc:sync_batch_size(Q),
+    Log("batch size: ~p", [SyncBatchSize]),
     Ref = make_ref(),
     Syncer = rabbit_mirror_queue_sync:master_prepare(Ref, QName, Log, SPids),
     gm:broadcast(GM, {sync_start, Ref, Syncer, SPids}),
     S = fun(BQSN) -> State#state{backing_queue_state = BQSN} end,
     case rabbit_mirror_queue_sync:master_go(
-           Syncer, Ref, Log, HandleInfo, EmitStats, BQ, BQS) of
+           Syncer, Ref, Log, HandleInfo, EmitStats, SyncBatchSize, BQ, BQS) of
         {shutdown,  R, BQS1}   -> {stop, R, S(BQS1)};
         {sync_died, R, BQS1}   -> Log("~p", [R]),
                                   {ok, S(BQS1)};
@@ -199,7 +210,7 @@ delete_and_terminate(Reason, State = #state { backing_queue       = BQ,
     stop_all_slaves(Reason, State),
     State#state{backing_queue_state = BQ:delete_and_terminate(Reason, BQS)}.
 
-stop_all_slaves(Reason, #state{name = QName, gm = GM}) ->
+stop_all_slaves(Reason, #state{name = QName, gm = GM, wait_timeout = WT}) ->
     {ok, #amqqueue{slave_pids = SPids}} = rabbit_amqqueue:lookup(QName),
     PidsMRefs = [{Pid, erlang:monitor(process, Pid)} || Pid <- [GM | SPids]],
     ok = gm:broadcast(GM, {delete_and_terminate, Reason}),
@@ -208,7 +219,15 @@ stop_all_slaves(Reason, #state{name = QName, gm = GM}) ->
     %% monitor them but they would not have received the GM
     %% message. So only wait for slaves which are still
     %% not-partitioned.
-    [receive {'DOWN', MRef, process, _Pid, _Info} -> ok end
+    [receive
+         {'DOWN', MRef, process, _Pid, _Info} ->
+             ok
+     after WT ->
+             rabbit_mirror_queue_misc:log_warning(
+               QName, "Missing 'DOWN' message from ~p in node ~p~n",
+               [Pid, node(Pid)]),
+             ok
+     end
      || {Pid, MRef} <- PidsMRefs, rabbit_mnesia:on_running_node(Pid)],
     %% Normally when we remove a slave another slave or master will
     %% notice and update Mnesia. But we just removed them all, and
@@ -241,6 +260,27 @@ publish(Msg = #basic_message { id = MsgId }, MsgProps, IsDelivered, ChPid, Flow,
     BQS1 = BQ:publish(Msg, MsgProps, IsDelivered, ChPid, Flow, BQS),
     ensure_monitoring(ChPid, State #state { backing_queue_state = BQS1 }).
 
+batch_publish(Publishes, ChPid, Flow,
+              State = #state { gm                  = GM,
+                               seen_status         = SS,
+                               backing_queue       = BQ,
+                               backing_queue_state = BQS }) ->
+    {Publishes1, false, MsgSizes} =
+        lists:foldl(fun ({Msg = #basic_message { id = MsgId },
+                          MsgProps, _IsDelivered}, {Pubs, false, Sizes}) ->
+                            {[{Msg, MsgProps, true} | Pubs], %% [0]
+                             false = dict:is_key(MsgId, SS), %% ASSERTION
+                             Sizes + rabbit_basic:msg_size(Msg)}
+                    end, {[], false, 0}, Publishes),
+    Publishes2 = lists:reverse(Publishes1),
+    ok = gm:broadcast(GM, {batch_publish, ChPid, Flow, Publishes2},
+                      MsgSizes),
+    BQS1 = BQ:batch_publish(Publishes2, ChPid, Flow, BQS),
+    ensure_monitoring(ChPid, State #state { backing_queue_state = BQS1 }).
+%% [0] When the slave process handles the publish command, it sets the
+%% IsDelivered flag to true, so to avoid iterating over the messages
+%% again at the slave, we do it here.
+
 publish_delivered(Msg = #basic_message { id = MsgId }, MsgProps,
                   ChPid, Flow, State = #state { gm                  = GM,
                                                 seen_status         = SS,
@@ -253,6 +293,23 @@ publish_delivered(Msg = #basic_message { id = MsgId }, MsgProps,
     State1 = State #state { backing_queue_state = BQS1 },
     {AckTag, ensure_monitoring(ChPid, State1)}.
 
+batch_publish_delivered(Publishes, ChPid, Flow,
+                        State = #state { gm                  = GM,
+                                         seen_status         = SS,
+                                         backing_queue       = BQ,
+                                         backing_queue_state = BQS }) ->
+    {false, MsgSizes} =
+        lists:foldl(fun ({Msg = #basic_message { id = MsgId }, _MsgProps},
+                         {false, Sizes}) ->
+                            {false = dict:is_key(MsgId, SS), %% ASSERTION
+                             Sizes + rabbit_basic:msg_size(Msg)}
+                    end, {false, 0}, Publishes),
+    ok = gm:broadcast(GM, {batch_publish_delivered, ChPid, Flow, Publishes},
+                      MsgSizes),
+    {AckTags, BQS1} = BQ:batch_publish_delivered(Publishes, ChPid, Flow, BQS),
+    State1 = State #state { backing_queue_state = BQS1 },
+    {AckTags, ensure_monitoring(ChPid, State1)}.
+
 discard(MsgId, ChPid, Flow, State = #state { gm                  = GM,
                                              backing_queue       = BQ,
                                              backing_queue_state = BQS,
@@ -311,7 +368,7 @@ fetch(AckRequired, State = #state { backing_queue       = BQ,
     State1 = State #state { backing_queue_state = BQS1 },
     {Result, case Result of
                  empty                          -> State1;
-                 {_MsgId, _IsDelivered, AckTag} -> drop_one(AckTag, State1)
+                 {_MsgId, _IsDelivered, _AckTag} -> drop_one(AckRequired, State1)
              end}.
 
 drop(AckRequired, State = #state { backing_queue       = BQ,
@@ -320,7 +377,7 @@ drop(AckRequired, State = #state { backing_queue       = BQ,
     State1 = State #state { backing_queue_state = BQS1 },
     {Result, case Result of
                  empty            -> State1;
-                 {_MsgId, AckTag} -> drop_one(AckTag, State1)
+                 {_MsgId, _AckTag} -> drop_one(AckRequired, State1)
              end}.
 
 ack(AckTags, State = #state { gm                  = GM,
@@ -444,6 +501,18 @@ is_duplicate(Message = #basic_message { id = MsgId },
                                   confirmed = [MsgId | Confirmed] }}
     end.
 
+set_queue_mode(Mode, State = #state { gm                  = GM,
+                                      backing_queue       = BQ,
+                                      backing_queue_state = BQS }) ->
+    ok = gm:broadcast(GM, {set_queue_mode, Mode}),
+    BQS1 = BQ:set_queue_mode(Mode, BQS),
+    State #state { backing_queue_state = BQS1 }.
+
+zip_msgs_and_acks(Msgs, AckTags, Accumulator,
+                  #state { backing_queue = BQ,
+                           backing_queue_state = BQS }) ->
+    BQ:zip_msgs_and_acks(Msgs, AckTags, Accumulator, BQS).
+
 %% ---------------------------------------------------------------------------
 %% Other exported functions
 %% ---------------------------------------------------------------------------
@@ -454,6 +523,7 @@ promote_backing_queue_state(QName, CPid, BQ, BQS, GM, AckTags, Seen, KS) ->
     Depth = BQ:depth(BQS1),
     true = Len == Depth, %% ASSERTION: everything must have been requeued
     ok = gm:broadcast(GM, {depth, Depth}),
+    WaitTimeout = rabbit_misc:get_env(rabbit, slave_wait_timeout, 15000),
     #state { name                = QName,
              gm                  = GM,
              coordinator         = CPid,
@@ -461,7 +531,8 @@ promote_backing_queue_state(QName, CPid, BQ, BQS, GM, AckTags, Seen, KS) ->
              backing_queue_state = BQS1,
              seen_status         = Seen,
              confirmed           = [],
-             known_senders       = sets:from_list(KS) }.
+             known_senders       = sets:from_list(KS),
+             wait_timeout        = WaitTimeout }.
 
 sender_death_fun() ->
     Self = self(),
@@ -492,10 +563,10 @@ depth_fun() ->
 %% Helpers
 %% ---------------------------------------------------------------------------
 
-drop_one(AckTag, State = #state { gm                  = GM,
-                                  backing_queue       = BQ,
-                                  backing_queue_state = BQS }) ->
-    ok = gm:broadcast(GM, {drop, BQ:len(BQS), 1, AckTag =/= undefined}),
+drop_one(AckRequired, State = #state { gm                  = GM,
+                                       backing_queue       = BQ,
+                                       backing_queue_state = BQS }) ->
+    ok = gm:broadcast(GM, {drop, BQ:len(BQS), 1, AckRequired}),
     State.
 
 drop(PrevLen, AckRequired, State = #state { gm                  = GM,
similarity index 79%
rename from rabbitmq-server/src/rabbit_mirror_queue_misc.erl
rename to deps/rabbit/src/rabbit_mirror_queue_misc.erl
index fee890476e449fbe595d1fcefb58be12378401a7..375a0366dd8dd5fbca7c053e64cc7d24b309bd54 100644 (file)
 -export([remove_from_queue/3, on_node_up/0, add_mirrors/3,
          report_deaths/4, store_updated_slaves/1,
          initial_queue_node/2, suggested_queue_nodes/1,
-         is_mirrored/1, update_mirrors/2, validate_policy/1,
+         is_mirrored/1, update_mirrors/2, update_mirrors/1, validate_policy/1,
          maybe_auto_sync/1, maybe_drop_master_after_sync/1,
-         log_info/3, log_warning/3]).
+         sync_batch_size/1, log_info/3, log_warning/3]).
+
+-export([sync_queue/1, cancel_sync_queue/1]).
 
 %% for testing only
 -export([module/1]).
             [policy_validator, <<"ha-params">>, ?MODULE]}},
      {mfa, {rabbit_registry, register,
             [policy_validator, <<"ha-sync-mode">>, ?MODULE]}},
+     {mfa, {rabbit_registry, register,
+            [policy_validator, <<"ha-sync-batch-size">>, ?MODULE]}},
      {mfa, {rabbit_registry, register,
             [policy_validator, <<"ha-promote-on-shutdown">>, ?MODULE]}},
      {requires, rabbit_registry},
      {enables, recovery}]}).
 
+
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(remove_from_queue/3 ::
-        (rabbit_amqqueue:name(), pid(), [pid()])
-        -> {'ok', pid(), [pid()], [node()]} | {'error', 'not_found'}).
--spec(on_node_up/0 :: () -> 'ok').
--spec(add_mirrors/3 :: (rabbit_amqqueue:name(), [node()], 'sync' | 'async')
-                       -> 'ok').
--spec(store_updated_slaves/1 :: (rabbit_types:amqqueue()) ->
-                                     rabbit_types:amqqueue()).
--spec(initial_queue_node/2 :: (rabbit_types:amqqueue(), node()) -> node()).
--spec(suggested_queue_nodes/1 :: (rabbit_types:amqqueue()) ->
-                                      {node(), [node()]}).
--spec(is_mirrored/1 :: (rabbit_types:amqqueue()) -> boolean()).
--spec(update_mirrors/2 ::
-        (rabbit_types:amqqueue(), rabbit_types:amqqueue()) -> 'ok').
--spec(maybe_drop_master_after_sync/1 :: (rabbit_types:amqqueue()) -> 'ok').
--spec(maybe_auto_sync/1 :: (rabbit_types:amqqueue()) -> 'ok').
--spec(log_info/3 :: (rabbit_amqqueue:name(), string(), [any()]) -> 'ok').
--spec(log_warning/3 :: (rabbit_amqqueue:name(), string(), [any()]) -> 'ok').
-
--endif.
+-spec remove_from_queue
+        (rabbit_amqqueue:name(), pid(), [pid()]) ->
+            {'ok', pid(), [pid()], [node()]} | {'error', 'not_found'}.
+-spec on_node_up() -> 'ok'.
+-spec add_mirrors(rabbit_amqqueue:name(), [node()], 'sync' | 'async') ->
+          'ok'.
+-spec store_updated_slaves(rabbit_types:amqqueue()) ->
+          rabbit_types:amqqueue().
+-spec initial_queue_node(rabbit_types:amqqueue(), node()) -> node().
+-spec suggested_queue_nodes(rabbit_types:amqqueue()) ->
+          {node(), [node()]}.
+-spec is_mirrored(rabbit_types:amqqueue()) -> boolean().
+-spec update_mirrors
+        (rabbit_types:amqqueue(), rabbit_types:amqqueue()) -> 'ok'.
+-spec maybe_drop_master_after_sync(rabbit_types:amqqueue()) -> 'ok'.
+-spec maybe_auto_sync(rabbit_types:amqqueue()) -> 'ok'.
+-spec log_info(rabbit_amqqueue:name(), string(), [any()]) -> 'ok'.
+-spec log_warning(rabbit_amqqueue:name(), string(), [any()]) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -75,7 +76,7 @@ remove_from_queue(QueueName, Self, DeadGMPids) ->
     rabbit_misc:execute_mnesia_transaction(
       fun () ->
               %% Someone else could have deleted the queue before we
-              %% get here.
+              %% get here. Or, gm group could've altered. See rabbitmq-server#914
               case mnesia:read({rabbit_queue, QueueName}) of
                   [] -> {error, not_found};
                   [Q = #amqqueue { pid        = QPid,
@@ -89,7 +90,16 @@ remove_from_queue(QueueName, Self, DeadGMPids) ->
                       AlivePids = [Pid || {_GM, Pid} <- AliveGM],
                       Alive     = [Pid || Pid <- [QPid | SPids],
                                           lists:member(Pid, AlivePids)],
-                      {QPid1, SPids1} = promote_slave(Alive),
+                      {QPid1, SPids1} = case Alive of
+                                            [] ->
+                                                %% GM altered, & if all pids are
+                                                %% perceived as dead, rather do
+                                                %% do nothing here, & trust the
+                                                %% promoted slave to have updated
+                                                %% mnesia during the alteration.
+                                                {QPid, SPids};
+                                            _  -> promote_slave(Alive)
+                                        end,
                       Extra =
                           case {{QPid, SPids}, {QPid1, SPids1}} of
                               {Same, Same} ->
@@ -97,7 +107,8 @@ remove_from_queue(QueueName, Self, DeadGMPids) ->
                               _ when QPid =:= QPid1 orelse QPid1 =:= Self ->
                                   %% Either master hasn't changed, so
                                   %% we're ok to update mnesia; or we have
-                                  %% become the master.
+                                  %% become the master. If gm altered,
+                                  %% we have no choice but to proceed.
                                   Q1 = Q#amqqueue{pid        = QPid1,
                                                   slave_pids = SPids1,
                                                   gm_pids    = AliveGM},
@@ -284,10 +295,10 @@ promote_slave([SPid | SPids]) ->
     {SPid, SPids}.
 
 initial_queue_node(Q, DefNode) ->
-    {MNode, _SNodes} = suggested_queue_nodes(Q, DefNode, all_nodes()),
+    {MNode, _SNodes} = suggested_queue_nodes(Q, DefNode, rabbit_nodes:all_running()),
     MNode.
 
-suggested_queue_nodes(Q)      -> suggested_queue_nodes(Q, all_nodes()).
+suggested_queue_nodes(Q)      -> suggested_queue_nodes(Q, rabbit_nodes:all_running()).
 suggested_queue_nodes(Q, All) -> suggested_queue_nodes(Q, node(), All).
 
 %% The third argument exists so we can pull a call to
@@ -309,8 +320,6 @@ suggested_queue_nodes(Q = #amqqueue{exclusive_owner = Owner}, DefNode, All) ->
         _    -> {MNode, []}
     end.
 
-all_nodes() -> rabbit_mnesia:cluster_nodes(running).
-
 policy(Policy, Q) ->
     case rabbit_policy:get(Policy, Q) of
         undefined -> none;
@@ -332,6 +341,14 @@ module(Mode) when is_binary(Mode) ->
                               end
     end.
 
+validate_mode(Mode) ->
+    case module(Mode) of
+        {ok, _Module} ->
+            ok;
+        not_mirrored ->
+            {error, "~p is not a valid ha-mode value", [Mode]}
+    end.
+
 is_mirrored(Q) ->
     case module(Q) of
         {ok, _}  -> true;
@@ -355,19 +372,42 @@ maybe_auto_sync(Q = #amqqueue{pid = QPid}) ->
             ok
     end.
 
+sync_queue(Q) ->
+    rabbit_amqqueue:with(
+      Q, fun(#amqqueue{pid = QPid}) -> rabbit_amqqueue:sync_mirrors(QPid) end).
+
+cancel_sync_queue(Q) ->
+    rabbit_amqqueue:with(
+      Q, fun(#amqqueue{pid = QPid}) ->
+                 rabbit_amqqueue:cancel_sync_mirrors(QPid)
+         end).
+
+sync_batch_size(#amqqueue{} = Q) ->
+    case policy(<<"ha-sync-batch-size">>, Q) of
+        none -> %% we need this case because none > 1 == true
+            default_batch_size();
+        BatchSize when BatchSize > 1 ->
+            BatchSize;
+        _ ->
+            default_batch_size()
+    end.
+
+-define(DEFAULT_BATCH_SIZE, 4096).
+
+default_batch_size() ->
+    rabbit_misc:get_env(rabbit, mirroring_sync_batch_size,
+                        ?DEFAULT_BATCH_SIZE).
+
 update_mirrors(OldQ = #amqqueue{pid = QPid},
                NewQ = #amqqueue{pid = QPid}) ->
     case {is_mirrored(OldQ), is_mirrored(NewQ)} of
         {false, false} -> ok;
-        {true,  false} -> rabbit_amqqueue:stop_mirroring(QPid);
-        {false,  true} -> rabbit_amqqueue:start_mirroring(QPid);
-        {true,   true} -> update_mirrors0(OldQ, NewQ)
+        _ -> rabbit_amqqueue:update_mirroring(QPid)
     end.
 
-update_mirrors0(OldQ = #amqqueue{name = QName},
-                NewQ = #amqqueue{name = QName}) ->
-    {OldMNode, OldSNodes, _} = actual_queue_nodes(OldQ),
-    {NewMNode, NewSNodes}    = suggested_queue_nodes(NewQ),
+update_mirrors(Q = #amqqueue{name = QName}) ->
+    {OldMNode, OldSNodes, _} = actual_queue_nodes(Q),
+    {NewMNode, NewSNodes}    = suggested_queue_nodes(Q),
     OldNodes = [OldMNode | OldSNodes],
     NewNodes = [NewMNode | NewSNodes],
     %% When a mirror dies, remove_from_queue/2 might have to add new
@@ -381,7 +421,7 @@ update_mirrors0(OldQ = #amqqueue{name = QName},
     drop_mirrors(QName, OldNodes -- NewNodes),
     %% This is for the case where no extra nodes were added but we changed to
     %% a policy requiring auto-sync.
-    maybe_auto_sync(NewQ),
+    maybe_auto_sync(Q),
     ok.
 
 %% The arrival of a newly synced slave may cause the master to die if
@@ -410,25 +450,37 @@ validate_policy(KeyList) ->
     Mode = proplists:get_value(<<"ha-mode">>, KeyList, none),
     Params = proplists:get_value(<<"ha-params">>, KeyList, none),
     SyncMode = proplists:get_value(<<"ha-sync-mode">>, KeyList, none),
+    SyncBatchSize = proplists:get_value(
+                      <<"ha-sync-batch-size">>, KeyList, none),
     PromoteOnShutdown = proplists:get_value(
                           <<"ha-promote-on-shutdown">>, KeyList, none),
-    case {Mode, Params, SyncMode, PromoteOnShutdown} of
-        {none, none, none, none} ->
+    case {Mode, Params, SyncMode, SyncBatchSize, PromoteOnShutdown} of
+        {none, none, none, none, none} ->
             ok;
-        {none, _, _, _} ->
+        {none, _, _, _, _} ->
             {error, "ha-mode must be specified to specify ha-params, "
              "ha-sync-mode or ha-promote-on-shutdown", []};
         _ ->
-            case module(Mode) of
-                {ok, M} -> case M:validate_policy(Params) of
-                               ok -> case validate_sync_mode(SyncMode) of
-                                         ok -> validate_pos(PromoteOnShutdown);
-                                         E  -> E
-                                     end;
-                               E  -> E
-                           end;
-                _       -> {error, "~p is not a valid ha-mode value", [Mode]}
-            end
+            validate_policies(
+              [{Mode, fun validate_mode/1},
+               {Params, ha_params_validator(Mode)},
+               {SyncMode, fun validate_sync_mode/1},
+               {SyncBatchSize, fun validate_sync_batch_size/1},
+               {PromoteOnShutdown, fun validate_pos/1}])
+    end.
+
+ha_params_validator(Mode) ->
+    fun(Val) ->
+            {ok, M} = module(Mode),
+            M:validate_policy(Val)
+    end.
+
+validate_policies([]) ->
+    ok;
+validate_policies([{Val, Validator} | Rest]) ->
+    case Validator(Val) of
+        ok -> validate_policies(Rest);
+        E  -> E
     end.
 
 validate_sync_mode(SyncMode) ->
@@ -440,6 +492,14 @@ validate_sync_mode(SyncMode) ->
                             "or \"automatic\", got ~p", [Mode]}
     end.
 
+validate_sync_batch_size(none) ->
+    ok;
+validate_sync_batch_size(N) when is_integer(N) andalso N > 0 ->
+    ok;
+validate_sync_batch_size(N) ->
+    {error, "ha-sync-batch-size takes an integer greater than 0, "
+     "~p given", [N]}.
+
 validate_pos(PromoteOnShutdown) ->
     case PromoteOnShutdown of
         <<"always">>      -> ok;
similarity index 83%
rename from rabbitmq-server/src/rabbit_mirror_queue_mode.erl
rename to deps/rabbit/src/rabbit_mirror_queue_mode.erl
index 5bb243746a09fe52c585e912e23d0c757fc5b887..3733c7f0f8df92c1770ba707b01d1f917e2f10f9 100644 (file)
 
 -module(rabbit_mirror_queue_mode).
 
--ifdef(use_specs).
-
--type(master() :: node()).
--type(slave() :: node()).
--type(params() :: any()).
+-type master() :: node().
+-type slave() :: node().
+-type params() :: any().
 
 -callback description() -> [proplists:property()].
 
 %% Are the parameters valid for this mode?
 -callback validate_policy(params()) ->
     rabbit_policy_validator:validate_results().
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{description, 0}, {suggested_queue_nodes, 5}, {validate_policy, 1}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
similarity index 93%
rename from rabbitmq-server/src/rabbit_mirror_queue_mode_exactly.erl
rename to deps/rabbit/src/rabbit_mirror_queue_mode_exactly.erl
index 0c0b7a10e8b03c3a097b26592b4551265e7d0533..593f0a4138bb6bbee274797a55faa5bbd68dd44a 100644 (file)
@@ -45,9 +45,7 @@ suggested_queue_nodes(Count, MNode, SNodes, _SSNodes, Poss) ->
             end}.
 
 shuffle(L) ->
-    {A1,A2,A3} = now(),
-    random:seed(A1, A2, A3),
-    {_, L1} = lists:unzip(lists:keysort(1, [{random:uniform(), N} || N <- L])),
+    {_, L1} = lists:unzip(lists:keysort(1, [{rand_compat:uniform(), N} || N <- L])),
     L1.
 
 validate_policy(N) when is_integer(N) andalso N > 0 ->
similarity index 65%
rename from rabbitmq-server/src/rabbit_mirror_queue_mode_nodes.erl
rename to deps/rabbit/src/rabbit_mirror_queue_mode_nodes.erl
index e63f3403739d6f0be6c0e3de33a816cb3aa4f15c..31c55722a5c533e9e177c868d80feb09508dd547 100644 (file)
 description() ->
     [{description, <<"Mirror queue to specified nodes">>}].
 
-suggested_queue_nodes(Nodes0, MNode, _SNodes, SSNodes, Poss) ->
-    Nodes1 = [list_to_atom(binary_to_list(Node)) || Node <- Nodes0],
+suggested_queue_nodes(PolicyNodes0, CurrentMaster, _SNodes, SSNodes, NodesRunningRabbitMQ) ->
+    PolicyNodes1 = [list_to_atom(binary_to_list(Node)) || Node <- PolicyNodes0],
     %% If the current master is not in the nodes specified, then what we want
     %% to do depends on whether there are any synchronised slaves. If there
     %% are then we can just kill the current master - the admin has asked for
     %% a migration and we should give it to them. If there are not however
     %% then we must keep the master around so as not to lose messages.
-    Nodes = case SSNodes of
-                [] -> lists:usort([MNode | Nodes1]);
-                _  -> Nodes1
-            end,
-    Unavailable = Nodes -- Poss,
-    Available = Nodes -- Unavailable,
-    case Available of
+
+    PolicyNodes = case SSNodes of
+                      [] -> lists:usort([CurrentMaster | PolicyNodes1]);
+                      _  -> PolicyNodes1
+                  end,
+    Unavailable = PolicyNodes -- NodesRunningRabbitMQ,
+    AvailablePolicyNodes = PolicyNodes -- Unavailable,
+    case AvailablePolicyNodes of
         [] -> %% We have never heard of anything? Not much we can do but
               %% keep the master alive.
-              {MNode, []};
-        _  -> case lists:member(MNode, Available) of
-                  true  -> {MNode, Available -- [MNode]};
+              {CurrentMaster, []};
+        _  -> case lists:member(CurrentMaster, AvailablePolicyNodes) of
+                  true  -> {CurrentMaster,
+                            AvailablePolicyNodes -- [CurrentMaster]};
                   false -> %% Make sure the new master is synced! In order to
                            %% get here SSNodes must not be empty.
-                           [NewMNode | _] = SSNodes,
-                           {NewMNode, Available -- [NewMNode]}
+                           SyncPolicyNodes = [Node ||
+                                              Node <- AvailablePolicyNodes,
+                                              lists:member(Node, SSNodes)],
+                           NewMaster = case SyncPolicyNodes of
+                                          [Node | _] -> Node;
+                                          []         -> erlang:hd(SSNodes)
+                                      end,
+                           {NewMaster, AvailablePolicyNodes -- [NewMaster]}
               end
     end.
 
similarity index 92%
rename from rabbitmq-server/src/rabbit_mirror_queue_slave.erl
rename to deps/rabbit/src/rabbit_mirror_queue_slave.erl
index 7f309ab0b79e0969928a9bcebdbefd19e595fdf6..6017e5a02838f30d78c5304de539a122a3eb5ace 100644 (file)
@@ -120,7 +120,7 @@ handle_go(Q = #amqqueue{name = QName}) ->
                    Self, {rabbit_amqqueue, set_ram_duration_target, [Self]}),
             {ok, BQ} = application:get_env(backing_queue_module),
             Q1 = Q #amqqueue { pid = QPid },
-            ok = rabbit_queue_index:erase(QName), %% For crash recovery
+            _ = BQ:delete_crashed(Q), %% For crash recovery
             BQS = bq_init(BQ, Q1, new),
             State = #state { q                   = Q1,
                              gm                  = GM,
@@ -225,9 +225,15 @@ handle_call({gm_deaths, DeadGMPids}, From,
                 _ ->
                     %% master has changed to not us
                     gen_server2:reply(From, ok),
-                    %% assertion, we don't need to add_mirrors/2 in this
-                    %% branch, see last clause in remove_from_queue/2
-                    [] = ExtraNodes,
+                    %% see rabbitmq-server#914;
+                    %% It's not always guaranteed that we won't have ExtraNodes.
+                    %% If gm alters, master can change to not us with extra nodes,
+                    %% in which case we attempt to add mirrors on those nodes.
+                    case ExtraNodes of
+                        [] -> void;
+                        _  -> rabbit_mirror_queue_misc:add_mirrors(
+                                QName, ExtraNodes, async)
+                    end,
                     %% Since GM is by nature lazy we need to make sure
                     %% there is some traffic when a master dies, to
                     %% make sure all slaves get informed of the
@@ -250,19 +256,29 @@ handle_cast(go, {not_started, Q} = NotStarted) ->
 handle_cast({run_backing_queue, Mod, Fun}, State) ->
     noreply(run_backing_queue(Mod, Fun, State));
 
-handle_cast({gm, Instruction}, State) ->
-    handle_process_result(process_instruction(Instruction, State));
+handle_cast({gm, Instruction}, State = #state{q = #amqqueue { name = QName }}) ->
+    case rabbit_amqqueue:lookup(QName) of
+       {ok, #amqqueue{slave_pids = SPids}} ->
+           case lists:member(self(), SPids) of
+               true ->
+                   handle_process_result(process_instruction(Instruction, State));
+               false ->
+                   %% Potentially a duplicated slave caused by a partial partition,
+                   %% will stop as a new slave could start unaware of our presence
+                   {stop, shutdown, State}
+           end;
+       {error, not_found} ->
+           %% Would not expect this to happen after fixing #953
+           {stop, shutdown, State}
+    end;
 
 handle_cast({deliver, Delivery = #delivery{sender = Sender, flow = Flow}, true},
             State) ->
     %% Asynchronous, non-"mandatory", deliver mode.
-    case Flow of
-        %% We are acking messages to the channel process that sent us
-        %% the message delivery. See
-        %% rabbit_amqqueue_process:handle_ch_down for more info.
-        flow   -> credit_flow:ack(Sender);
-        noflow -> ok
-    end,
+    %% We are acking messages to the channel process that sent us
+    %% the message delivery. See
+    %% rabbit_amqqueue_process:handle_ch_down for more info.
+    maybe_flow_ack(Sender, Flow),
     noreply(maybe_enqueue_message(Delivery, State));
 
 handle_cast({sync_start, Ref, Syncer},
@@ -298,7 +314,12 @@ handle_cast({set_ram_duration_target, Duration},
             State = #state { backing_queue       = BQ,
                              backing_queue_state = BQS }) ->
     BQS1 = BQ:set_ram_duration_target(Duration, BQS),
-    noreply(State #state { backing_queue_state = BQS1 }).
+    noreply(State #state { backing_queue_state = BQS1 });
+
+handle_cast(policy_changed, State) ->
+    %% During partial partitions, we might end up receiving messages expected by a master
+    %% Ignore them
+    noreply(State).
 
 handle_info(update_ram_duration, State = #state{backing_queue       = BQ,
                                                 backing_queue_state = BQS}) ->
@@ -545,9 +566,8 @@ confirm_messages(MsgIds, State = #state { msg_id_status = MS }) ->
 handle_process_result({ok,   State}) -> noreply(State);
 handle_process_result({stop, State}) -> {stop, normal, State}.
 
--ifdef(use_specs).
--spec(promote_me/2 :: ({pid(), term()}, #state{}) -> no_return()).
--endif.
+-spec promote_me({pid(), term()}, #state{}) -> no_return().
+
 promote_me(From, #state { q                   = Q = #amqqueue { name = QName },
                           gm                  = GM,
                           backing_queue       = BQ,
@@ -658,10 +678,7 @@ promote_me(From, #state { q                   = Q = #amqqueue { name = QName },
 %% need to send an ack for these messages since the channel is waiting
 %% for one for the via-GM case and we will not now receive one.
 promote_delivery(Delivery = #delivery{sender = Sender, flow = Flow}) ->
-    case Flow of
-        flow   -> credit_flow:ack(Sender);
-        noflow -> ok
-    end,
+    maybe_flow_ack(Sender, Flow),
     Delivery#delivery{mandatory = false}.
 
 noreply(State) ->
@@ -747,6 +764,7 @@ confirm_sender_death(Pid) ->
 
 forget_sender(_, running)                        -> false;
 forget_sender(down_from_gm, down_from_gm)        -> false; %% [1]
+forget_sender(down_from_ch, down_from_ch)        -> false;
 forget_sender(Down1, Down2) when Down1 =/= Down2 -> true.
 
 %% [1] If another slave goes through confirm_sender_death/1 before we
@@ -851,6 +869,15 @@ process_instruction({publish, ChPid, Flow, MsgProps,
         publish_or_discard(published, ChPid, MsgId, State),
     BQS1 = BQ:publish(Msg, MsgProps, true, ChPid, Flow, BQS),
     {ok, State1 #state { backing_queue_state = BQS1 }};
+process_instruction({batch_publish, ChPid, Flow, Publishes}, State) ->
+    maybe_flow_ack(ChPid, Flow),
+    State1 = #state { backing_queue = BQ, backing_queue_state = BQS } =
+        lists:foldl(fun ({#basic_message { id = MsgId },
+                          _MsgProps, _IsDelivered}, St) ->
+                            publish_or_discard(published, ChPid, MsgId, St)
+                    end, State, Publishes),
+    BQS1 = BQ:batch_publish(Publishes, ChPid, Flow, BQS),
+    {ok, State1 #state { backing_queue_state = BQS1 }};
 process_instruction({publish_delivered, ChPid, Flow, MsgProps,
                      Msg = #basic_message { id = MsgId }}, State) ->
     maybe_flow_ack(ChPid, Flow),
@@ -860,6 +887,24 @@ process_instruction({publish_delivered, ChPid, Flow, MsgProps,
     {AckTag, BQS1} = BQ:publish_delivered(Msg, MsgProps, ChPid, Flow, BQS),
     {ok, maybe_store_ack(true, MsgId, AckTag,
                          State1 #state { backing_queue_state = BQS1 })};
+process_instruction({batch_publish_delivered, ChPid, Flow, Publishes}, State) ->
+    maybe_flow_ack(ChPid, Flow),
+    {MsgIds,
+     State1 = #state { backing_queue = BQ, backing_queue_state = BQS }} =
+        lists:foldl(fun ({#basic_message { id = MsgId }, _MsgProps},
+                         {MsgIds, St}) ->
+                            {[MsgId | MsgIds],
+                             publish_or_discard(published, ChPid, MsgId, St)}
+                    end, {[], State}, Publishes),
+    true = BQ:is_empty(BQS),
+    {AckTags, BQS1} = BQ:batch_publish_delivered(Publishes, ChPid, Flow, BQS),
+    MsgIdsAndAcks = lists:zip(lists:reverse(MsgIds), AckTags),
+    State2 = lists:foldl(
+               fun ({MsgId, AckTag}, St) ->
+                       maybe_store_ack(true, MsgId, AckTag, St)
+               end, State1 #state { backing_queue_state = BQS1 },
+               MsgIdsAndAcks),
+    {ok, State2};
 process_instruction({discard, ChPid, Flow, MsgId}, State) ->
     maybe_flow_ack(ChPid, Flow),
     State1 = #state { backing_queue = BQ, backing_queue_state = BQS } =
@@ -921,10 +966,15 @@ process_instruction({delete_and_terminate, Reason},
                     State = #state { backing_queue       = BQ,
                                      backing_queue_state = BQS }) ->
     BQ:delete_and_terminate(Reason, BQS),
-    {stop, State #state { backing_queue_state = undefined }}.
+    {stop, State #state { backing_queue_state = undefined }};
+process_instruction({set_queue_mode, Mode},
+                    State = #state { backing_queue       = BQ,
+                                     backing_queue_state = BQS }) ->
+    BQS1 = BQ:set_queue_mode(Mode, BQS),
+    {ok, State #state { backing_queue_state = BQS1 }}.
 
-maybe_flow_ack(ChPid, flow)    -> credit_flow:ack(ChPid);
-maybe_flow_ack(_ChPid, noflow) -> ok.
+maybe_flow_ack(Sender, flow)    -> credit_flow:ack(Sender);
+maybe_flow_ack(_Sender, noflow) -> ok.
 
 msg_ids_to_acktags(MsgIds, MA) ->
     {AckTags, MA1} =
similarity index 55%
rename from rabbitmq-server/src/rabbit_mirror_queue_sync.erl
rename to deps/rabbit/src/rabbit_mirror_queue_sync.erl
index 9a8d55f94bdb0db41c4b09ce828d07b61eeeb5d3..c438e91a3f5b28e1d3a1b142989a0c33689d322d 100644 (file)
@@ -18,7 +18,7 @@
 
 -include("rabbit.hrl").
 
--export([master_prepare/4, master_go/7, slave/7]).
+-export([master_prepare/4, master_go/8, slave/7, conserve_resources/3]).
 
 -define(SYNC_PROGRESS_INTERVAL, 1000000).
 
 %%                 || <--- ready ---- ||                      ||
 %%                 || <--- next* ---- ||                      ||  }
 %%                 || ---- msg* ----> ||                      ||  } loop
-%%                 ||                 || ---- sync_msg* ----> ||  }
+%%                 ||                 || ---- sync_msgs* ---> ||  }
 %%                 ||                 || <--- (credit)* ----- ||  }
 %%                 || <--- next  ---- ||                      ||
 %%                 || ---- done ----> ||                      ||
 %%                 ||                 || -- sync_complete --> ||
 %%                 ||               (Dies)                    ||
 
--ifdef(use_specs).
+-type log_fun() :: fun ((string(), [any()]) -> 'ok').
+-type bq() :: atom().
+-type bqs() :: any().
+-type ack() :: any().
+-type slave_sync_state() :: {[{rabbit_types:msg_id(), ack()}], timer:tref(),
+                             bqs()}.
 
--type(log_fun() :: fun ((string(), [any()]) -> 'ok')).
--type(bq() :: atom()).
--type(bqs() :: any()).
--type(ack() :: any()).
--type(slave_sync_state() :: {[{rabbit_types:msg_id(), ack()}], timer:tref(),
-                             bqs()}).
-
--spec(master_prepare/4 :: (reference(), rabbit_amqqueue:name(),
-                               log_fun(), [pid()]) -> pid()).
--spec(master_go/7 :: (pid(), reference(), log_fun(),
+-spec master_prepare(reference(), rabbit_amqqueue:name(),
+                               log_fun(), [pid()]) -> pid().
+-spec master_go(pid(), reference(), log_fun(),
                       rabbit_mirror_queue_master:stats_fun(),
                       rabbit_mirror_queue_master:stats_fun(),
+                      non_neg_integer(),
                       bq(), bqs()) ->
                           {'already_synced', bqs()} | {'ok', bqs()} |
                           {'shutdown', any(), bqs()} |
-                          {'sync_died', any(), bqs()}).
--spec(slave/7 :: (non_neg_integer(), reference(), timer:tref(), pid(),
+                          {'sync_died', any(), bqs()}.
+-spec slave(non_neg_integer(), reference(), timer:tref(), pid(),
                   bq(), bqs(), fun((bq(), bqs()) -> {timer:tref(), bqs()})) ->
                       'denied' |
                       {'ok' | 'failed', slave_sync_state()} |
-                      {'stop', any(), slave_sync_state()}).
-
--endif.
+                      {'stop', any(), slave_sync_state()}.
 
 %% ---------------------------------------------------------------------------
 %% Master
@@ -88,46 +85,65 @@ master_prepare(Ref, QName, Log, SPids) ->
                        syncer(Ref, Log, MPid, SPids)
                end).
 
-master_go(Syncer, Ref, Log, HandleInfo, EmitStats, BQ, BQS) ->
+master_go(Syncer, Ref, Log, HandleInfo, EmitStats, SyncBatchSize, BQ, BQS) ->
     Args = {Syncer, Ref, Log, HandleInfo, EmitStats, rabbit_misc:get_parent()},
     receive
         {'EXIT', Syncer, normal} -> {already_synced, BQS};
         {'EXIT', Syncer, Reason} -> {sync_died, Reason, BQS};
         {ready, Syncer}          -> EmitStats({syncing, 0}),
-                                    master_go0(Args, BQ, BQS)
+                                    master_batch_go0(Args, SyncBatchSize,
+                                                     BQ, BQS)
     end.
 
-master_go0(Args, BQ, BQS) ->
-    case BQ:fold(fun (Msg, MsgProps, Unacked, Acc) ->
-                         master_send(Msg, MsgProps, Unacked, Args, Acc)
-                 end, {0, erlang:now()}, BQS) of
+master_batch_go0(Args, BatchSize, BQ, BQS) ->
+    FoldFun =
+        fun (Msg, MsgProps, Unacked, Acc) ->
+                Acc1 = append_to_acc(Msg, MsgProps, Unacked, Acc),
+                case maybe_master_batch_send(Acc1, BatchSize) of
+                    true  -> master_batch_send(Args, Acc1);
+                    false -> {cont, Acc1}
+                end
+        end,
+    FoldAcc = {[], 0, {0, BQ:depth(BQS)}, time_compat:monotonic_time()},
+    bq_fold(FoldFun, FoldAcc, Args, BQ, BQS).
+
+master_batch_send({Syncer, Ref, Log, HandleInfo, EmitStats, Parent},
+                  {Batch, I, {Curr, Len}, Last}) ->
+    T = maybe_emit_stats(Last, I, EmitStats, Log),
+    HandleInfo({syncing, I}),
+    handle_set_maximum_since_use(),
+    SyncMsg = {msgs, Ref, lists:reverse(Batch)},
+    NewAcc = {[], I + length(Batch), {Curr, Len}, T},
+    master_send_receive(SyncMsg, NewAcc, Syncer, Ref, Parent).
+
+%% Either send messages when we reach the last one in the queue or
+%% whenever we have accumulated BatchSize messages.
+maybe_master_batch_send({_, _, {Len, Len}, _}, _BatchSize) ->
+    true;
+maybe_master_batch_send({_, _, {Curr, _Len}, _}, BatchSize)
+  when Curr rem BatchSize =:= 0 ->
+    true;
+maybe_master_batch_send(_Acc, _BatchSize) ->
+    false.
+
+bq_fold(FoldFun, FoldAcc, Args, BQ, BQS) ->
+    case BQ:fold(FoldFun, FoldAcc, BQS) of
         {{shutdown,  Reason}, BQS1} -> {shutdown,  Reason, BQS1};
         {{sync_died, Reason}, BQS1} -> {sync_died, Reason, BQS1};
         {_,                   BQS1} -> master_done(Args, BQS1)
     end.
 
-master_send(Msg, MsgProps, Unacked,
-            {Syncer, Ref, Log, HandleInfo, EmitStats, Parent}, {I, Last}) ->
-    T = case timer:now_diff(erlang:now(), Last) > ?SYNC_PROGRESS_INTERVAL of
-            true  -> EmitStats({syncing, I}),
-                     Log("~p messages", [I]),
-                     erlang:now();
-            false -> Last
-        end,
-    HandleInfo({syncing, I}),
-    receive
-        {'$gen_cast', {set_maximum_since_use, Age}} ->
-            ok = file_handle_cache:set_maximum_since_use(Age)
-    after 0 ->
-            ok
-    end,
+append_to_acc(Msg, MsgProps, Unacked, {Batch, I, {Curr, Len}, T}) ->
+    {[{Msg, MsgProps, Unacked} | Batch], I, {Curr + 1, Len}, T}.
+
+master_send_receive(SyncMsg, NewAcc, Syncer, Ref, Parent) ->
     receive
         {'$gen_call', From,
          cancel_sync_mirrors}    -> stop_syncer(Syncer, {cancel, Ref}),
                                     gen_server2:reply(From, ok),
                                     {stop, cancelled};
-        {next, Ref}              -> Syncer ! {msg, Ref, Msg, MsgProps, Unacked},
-                                    {cont, {I + 1, T}};
+        {next, Ref}              -> Syncer ! SyncMsg,
+                                    {cont, NewAcc};
         {'EXIT', Parent, Reason} -> {stop, {shutdown,  Reason}};
         {'EXIT', Syncer, Reason} -> {stop, {sync_died, Reason}}
     end.
@@ -147,6 +163,24 @@ stop_syncer(Syncer, Msg) ->
     after 0 -> ok
     end.
 
+maybe_emit_stats(Last, I, EmitStats, Log) ->
+    Interval = time_compat:convert_time_unit(
+                 time_compat:monotonic_time() - Last, native, micro_seconds),
+    case Interval > ?SYNC_PROGRESS_INTERVAL of
+        true  -> EmitStats({syncing, I}),
+                 Log("~p messages", [I]),
+                 time_compat:monotonic_time();
+        false -> Last
+    end.
+
+handle_set_maximum_since_use() ->
+    receive
+        {'$gen_cast', {set_maximum_since_use, Age}} ->
+            ok = file_handle_cache:set_maximum_since_use(Age)
+    after 0 ->
+            ok
+    end.
+
 %% Master
 %% ---------------------------------------------------------------------------
 %% Syncer
@@ -160,7 +194,7 @@ syncer(Ref, Log, MPid, SPids) ->
         []     -> Log("all slaves already synced", []);
         SPids1 -> MPid ! {ready, self()},
                   Log("mirrors ~p to sync", [[node(SPid) || SPid <- SPids1]]),
-                  syncer_loop(Ref, MPid, SPids1)
+                  syncer_check_resources(Ref, MPid, SPids1)
     end.
 
 await_slaves(Ref, SPids) ->
@@ -179,16 +213,50 @@ await_slaves(Ref, SPids) ->
 %% 'sync_start' and so will not reply. We need to act as though they are
 %% down.
 
+syncer_check_resources(Ref, MPid, SPids) ->
+    rabbit_alarm:register(self(), {?MODULE, conserve_resources, []}),
+    %% Before we ask the master node to send the first batch of messages
+    %% over here, we check if one node is already short on memory. If
+    %% that's the case, we wait for the alarm to be cleared before
+    %% starting the syncer loop.
+    AlarmedNodes = lists:any(
+      fun
+          ({{resource_limit, memory, _}, _}) -> true;
+          ({_, _})                           -> false
+      end, rabbit_alarm:get_alarms()),
+    if
+        not AlarmedNodes ->
+            MPid ! {next, Ref},
+            syncer_loop(Ref, MPid, SPids);
+        true ->
+            case wait_for_resources(Ref, SPids) of
+                cancel -> ok;
+                SPids1 -> MPid ! {next, Ref},
+                          syncer_loop(Ref, MPid, SPids1)
+            end
+    end.
+
 syncer_loop(Ref, MPid, SPids) ->
-    MPid ! {next, Ref},
     receive
-        {msg, Ref, Msg, MsgProps, Unacked} ->
+        {conserve_resources, memory, true} ->
+            case wait_for_resources(Ref, SPids) of
+                cancel -> ok;
+                SPids1 -> syncer_loop(Ref, MPid, SPids1)
+            end;
+        {conserve_resources, _, _} ->
+            %% Ignore other alerts.
+            syncer_loop(Ref, MPid, SPids);
+        {msgs, Ref, Msgs} ->
             SPids1 = wait_for_credit(SPids),
-            [begin
-                 credit_flow:send(SPid),
-                 SPid ! {sync_msg, Ref, Msg, MsgProps, Unacked}
-             end || SPid <- SPids1],
-            syncer_loop(Ref, MPid, SPids1);
+            case SPids1 of
+                [] ->
+                    % Die silently because there are no slaves left.
+                    ok;
+                _  ->
+                    broadcast(SPids1, {sync_msgs, Ref, Msgs}),
+                    MPid ! {next, Ref},
+                    syncer_loop(Ref, MPid, SPids1)
+            end;
         {cancel, Ref} ->
             %% We don't tell the slaves we will die - so when we do
             %% they interpret that as a failure, which is what we
@@ -198,6 +266,16 @@ syncer_loop(Ref, MPid, SPids) ->
             [SPid ! {sync_complete, Ref} || SPid <- SPids]
     end.
 
+broadcast(SPids, Msg) ->
+    [begin
+         credit_flow:send(SPid),
+         SPid ! Msg
+     end || SPid <- SPids].
+
+conserve_resources(Pid, Source, {_, Conserve, _}) ->
+    Pid ! {conserve_resources, Source, Conserve},
+    ok.
+
 wait_for_credit(SPids) ->
     case credit_flow:blocked() of
         true  -> receive
@@ -211,6 +289,24 @@ wait_for_credit(SPids) ->
         false -> SPids
     end.
 
+wait_for_resources(Ref, SPids) ->
+    receive
+        {conserve_resources, memory, false} ->
+            SPids;
+        {conserve_resources, _, _} ->
+            %% Ignore other alerts.
+            wait_for_resources(Ref, SPids);
+        {cancel, Ref} ->
+            %% We don't tell the slaves we will die - so when we do
+            %% they interpret that as a failure, which is what we
+            %% want.
+            cancel;
+        {'DOWN', _, process, SPid, _} ->
+            credit_flow:peer_down(SPid),
+            SPids1 = wait_for_credit(lists:delete(SPid, SPids)),
+            wait_for_resources(Ref, SPids1)
+    end.
+
 %% Syncer
 %% ---------------------------------------------------------------------------
 %% Slave
@@ -258,17 +354,9 @@ slave_sync_loop(Args = {Ref, MRef, Syncer, BQ, UpdateRamDuration, Parent},
         update_ram_duration ->
             {TRef1, BQS1} = UpdateRamDuration(BQ, BQS),
             slave_sync_loop(Args, {MA, TRef1, BQS1});
-        {sync_msg, Ref, Msg, Props, Unacked} ->
+        {sync_msgs, Ref, Batch} ->
             credit_flow:ack(Syncer),
-            Props1 = Props#message_properties{needs_confirming = false},
-            {MA1, BQS1} =
-                case Unacked of
-                    false -> {MA,
-                              BQ:publish(Msg, Props1, true, none, noflow, BQS)};
-                    true  -> {AckTag, BQS2} = BQ:publish_delivered(
-                                                Msg, Props1, none, noflow, BQS),
-                             {[{Msg#basic_message.id, AckTag} | MA], BQS2}
-                end,
+            {MA1, BQS1} = process_batch(Batch, MA, BQ, BQS),
             slave_sync_loop(Args, {MA1, TRef, BQS1});
         {'EXIT', Parent, Reason} ->
             {stop, Reason, State};
@@ -277,3 +365,52 @@ slave_sync_loop(Args = {Ref, MRef, Syncer, BQ, UpdateRamDuration, Parent},
             BQ:delete_and_terminate(Reason, BQS),
             {stop, Reason, {[], TRef, undefined}}
     end.
+
+%% We are partitioning messages by the Unacked element in the tuple.
+%% when unacked = true, then it's a publish_delivered message,
+%% otherwise it's a publish message.
+%%
+%% Note that we can't first partition the batch and then publish each
+%% part, since that would result in re-ordering messages, which we
+%% don't want to do.
+process_batch([], MA, _BQ, BQS) ->
+    {MA, BQS};
+process_batch(Batch, MA, BQ, BQS) ->
+    {_Msg, _MsgProps, Unacked} = hd(Batch),
+    process_batch(Batch, Unacked, [], MA, BQ, BQS).
+
+process_batch([{Msg, Props, true = Unacked} | Rest], true = Unacked,
+              Acc, MA, BQ, BQS) ->
+    %% publish_delivered messages don't need the IsDelivered flag,
+    %% therefore we just add {Msg, Props} to the accumulator.
+    process_batch(Rest, Unacked, [{Msg, props(Props)} | Acc],
+                  MA, BQ, BQS);
+process_batch([{Msg, Props, false = Unacked} | Rest], false = Unacked,
+              Acc, MA, BQ, BQS) ->
+    %% publish messages needs the IsDelivered flag which is set to true
+    %% here.
+    process_batch(Rest, Unacked, [{Msg, props(Props), true} | Acc],
+                  MA, BQ, BQS);
+process_batch(Batch, Unacked, Acc, MA, BQ, BQS) ->
+    {MA1, BQS1} = publish_batch(Unacked, lists:reverse(Acc), MA, BQ, BQS),
+    process_batch(Batch, MA1, BQ, BQS1).
+
+%% Unacked msgs are published via batch_publish.
+publish_batch(false, Batch, MA, BQ, BQS) ->
+    batch_publish(Batch, MA, BQ, BQS);
+%% Acked msgs are published via batch_publish_delivered.
+publish_batch(true, Batch, MA, BQ, BQS) ->
+    batch_publish_delivered(Batch, MA, BQ, BQS).
+
+
+batch_publish(Batch, MA, BQ, BQS) ->
+    BQS1 = BQ:batch_publish(Batch, none, noflow, BQS),
+    {MA, BQS1}.
+
+batch_publish_delivered(Batch, MA, BQ, BQS) ->
+    {AckTags, BQS1} = BQ:batch_publish_delivered(Batch, none, noflow, BQS),
+    MA1 = BQ:zip_msgs_and_acks(Batch, AckTags, MA, BQS1),
+    {MA1, BQS1}.
+
+props(Props) ->
+    Props#message_properties{needs_confirming = false}.
similarity index 83%
rename from rabbitmq-server/src/rabbit_mnesia.erl
rename to deps/rabbit/src/rabbit_mnesia.erl
index d480bef7caf6ee7ce8155a3f8568bfc9e4fcc641..596eb62b031073d43067babc5df91c817cd005cb 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mnesia).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([node_type/0, cluster_status/0]).
 
--type(node_type() :: disc | ram).
--type(cluster_status() :: {[node()], [node()], [node()]}).
+-type node_type() :: disc | ram.
+-type cluster_status() :: {[node()], [node()], [node()]}.
 
 %% Main interface
--spec(init/0 :: () -> 'ok').
--spec(join_cluster/2 :: (node(), node_type())
-                        -> 'ok' | {'ok', 'already_member'}).
--spec(reset/0 :: () -> 'ok').
--spec(force_reset/0 :: () -> 'ok').
--spec(update_cluster_nodes/1 :: (node()) -> 'ok').
--spec(change_cluster_node_type/1 :: (node_type()) -> 'ok').
--spec(forget_cluster_node/2 :: (node(), boolean()) -> 'ok').
--spec(force_load_next_boot/0 :: () -> 'ok').
+-spec init() -> 'ok'.
+-spec join_cluster(node(), node_type())
+                        -> ok | {ok, already_member} | {error, {inconsistent_cluster, string()}}.
+-spec reset() -> 'ok'.
+-spec force_reset() -> 'ok'.
+-spec update_cluster_nodes(node()) -> 'ok'.
+-spec change_cluster_node_type(node_type()) -> 'ok'.
+-spec forget_cluster_node(node(), boolean()) -> 'ok'.
+-spec force_load_next_boot() -> 'ok'.
 
 %% Various queries to get the status of the db
--spec(status/0 :: () -> [{'nodes', [{node_type(), [node()]}]} |
+-spec status() -> [{'nodes', [{node_type(), [node()]}]} |
                          {'running_nodes', [node()]} |
-                         {'partitions', [{node(), [node()]}]}]).
--spec(is_clustered/0 :: () -> boolean()).
--spec(on_running_node/1 :: (pid()) -> boolean()).
--spec(is_process_alive/1 :: (pid()) -> boolean()).
--spec(cluster_nodes/1 :: ('all' | 'disc' | 'ram' | 'running') -> [node()]).
--spec(node_type/0 :: () -> node_type()).
--spec(dir/0 :: () -> file:filename()).
--spec(cluster_status_from_mnesia/0 :: () -> rabbit_types:ok_or_error2(
-                                              cluster_status(), any())).
+                         {'partitions', [{node(), [node()]}]}].
+-spec is_clustered() -> boolean().
+-spec on_running_node(pid()) -> boolean().
+-spec is_process_alive(pid()) -> boolean().
+-spec cluster_nodes('all' | 'disc' | 'ram' | 'running') -> [node()].
+-spec node_type() -> node_type().
+-spec dir() -> file:filename().
+-spec cluster_status_from_mnesia() -> rabbit_types:ok_or_error2(
+                                              cluster_status(), any()).
 
 %% Operations on the db and utils, mainly used in `rabbit_upgrade' and `rabbit'
--spec(init_db_unchecked/2 :: ([node()], node_type()) -> 'ok').
--spec(copy_db/1 :: (file:filename()) ->  rabbit_types:ok_or_error(any())).
--spec(check_cluster_consistency/0 :: () -> 'ok').
--spec(ensure_mnesia_dir/0 :: () -> 'ok').
+-spec init_db_unchecked([node()], node_type()) -> 'ok'.
+-spec copy_db(file:filename()) ->  rabbit_types:ok_or_error(any()).
+-spec check_cluster_consistency() -> 'ok'.
+-spec ensure_mnesia_dir() -> 'ok'.
 
 %% Hooks used in `rabbit_node_monitor'
--spec(on_node_up/1 :: (node()) -> 'ok').
--spec(on_node_down/1 :: (node()) -> 'ok').
-
--endif.
+-spec on_node_up(node()) -> 'ok'.
+-spec on_node_down(node()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 %% Main interface
@@ -101,10 +97,14 @@ init() ->
     ensure_mnesia_running(),
     ensure_mnesia_dir(),
     case is_virgin_node() of
-        true  -> init_from_config();
-        false -> NodeType = node_type(),
-                 init_db_and_upgrade(cluster_nodes(all), NodeType,
-                                     NodeType =:= ram)
+        true  ->
+            rabbit_log:info("Database directory at ~s is empty. Initialising from scratch...~n",
+                            [dir()]),
+            init_from_config();
+        false ->
+            NodeType = node_type(),
+            init_db_and_upgrade(cluster_nodes(all), NodeType,
+                                NodeType =:= ram)
     end,
     %% We intuitively expect the global name server to be synced when
     %% Mnesia is up. In fact that's not guaranteed to be the case -
@@ -200,8 +200,17 @@ join_cluster(DiscoveryNode, NodeType) ->
                     {error, Reason}
             end;
         true ->
-            rabbit_log:info("Already member of cluster: ~p~n", [ClusterNodes]),
-            {ok, already_member}
+            %% DiscoveryNode thinks that we are part of a cluster, but
+            %% do we think so ourselves?
+            case are_we_clustered_with(DiscoveryNode) of
+                true ->
+                    rabbit_log:info("Asked to join a cluster but already a member of it: ~p~n", [ClusterNodes]),
+                    {ok, already_member};
+                false ->
+                    Msg = format_inconsistent_cluster_message(DiscoveryNode, node()),
+                    rabbit_log:error(Msg),
+                    {error, {inconsistent_cluster, Msg}}
+            end
     end.
 
 %% return node to its virgin state, where it is not member of any
@@ -419,6 +428,7 @@ cluster_status(WhichNodes) ->
 
 node_info() ->
     {rabbit_misc:otp_release(), rabbit_misc:version(),
+     mnesia:system_info(protocol_version),
      cluster_status_from_mnesia()}.
 
 node_type() ->
@@ -589,26 +599,37 @@ check_cluster_consistency() ->
     end.
 
 check_cluster_consistency(Node, CheckNodesConsistency) ->
-    case rpc:call(Node, rabbit_mnesia, node_info, []) of
+    case remote_node_info(Node) of
         {badrpc, _Reason} ->
             {error, not_found};
-        {_OTP, _Rabbit, {error, _}} ->
+        {_OTP, Rabbit, DelegateModuleHash, _Status} when is_binary(DelegateModuleHash) ->
+            %% when a delegate module .beam file hash is present
+            %% in the tuple, we are dealing with an old version
+            rabbit_version:version_error("Rabbit", rabbit_misc:version(), Rabbit);
+        {_OTP, _Rabbit, _Protocol, {error, _}} ->
             {error, not_found};
-        {OTP, Rabbit, {ok, Status}} when CheckNodesConsistency ->
-            case check_consistency(OTP, Rabbit, Node, Status) of
+        {OTP, Rabbit, Protocol, {ok, Status}} when CheckNodesConsistency ->
+            case check_consistency(Node, OTP, Rabbit, Protocol, Status) of
                 {error, _} = E -> E;
                 {ok, Res}      -> {ok, Res}
             end;
-        {OTP, Rabbit, {ok, Status}} ->
-            case check_consistency(OTP, Rabbit) of
+        {OTP, Rabbit, Protocol, {ok, Status}} ->
+            case check_consistency(Node, OTP, Rabbit, Protocol) of
                 {error, _} = E -> E;
                 ok             -> {ok, Status}
-            end;
-        {_OTP, Rabbit, _Hash, _Status} ->
-            %% delegate hash checking implies version mismatch
-            version_error("Rabbit", rabbit_misc:version(), Rabbit)
+            end
+    end.
+
+remote_node_info(Node) ->
+    case rpc:call(Node, rabbit_mnesia, node_info, []) of
+        {badrpc, _} = Error   -> Error;
+        %% RabbitMQ prior to 3.6.2
+        {OTP, Rabbit, Status} -> {OTP, Rabbit, unsupported, Status};
+        %% RabbitMQ 3.6.2 or later
+        {OTP, Rabbit, Protocol, Status} -> {OTP, Rabbit, Protocol, Status}
     end.
 
+
 %%--------------------------------------------------------------------
 %% Hooks for `rabbit_node_monitor'
 %%--------------------------------------------------------------------
@@ -759,14 +780,14 @@ change_extra_db_nodes(ClusterNodes0, CheckOtherNodes) ->
             Nodes
     end.
 
-check_consistency(OTP, Rabbit) ->
+check_consistency(Node, OTP, Rabbit, ProtocolVersion) ->
     rabbit_misc:sequence_error(
-      [check_otp_consistency(OTP),
+      [check_mnesia_or_otp_consistency(Node, ProtocolVersion, OTP),
        check_rabbit_consistency(Rabbit)]).
 
-check_consistency(OTP, Rabbit, Node, Status) ->
+check_consistency(Node, OTP, Rabbit, ProtocolVersion, Status) ->
     rabbit_misc:sequence_error(
-      [check_otp_consistency(OTP),
+      [check_mnesia_or_otp_consistency(Node, ProtocolVersion, OTP),
        check_rabbit_consistency(Rabbit),
        check_nodes_consistency(Node, Status)]).
 
@@ -776,30 +797,60 @@ check_nodes_consistency(Node, RemoteStatus = {RemoteAllNodes, _, _}) ->
             {ok, RemoteStatus};
         false ->
             {error, {inconsistent_cluster,
-                     rabbit_misc:format("Node ~p thinks it's clustered "
-                                        "with node ~p, but ~p disagrees",
-                                        [node(), Node, Node])}}
+                     format_inconsistent_cluster_message(node(), Node)}}
     end.
 
-check_version_consistency(This, Remote, Name) ->
-    check_version_consistency(This, Remote, Name, fun (A, B) -> A =:= B end).
-
-check_version_consistency(This, Remote, Name, Comp) ->
-    case Comp(This, Remote) of
-        true  -> ok;
-        false -> version_error(Name, This, Remote)
+check_mnesia_or_otp_consistency(_Node, unsupported, OTP) ->
+    rabbit_version:check_otp_consistency(OTP);
+check_mnesia_or_otp_consistency(Node, ProtocolVersion, _) ->
+    check_mnesia_consistency(Node, ProtocolVersion).
+
+check_mnesia_consistency(Node, ProtocolVersion) ->
+    % If mnesia is running we will just check protocol version
+    % If it's not running, we don't want it to join cluster until all checks pass
+    % so we start it without `dir` env variable to prevent
+    % joining cluster and/or corrupting data
+    with_running_or_clean_mnesia(fun() ->
+        case negotiate_protocol([Node]) of
+            [Node] -> ok;
+            []     ->
+                LocalVersion = mnesia:system_info(protocol_version),
+                {error, {inconsistent_cluster,
+                         rabbit_misc:format("Mnesia protocol negotiation failed."
+                                            " Local version: ~p."
+                                            " Remote version ~p",
+                                            [LocalVersion, ProtocolVersion])}}
+        end
+    end).
+
+negotiate_protocol([Node]) ->
+    mnesia_monitor:negotiate_protocol([Node]).
+
+with_running_or_clean_mnesia(Fun) ->
+    IsMnesiaRunning = case mnesia:system_info(is_running) of
+        yes      -> true;
+        no       -> false;
+        stopping ->
+            ensure_mnesia_not_running(),
+            false;
+        starting ->
+            ensure_mnesia_running(),
+            true
+    end,
+    case IsMnesiaRunning of
+        true  -> Fun();
+        false ->
+            {ok, MnesiaDir} = application:get_env(mnesia, dir),
+            application:unset_env(mnesia, dir),
+            mnesia:start(),
+            Result = Fun(),
+            application:stop(mnesia),
+            application:set_env(mnesia, dir, MnesiaDir),
+            Result
     end.
 
-version_error(Name, This, Remote) ->
-    {error, {inconsistent_cluster,
-             rabbit_misc:format("~s version mismatch: local node is ~s, "
-                                "remote node ~s", [Name, This, Remote])}}.
-
-check_otp_consistency(Remote) ->
-    check_version_consistency(rabbit_misc:otp_release(), Remote, "OTP").
-
 check_rabbit_consistency(Remote) ->
-    check_version_consistency(
+    rabbit_version:check_version_consistency(
       rabbit_misc:version(), Remote, "Rabbit",
       fun rabbit_misc:version_minor_equivalent/2).
 
@@ -832,22 +883,29 @@ find_auto_cluster_node([Node | Nodes]) ->
                      "Could not auto-cluster with ~s: " ++ Fmt, [Node | Args]),
                    find_auto_cluster_node(Nodes)
            end,
-    case rpc:call(Node, rabbit_mnesia, node_info, []) of
-        {badrpc, _} = Reason         -> Fail("~p~n", [Reason]);
+    case remote_node_info(Node) of
+        {badrpc, _} = Reason ->
+            Fail("~p~n", [Reason]);
         %% old delegate hash check
-        {_OTP, RMQ, _Hash, _}        -> Fail("version ~s~n", [RMQ]);
-        {_OTP, _RMQ, {error, _} = E} -> Fail("~p~n", [E]);
-        {OTP, RMQ, _}                -> case check_consistency(OTP, RMQ) of
-                                            {error, _} -> Fail("versions ~p~n",
-                                                               [{OTP, RMQ}]);
-                                            ok         -> {ok, Node}
-                                        end
+        {_OTP, RMQ, Hash, _} when is_binary(Hash) ->
+            Fail("version ~s~n", [RMQ]);
+        {_OTP, _RMQ, _Protocol, {error, _} = E} ->
+            Fail("~p~n", [E]);
+        {OTP, RMQ, Protocol, _} ->
+            case check_consistency(Node, OTP, RMQ, Protocol) of
+                {error, _} -> Fail("versions ~p~n",
+                                   [{OTP, RMQ}]);
+                ok         -> {ok, Node}
+            end
     end.
 
 is_only_clustered_disc_node() ->
     node_type() =:= disc andalso is_clustered() andalso
         cluster_nodes(disc) =:= [node()].
 
+are_we_clustered_with(Node) ->
+    lists:member(Node, mnesia_lib:all_nodes()).
+
 me_in_nodes(Nodes) -> lists:member(node(), Nodes).
 
 nodes_incl_me(Nodes) -> lists:usort([node()|Nodes]).
@@ -898,3 +956,8 @@ error_description(removing_node_from_offline_node) ->
         "from must be a disc node and all the other nodes must be offline.";
 error_description(no_running_cluster_nodes) ->
     "You cannot leave a cluster if no online nodes are present.".
+
+format_inconsistent_cluster_message(Thinker, Dissident) ->
+    rabbit_misc:format("Node ~p thinks it's clustered "
+                       "with node ~p, but ~p disagrees",
+                       [Thinker, Dissident, Dissident]).
similarity index 98%
rename from rabbitmq-server/src/rabbit_mnesia_rename.erl
rename to deps/rabbit/src/rabbit_mnesia_rename.erl
index 260421658d5d98096d36289e25afcd4386497d49..0945e31522341c961e4a4ca69df5a38f46aa7ac2 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mnesia_rename).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(rename/2 :: (node(), [{node(), node()}]) -> 'ok').
--spec(maybe_finish/1 :: ([node()]) -> 'ok').
-
--endif.
+-spec rename(node(), [{node(), node()}]) -> 'ok'.
+-spec maybe_finish([node()]) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
similarity index 85%
rename from rabbitmq-server/src/rabbit_msg_file.erl
rename to deps/rabbit/src/rabbit_msg_file.erl
index 06879b9f6dbfa56d471cb6634ff0bb1db9a106be..5c0acc5ffd903f53da74f11ba76d304c4b2d4f0f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_msg_file).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(io_device() :: any()).
--type(position() :: non_neg_integer()).
--type(msg_size() :: non_neg_integer()).
--type(file_size() :: non_neg_integer()).
--type(message_accumulator(A) ::
+-type io_device() :: any().
+-type position() :: non_neg_integer().
+-type msg_size() :: non_neg_integer().
+-type file_size() :: non_neg_integer().
+-type message_accumulator(A) ::
         fun (({rabbit_types:msg_id(), msg_size(), position(), binary()}, A) ->
-                    A)).
-
--spec(append/3 :: (io_device(), rabbit_types:msg_id(), msg()) ->
-                       rabbit_types:ok_or_error2(msg_size(), any())).
--spec(read/2 :: (io_device(), msg_size()) ->
-                     rabbit_types:ok_or_error2({rabbit_types:msg_id(), msg()},
-                                               any())).
--spec(scan/4 :: (io_device(), file_size(), message_accumulator(A), A) ->
-                     {'ok', A, position()}).
-
--endif.
+            A).
+
+-spec append(io_device(), rabbit_types:msg_id(), msg()) ->
+          rabbit_types:ok_or_error2(msg_size(), any()).
+-spec read(io_device(), msg_size()) ->
+          rabbit_types:ok_or_error2({rabbit_types:msg_id(), msg()},
+                                    any()).
+-spec scan(io_device(), file_size(), message_accumulator(A), A) ->
+          {'ok', A, position()}.
 
 %%----------------------------------------------------------------------------
 
similarity index 93%
rename from rabbitmq-server/src/rabbit_msg_store.erl
rename to deps/rabbit/src/rabbit_msg_store.erl
index 8909484984d7f3c35931f2fc9fba3f7f2636a48f..d3ff077c8ba456179e6e8f1c309de251c00faa03 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_msg_store).
 %%----------------------------------------------------------------------------
 
 -record(msstate,
-        { dir,                    %% store directory
-          index_module,           %% the module for index ops
-          index_state,            %% where are messages?
-          current_file,           %% current file name as number
-          current_file_handle,    %% current file handle since the last fsync?
-          file_handle_cache,      %% file handle cache
-          sync_timer_ref,         %% TRef for our interval timer
-          sum_valid_data,         %% sum of valid data in all files
-          sum_file_size,          %% sum of file sizes
-          pending_gc_completion,  %% things to do once GC completes
-          gc_pid,                 %% pid of our GC
-          file_handles_ets,       %% tid of the shared file handles table
-          file_summary_ets,       %% tid of the file summary table
-          cur_file_cache_ets,     %% tid of current file cache table
-          flying_ets,             %% tid of writes/removes in flight
-          dying_clients,          %% set of dying clients
-          clients,                %% map of references of all registered clients
-                                  %% to callbacks
-          successfully_recovered, %% boolean: did we recover state?
-          file_size_limit,        %% how big are our files allowed to get?
-          cref_to_msg_ids,        %% client ref to synced messages mapping
-          credit_disc_bound       %% See rabbit.hrl CREDIT_DISC_BOUND
+        {
+          %% store directory
+          dir,
+          %% the module for index ops,
+          %% rabbit_msg_store_ets_index by default
+          index_module,
+          %% where are messages?
+          index_state,
+          %% current file name as number
+          current_file,
+          %% current file handle since the last fsync?
+          current_file_handle,
+          %% file handle cache
+          file_handle_cache,
+          %% TRef for our interval timer
+          sync_timer_ref,
+          %% sum of valid data in all files
+          sum_valid_data,
+          %% sum of file sizes
+          sum_file_size,
+          %% things to do once GC completes
+          pending_gc_completion,
+          %% pid of our GC
+          gc_pid,
+          %% tid of the shared file handles table
+          file_handles_ets,
+          %% tid of the file summary table
+          file_summary_ets,
+          %% tid of current file cache table
+          cur_file_cache_ets,
+          %% tid of writes/removes in flight
+          flying_ets,
+          %% set of dying clients
+          dying_clients,
+          %% index of file positions for client death messages
+          dying_client_index,
+          %% map of references of all registered clients
+          %% to callbacks
+          clients,
+          %% boolean: did we recover state?
+          successfully_recovered,
+          %% how big are our files allowed to get?
+          file_size_limit,
+          %% client ref to synced messages mapping
+          cref_to_msg_ids,
+          %% See CREDIT_DISC_BOUND in rabbit.hrl
+          credit_disc_bound
         }).
 
 -record(client_msstate,
           msg_store
         }).
 
-%%----------------------------------------------------------------------------
+-record(dying_client,
+        { client_ref,
+          file,
+          offset
+        }).
 
--ifdef(use_specs).
+%%----------------------------------------------------------------------------
 
 -export_type([gc_state/0, file_num/0]).
 
--type(gc_state() :: #gc_state { dir              :: file:filename(),
+-type gc_state() :: #gc_state { dir              :: file:filename(),
                                 index_module     :: atom(),
                                 index_state      :: any(),
                                 file_summary_ets :: ets:tid(),
                                 file_handles_ets :: ets:tid(),
                                 msg_store        :: server()
-                              }).
+                              }.
 
--type(server() :: pid() | atom()).
--type(client_ref() :: binary()).
--type(file_num() :: non_neg_integer()).
--type(client_msstate() :: #client_msstate {
+-type server() :: pid() | atom().
+-type client_ref() :: binary().
+-type file_num() :: non_neg_integer().
+-type client_msstate() :: #client_msstate {
                       server             :: server(),
                       client_ref         :: client_ref(),
-                      file_handle_cache  :: dict:dict(),
+                      file_handle_cache  :: ?DICT_TYPE(),
                       index_state        :: any(),
                       index_module       :: atom(),
                       dir                :: file:filename(),
                       file_summary_ets   :: ets:tid(),
                       cur_file_cache_ets :: ets:tid(),
                       flying_ets         :: ets:tid(),
-                      credit_disc_bound  :: {pos_integer(), pos_integer()}}).
--type(msg_ref_delta_gen(A) ::
+                      credit_disc_bound  :: {pos_integer(), pos_integer()}}.
+-type msg_ref_delta_gen(A) ::
         fun ((A) -> 'finished' |
-                    {rabbit_types:msg_id(), non_neg_integer(), A})).
--type(maybe_msg_id_fun() ::
-        'undefined' | fun ((gb_sets:set(), 'written' | 'ignored') -> any())).
--type(maybe_close_fds_fun() :: 'undefined' | fun (() -> 'ok')).
--type(deletion_thunk() :: fun (() -> boolean())).
+                    {rabbit_types:msg_id(), non_neg_integer(), A}).
+-type maybe_msg_id_fun() ::
+        'undefined' | fun ((?GB_SET_TYPE(), 'written' | 'ignored') -> any()).
+-type maybe_close_fds_fun() :: 'undefined' | fun (() -> 'ok').
+-type deletion_thunk() :: fun (() -> boolean()).
 
--spec(start_link/4 ::
+-spec start_link
         (atom(), file:filename(), [binary()] | 'undefined',
-         {msg_ref_delta_gen(A), A}) -> rabbit_types:ok_pid_or_error()).
--spec(successfully_recovered_state/1 :: (server()) -> boolean()).
--spec(client_init/4 :: (server(), client_ref(), maybe_msg_id_fun(),
-                        maybe_close_fds_fun()) -> client_msstate()).
--spec(client_terminate/1 :: (client_msstate()) -> 'ok').
--spec(client_delete_and_terminate/1 :: (client_msstate()) -> 'ok').
--spec(client_ref/1 :: (client_msstate()) -> client_ref()).
--spec(close_all_indicated/1 ::
-        (client_msstate()) -> rabbit_types:ok(client_msstate())).
--spec(write/3 :: (rabbit_types:msg_id(), msg(), client_msstate()) -> 'ok').
--spec(write_flow/3 :: (rabbit_types:msg_id(), msg(), client_msstate()) -> 'ok').
--spec(read/2 :: (rabbit_types:msg_id(), client_msstate()) ->
-                     {rabbit_types:ok(msg()) | 'not_found', client_msstate()}).
--spec(contains/2 :: (rabbit_types:msg_id(), client_msstate()) -> boolean()).
--spec(remove/2 :: ([rabbit_types:msg_id()], client_msstate()) -> 'ok').
-
--spec(set_maximum_since_use/2 :: (server(), non_neg_integer()) -> 'ok').
--spec(has_readers/2 :: (non_neg_integer(), gc_state()) -> boolean()).
--spec(combine_files/3 :: (non_neg_integer(), non_neg_integer(), gc_state()) ->
-                              deletion_thunk()).
--spec(delete_file/2 :: (non_neg_integer(), gc_state()) -> deletion_thunk()).
--spec(force_recovery/2 :: (file:filename(), server()) -> 'ok').
--spec(transform_dir/3 :: (file:filename(), server(),
-        fun ((any()) -> (rabbit_types:ok_or_error2(msg(), any())))) -> 'ok').
-
--endif.
+         {msg_ref_delta_gen(A), A}) -> rabbit_types:ok_pid_or_error().
+-spec successfully_recovered_state(server()) -> boolean().
+-spec client_init(server(), client_ref(), maybe_msg_id_fun(),
+                        maybe_close_fds_fun()) -> client_msstate().
+-spec client_terminate(client_msstate()) -> 'ok'.
+-spec client_delete_and_terminate(client_msstate()) -> 'ok'.
+-spec client_ref(client_msstate()) -> client_ref().
+-spec close_all_indicated
+        (client_msstate()) -> rabbit_types:ok(client_msstate()).
+-spec write(rabbit_types:msg_id(), msg(), client_msstate()) -> 'ok'.
+-spec write_flow(rabbit_types:msg_id(), msg(), client_msstate()) -> 'ok'.
+-spec read(rabbit_types:msg_id(), client_msstate()) ->
+                     {rabbit_types:ok(msg()) | 'not_found', client_msstate()}.
+-spec contains(rabbit_types:msg_id(), client_msstate()) -> boolean().
+-spec remove([rabbit_types:msg_id()], client_msstate()) -> 'ok'.
+
+-spec set_maximum_since_use(server(), non_neg_integer()) -> 'ok'.
+-spec has_readers(non_neg_integer(), gc_state()) -> boolean().
+-spec combine_files(non_neg_integer(), non_neg_integer(), gc_state()) ->
+                              deletion_thunk().
+-spec delete_file(non_neg_integer(), gc_state()) -> deletion_thunk().
+-spec force_recovery(file:filename(), server()) -> 'ok'.
+-spec transform_dir(file:filename(), server(),
+        fun ((any()) -> (rabbit_types:ok_or_error2(msg(), any())))) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
 %% It is not recommended to set this to < 0.5
 -define(GARBAGE_FRACTION,      0.5).
 
+%% Message store is responsible for storing messages
+%% on disk and loading them back. The store handles both
+%% persistent messages and transient ones (when a node
+%% is under RAM pressure and needs to page messages out
+%% to disk). The store is responsible for locating messages
+%% on disk and maintaining an index.
+%%
+%% There are two message stores per node: one for transient
+%% and one for persistent messages.
+%%
+%% Queue processes interact with the stores via clients.
+%%
 %% The components:
 %%
-%% Index: this is a mapping from MsgId to #msg_location{}:
-%%        {MsgId, RefCount, File, Offset, TotalSize}
-%%        By default, it's in ets, but it's also pluggable.
-%% FileSummary: this is an ets table which maps File to #file_summary{}:
-%%        {File, ValidTotalSize, Left, Right, FileSize, Locked, Readers}
+%% Index: this is a mapping from MsgId to #msg_location{}.
+%%        By default, it's in ETS, but other implementations can
+%%        be used.
+%% FileSummary: this maps File to #file_summary{} and is stored
+%%              in ETS.
 %%
 %% The basic idea is that messages are appended to the current file up
 %% until that file becomes too big (> file_size_limit). At that point,
 %% eldest file.
 %%
 %% We need to keep track of which messages are in which files (this is
-%% the Index); how much useful data is in each file and which files
+%% the index); how much useful data is in each file and which files
 %% are on the left and right of each other. This is the purpose of the
-%% FileSummary ets table.
+%% file summary ETS table.
 %%
 %% As messages are removed from files, holes appear in these
 %% files. The field ValidTotalSize contains the total amount of useful
 %% which will compact the two files together. This keeps disk
 %% utilisation high and aids performance. We deliberately do this
 %% lazily in order to prevent doing GC on files which are soon to be
-%% emptied (and hence deleted) soon.
+%% emptied (and hence deleted).
 %%
 %% Given the compaction between two files, the left file (i.e. elder
 %% file) is considered the ultimate destination for the good data in
 %% file, then read back in to form a contiguous chunk of good data at
 %% the start of the left file. Thus the left file is garbage collected
 %% and compacted. Then the good data from the right file is copied
-%% onto the end of the left file. Index and FileSummary tables are
+%% onto the end of the left file. Index and file summary tables are
 %% updated.
 %%
 %% On non-clean startup, we scan the files we discover, dealing with
 %% the possibilites of a crash having occured during a compaction
 %% (this consists of tidyup - the compaction is deliberately designed
 %% such that data is duplicated on disk rather than risking it being
-%% lost), and rebuild the FileSummary ets table and Index.
+%% lost), and rebuild the file summary and index ETS table.
 %%
 %% So, with this design, messages move to the left. Eventually, they
 %% should end up in a contiguous block on the left and are then never
 %% queue, though it's likely that that's pessimistic, given the
 %% requirements for compaction/combination of files.
 %%
-%% The other property is that we have is the bound on the lowest
+%% The other property that we have is the bound on the lowest
 %% utilisation, which should be 50% - worst case is that all files are
 %% fractionally over half full and can't be combined (equivalent is
 %% alternating full files and files with only one tiny message in
 %% performance with many healthy clients and few, if any, dying
 %% clients, which is the typical case.
 %%
+%% Client termination messages are stored in a separate ets index to
+%% avoid filling primary message store index and message files with
+%% client termination messages.
+%%
 %% When the msg_store has a backlog (i.e. it has unprocessed messages
 %% in its mailbox / gen_server priority queue), a further optimisation
 %% opportunity arises: we can eliminate pairs of 'write' and 'remove'
 %% address. See the comments in the code.
 %%
 %% For notes on Clean Shutdown and startup, see documentation in
-%% variable_queue.
+%% rabbit_variable_queue.
 
 %%----------------------------------------------------------------------------
 %% public API
@@ -656,7 +699,9 @@ client_update_flying(Diff, MsgId, #client_msstate { flying_ets = FlyingEts,
     end.
 
 clear_client(CRef, State = #msstate { cref_to_msg_ids = CTM,
-                                      dying_clients = DyingClients }) ->
+                                      dying_clients = DyingClients,
+                                      dying_client_index = DyingIndex }) ->
+    ets:delete(DyingIndex, CRef),
     State #msstate { cref_to_msg_ids = dict:erase(CRef, CTM),
                      dying_clients = sets:del_element(CRef, DyingClients) }.
 
@@ -710,6 +755,8 @@ init([Server, BaseDir, ClientRefs, StartupFunState]) ->
                               [ordered_set, public]),
     CurFileCacheEts = ets:new(rabbit_msg_store_cur_file, [set, public]),
     FlyingEts       = ets:new(rabbit_msg_store_flying, [set, public]),
+    DyingIndex      = ets:new(rabbit_msg_store_dying_client_index,
+                              [set, public, {keypos, #dying_client.client_ref}]),
 
     {ok, FileSizeLimit} = application:get_env(msg_store_file_size_limit),
 
@@ -741,6 +788,7 @@ init([Server, BaseDir, ClientRefs, StartupFunState]) ->
                        cur_file_cache_ets     = CurFileCacheEts,
                        flying_ets             = FlyingEts,
                        dying_clients          = sets:new(),
+                       dying_client_index     = DyingIndex,
                        clients                = Clients,
                        successfully_recovered = CleanShutdown,
                        file_size_limit        = FileSizeLimit,
@@ -817,15 +865,21 @@ handle_call({contains, MsgId}, From, State) ->
     noreply(State1).
 
 handle_cast({client_dying, CRef},
-            State = #msstate { dying_clients = DyingClients }) ->
+            State = #msstate { dying_clients       = DyingClients,
+                               dying_client_index  = DyingIndex,
+                               current_file_handle = CurHdl,
+                               current_file        = CurFile }) ->
     DyingClients1 = sets:add_element(CRef, DyingClients),
-    noreply(write_message(CRef, <<>>,
-                          State #msstate { dying_clients = DyingClients1 }));
+    {ok, CurOffset} = file_handle_cache:current_virtual_offset(CurHdl),
+    true = ets:insert_new(DyingIndex, #dying_client{client_ref = CRef,
+                                                    file = CurFile,
+                                                    offset = CurOffset}),
+    noreply(State #msstate { dying_clients = DyingClients1 });
 
 handle_cast({client_delete, CRef},
             State = #msstate { clients = Clients }) ->
     State1 = State #msstate { clients = dict:erase(CRef, Clients) },
-    noreply(remove_message(CRef, CRef, clear_client(CRef, State1)));
+    noreply(clear_client(CRef, State1));
 
 handle_cast({write, CRef, MsgId, Flow},
             State = #msstate { cur_file_cache_ets = CurFileCacheEts,
@@ -1303,7 +1357,8 @@ blind_confirm(CRef, MsgIds, ActionTaken, State) ->
 %% msg and thus should be ignored. Note that this (correctly) returns
 %% false when testing to remove the death msg itself.
 should_mask_action(CRef, MsgId,
-                   State = #msstate { dying_clients = DyingClients }) ->
+                   State = #msstate { dying_clients = DyingClients,
+                                      dying_client_index = DyingIndex }) ->
     case {sets:is_element(CRef, DyingClients), index_lookup(MsgId, State)} of
         {false, Location} ->
             {false, Location};
@@ -1311,8 +1366,8 @@ should_mask_action(CRef, MsgId,
             {true, not_found};
         {true, #msg_location { file = File, offset = Offset,
                                ref_count = RefCount } = Location} ->
-            #msg_location { file = DeathFile, offset = DeathOffset } =
-                index_lookup(CRef, State),
+            [#dying_client { file = DeathFile, offset = DeathOffset }] =
+                ets:lookup(DyingIndex, CRef),
             {case {{DeathFile, DeathOffset} < {File, Offset}, RefCount} of
                  {true,  _} -> true;
                  {false, 0} -> false_if_increment;
@@ -1325,9 +1380,10 @@ should_mask_action(CRef, MsgId,
 %%----------------------------------------------------------------------------
 
 open_file(Dir, FileName, Mode) ->
-    file_handle_cache:open(form_filename(Dir, FileName), ?BINARY_MODE ++ Mode,
-                           [{write_buffer, ?HANDLE_CACHE_BUFFER_SIZE},
-                            {read_buffer,  ?HANDLE_CACHE_BUFFER_SIZE}]).
+    file_handle_cache:open_with_absolute_path(
+      form_filename(Dir, FileName), ?BINARY_MODE ++ Mode,
+      [{write_buffer, ?HANDLE_CACHE_BUFFER_SIZE},
+       {read_buffer,  ?HANDLE_CACHE_BUFFER_SIZE}]).
 
 close_handle(Key, CState = #client_msstate { file_handle_cache = FHC }) ->
     CState #client_msstate { file_handle_cache = close_handle(Key, FHC) };
@@ -2077,10 +2133,11 @@ transform_dir(BaseDir, Store, TransformFun) ->
 
 transform_msg_file(FileOld, FileNew, TransformFun) ->
     ok = rabbit_file:ensure_parent_dirs_exist(FileNew),
-    {ok, RefOld} = file_handle_cache:open(FileOld, [raw, binary, read], []),
-    {ok, RefNew} = file_handle_cache:open(FileNew, [raw, binary, write],
-                                          [{write_buffer,
-                                            ?HANDLE_CACHE_BUFFER_SIZE}]),
+    {ok, RefOld} = file_handle_cache:open_with_absolute_path(
+                     FileOld, [raw, binary, read], []),
+    {ok, RefNew} = file_handle_cache:open_with_absolute_path(
+                     FileNew, [raw, binary, write],
+                     [{write_buffer, ?HANDLE_CACHE_BUFFER_SIZE}]),
     {ok, _Acc, _IgnoreSize} =
         rabbit_msg_file:scan(
           RefOld, filelib:file_size(FileOld),
similarity index 97%
rename from rabbitmq-server/src/rabbit_msg_store_ets_index.erl
rename to deps/rabbit/src/rabbit_msg_store_ets_index.erl
index f3257e3112584d01e1a6ad3ac284aa188d60b51d..76ef112069c5cf66a391857b7e0e7c6b66cffd1a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_msg_store_ets_index).
similarity index 88%
rename from rabbitmq-server/src/rabbit_msg_store_gc.erl
rename to deps/rabbit/src/rabbit_msg_store_gc.erl
index 3a5b4d00453b6232102b24c5c795da9f1c01bff1..9cfdba8a8d5790a5bc543fd48dbfaf4d9a95958b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_msg_store_gc).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/1 :: (rabbit_msg_store:gc_state()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(combine/3 :: (pid(), rabbit_msg_store:file_num(),
-                    rabbit_msg_store:file_num()) -> 'ok').
--spec(delete/2 :: (pid(), rabbit_msg_store:file_num()) -> 'ok').
--spec(no_readers/2 :: (pid(), rabbit_msg_store:file_num()) -> 'ok').
--spec(stop/1 :: (pid()) -> 'ok').
--spec(set_maximum_since_use/2 :: (pid(), non_neg_integer()) -> 'ok').
-
--endif.
+-spec start_link(rabbit_msg_store:gc_state()) ->
+                           rabbit_types:ok_pid_or_error().
+-spec combine(pid(), rabbit_msg_store:file_num(),
+                    rabbit_msg_store:file_num()) -> 'ok'.
+-spec delete(pid(), rabbit_msg_store:file_num()) -> 'ok'.
+-spec no_readers(pid(), rabbit_msg_store:file_num()) -> 'ok'.
+-spec stop(pid()) -> 'ok'.
+-spec set_maximum_since_use(pid(), non_neg_integer()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
similarity index 90%
rename from rabbitmq-server/src/rabbit_node_monitor.erl
rename to deps/rabbit/src/rabbit_node_monitor.erl
index e3960c5c8a6902ac5251659229195643b10ede38..bea2a3fa9694eee317c6665a03c2f3fae8cd26cf 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_node_monitor).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-spec start_link() -> rabbit_types:ok_pid_or_error().
 
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
+-spec running_nodes_filename() -> string().
+-spec cluster_status_filename() -> string().
+-spec prepare_cluster_status_files() -> 'ok'.
+-spec write_cluster_status(rabbit_mnesia:cluster_status()) -> 'ok'.
+-spec read_cluster_status() -> rabbit_mnesia:cluster_status().
+-spec update_cluster_status() -> 'ok'.
+-spec reset_cluster_status() -> 'ok'.
 
--spec(running_nodes_filename/0 :: () -> string()).
--spec(cluster_status_filename/0 :: () -> string()).
--spec(prepare_cluster_status_files/0 :: () -> 'ok').
--spec(write_cluster_status/1 :: (rabbit_mnesia:cluster_status()) -> 'ok').
--spec(read_cluster_status/0 :: () -> rabbit_mnesia:cluster_status()).
--spec(update_cluster_status/0 :: () -> 'ok').
--spec(reset_cluster_status/0 :: () -> 'ok').
+-spec notify_node_up() -> 'ok'.
+-spec notify_joined_cluster() -> 'ok'.
+-spec notify_left_cluster(node()) -> 'ok'.
 
--spec(notify_node_up/0 :: () -> 'ok').
--spec(notify_joined_cluster/0 :: () -> 'ok').
--spec(notify_left_cluster/1 :: (node()) -> 'ok').
+-spec partitions() -> [node()].
+-spec partitions([node()]) -> [{node(), [node()]}].
+-spec status([node()]) -> {[{node(), [node()]}], [node()]}.
+-spec subscribe(pid()) -> 'ok'.
+-spec pause_partition_guard() -> 'ok' | 'pausing'.
 
--spec(partitions/0 :: () -> [node()]).
--spec(partitions/1 :: ([node()]) -> [{node(), [node()]}]).
--spec(status/1 :: ([node()]) -> {[{node(), [node()]}], [node()]}).
--spec(subscribe/1 :: (pid()) -> 'ok').
--spec(pause_partition_guard/0 :: () -> 'ok' | 'pausing').
-
--spec(all_rabbit_nodes_up/0 :: () -> boolean()).
--spec(run_outside_applications/2 :: (fun (() -> any()), boolean()) -> pid()).
--spec(ping_all/0 :: () -> 'ok').
--spec(alive_nodes/1 :: ([node()]) -> [node()]).
--spec(alive_rabbit_nodes/1 :: ([node()]) -> [node()]).
-
--endif.
+-spec all_rabbit_nodes_up() -> boolean().
+-spec run_outside_applications(fun (() -> any()), boolean()) -> pid().
+-spec ping_all() -> 'ok'.
+-spec alive_nodes([node()]) -> [node()].
+-spec alive_rabbit_nodes([node()]) -> [node()].
 
 %%----------------------------------------------------------------------------
 %% Start
@@ -288,24 +284,28 @@ workaround_global_hang() ->
     receive
         global_sync_done ->
             ok
-    after 15000 ->
+    after 10000 ->
             find_blocked_global_peers()
     end.
 
 find_blocked_global_peers() ->
+    Snapshot1 = snapshot_global_dict(),
+    timer:sleep(10000),
+    Snapshot2 = snapshot_global_dict(),
+    find_blocked_global_peers1(Snapshot2, Snapshot1).
+
+snapshot_global_dict() ->
     {status, _, _, [Dict | _]} = sys:get_status(global_name_server),
-    find_blocked_global_peers1(Dict).
+    [E || {{sync_tag_his, _}, _} = E <- Dict].
 
-find_blocked_global_peers1([{{sync_tag_his, Peer}, Timestamp} | Rest]) ->
-    Diff = timer:now_diff(erlang:now(), Timestamp),
-    if
-        Diff >= 10000 -> unblock_global_peer(Peer);
-        true          -> ok
+find_blocked_global_peers1([{{sync_tag_his, Peer}, _} = Item | Rest],
+  OlderSnapshot) ->
+    case lists:member(Item, OlderSnapshot) of
+        true  -> unblock_global_peer(Peer);
+        false -> ok
     end,
-    find_blocked_global_peers1(Rest);
-find_blocked_global_peers1([_ | Rest]) ->
-    find_blocked_global_peers1(Rest);
-find_blocked_global_peers1([]) ->
+    find_blocked_global_peers1(Rest, OlderSnapshot);
+find_blocked_global_peers1([], _) ->
     ok.
 
 unblock_global_peer(PeerNode) ->
@@ -336,7 +336,17 @@ init([]) ->
     process_flag(trap_exit, true),
     net_kernel:monitor_nodes(true, [nodedown_reason]),
     {ok, _} = mnesia:subscribe(system),
-    {ok, ensure_keepalive_timer(#state{monitors    = pmon:new(),
+    %% If the node has been restarted, Mnesia can trigger a system notification
+    %% before the monitor subscribes to receive them. To avoid autoheal blocking due to
+    %% the inconsistent database event never arriving, we begin monitoring all running
+    %% nodes as early as possible. The rest of the monitoring ops will only be triggered
+    %% when notifications arrive.
+    Nodes = possibly_partitioned_nodes(),
+    startup_log(Nodes),
+    Monitors = lists:foldl(fun(Node, Monitors0) ->
+                                  pmon:monitor({rabbit, Node}, Monitors0)
+                          end, pmon:new(), Nodes),
+    {ok, ensure_keepalive_timer(#state{monitors    = Monitors,
                                        subscribers = pmon:new(),
                                        partitions  = [],
                                        guid        = rabbit_guid:gen(),
@@ -410,7 +420,12 @@ handle_cast({check_partial_partition, Node, Rep, NodeGUID, MyGUID, RepGUID},
                    fun () ->
                            case rpc:call(Node, rabbit, is_running, []) of
                                {badrpc, _} -> ok;
-                               _           -> cast(Rep, {partial_partition,
+                               _           ->  
+                                  rabbit_log:warning("Received a 'DOWN' message" 
+                                                     " from ~p but still can" 
+                                                     " communicate with it ~n",
+                                                     [Node]),
+                                  cast(Rep, {partial_partition,
                                                          Node, node(), RepGUID})
                            end
                    end);
@@ -481,20 +496,22 @@ handle_cast({partial_partition_disconnect, Other}, State) ->
 %% mnesia propagation.
 handle_cast({node_up, Node, NodeType},
             State = #state{monitors = Monitors}) ->
-    case pmon:is_monitored({rabbit, Node}, Monitors) of
-        true  -> {noreply, State};
-        false -> rabbit_log:info("rabbit on node ~p up~n", [Node]),
-                 {AllNodes, DiscNodes, RunningNodes} = read_cluster_status(),
-                 write_cluster_status({add_node(Node, AllNodes),
-                                       case NodeType of
-                                           disc -> add_node(Node, DiscNodes);
-                                           ram  -> DiscNodes
-                                       end,
-                                       add_node(Node, RunningNodes)}),
-                 ok = handle_live_rabbit(Node),
-                 Monitors1 = pmon:monitor({rabbit, Node}, Monitors),
-                 {noreply, maybe_autoheal(State#state{monitors = Monitors1})}
-    end;
+    rabbit_log:info("rabbit on node ~p up~n", [Node]),
+    {AllNodes, DiscNodes, RunningNodes} = read_cluster_status(),
+    write_cluster_status({add_node(Node, AllNodes),
+                         case NodeType of
+                             disc -> add_node(Node, DiscNodes);
+                             ram  -> DiscNodes
+                         end,
+                         add_node(Node, RunningNodes)}),
+    ok = handle_live_rabbit(Node),
+    Monitors1 = case pmon:is_monitored({rabbit, Node}, Monitors) of
+                   true ->
+                       Monitors;
+                   false ->
+                       pmon:monitor({rabbit, Node}, Monitors)
+               end,
+    {noreply, maybe_autoheal(State#state{monitors = Monitors1})};
 
 handle_cast({joined_cluster, Node, NodeType}, State) ->
     {AllNodes, DiscNodes, RunningNodes} = read_cluster_status(),
@@ -567,7 +584,7 @@ handle_info({mnesia_system_event,
     State1 = case pmon:is_monitored({rabbit, Node}, Monitors) of
                  true  -> State;
                  false -> State#state{
-                            monitors = pmon:monitor({rabbit, Node}, Monitors)}
+                           monitors = pmon:monitor({rabbit, Node}, Monitors)}
              end,
     ok = handle_live_rabbit(Node),
     Partitions1 = lists:usort([Node | Partitions]),
@@ -868,3 +885,12 @@ alive_rabbit_nodes(Nodes) ->
 ping_all() ->
     [net_adm:ping(N) || N <- rabbit_mnesia:cluster_nodes(all)],
     ok.
+
+possibly_partitioned_nodes() ->
+    alive_rabbit_nodes() -- rabbit_mnesia:cluster_nodes(running).
+
+startup_log([]) ->
+    rabbit_log:info("Starting rabbit_node_monitor~n", []);
+startup_log(Nodes) ->
+    rabbit_log:info("Starting rabbit_node_monitor, might be partitioned from ~p~n",
+                   [Nodes]).
similarity index 97%
rename from rabbitmq-server/src/rabbit_parameter_validation.erl
rename to deps/rabbit/src/rabbit_parameter_validation.erl
index a54f02bf06baab69d5eeb6e03fa8562eab3105b7..90ab1d528611d2f4ff5f1155b6cdaaf941e66509 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_parameter_validation).
diff --git a/deps/rabbit/src/rabbit_password.erl b/deps/rabbit/src/rabbit_password.erl
new file mode 100644 (file)
index 0000000..b7987df
--- /dev/null
@@ -0,0 +1,61 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_password).
+-include("rabbit.hrl").
+
+-define(DEFAULT_HASHING_MODULE, rabbit_password_hashing_sha256).
+
+%%
+%% API
+%%
+
+-export([hash/1, hash/2, generate_salt/0, salted_hash/2, salted_hash/3,
+         hashing_mod/0, hashing_mod/1]).
+
+hash(Cleartext) ->
+    hash(hashing_mod(), Cleartext).
+
+hash(HashingMod, Cleartext) ->
+    SaltBin = generate_salt(),
+    Hash = salted_hash(HashingMod, SaltBin, Cleartext),
+    <<SaltBin/binary, Hash/binary>>.
+
+generate_salt() ->
+    Salt = rand_compat:uniform(16#ffffffff),
+    <<Salt:32>>.
+
+salted_hash(Salt, Cleartext) ->
+    salted_hash(hashing_mod(), Salt, Cleartext).
+
+salted_hash(Mod, Salt, Cleartext) ->
+    Fun = fun Mod:hash/1,
+    Fun(<<Salt/binary, Cleartext/binary>>).
+
+hashing_mod() ->
+    rabbit_misc:get_env(rabbit, password_hashing_module,
+        ?DEFAULT_HASHING_MODULE).
+
+hashing_mod(rabbit_password_hashing_sha256) ->
+    rabbit_password_hashing_sha256;
+hashing_mod(rabbit_password_hashing_md5) ->
+    rabbit_password_hashing_md5;
+%% fall back to the hashing function that's been used prior to 3.6.0
+hashing_mod(undefined) ->
+    rabbit_password_hashing_md5;
+%% if a custom module is configured, simply use it
+hashing_mod(CustomMod) when is_atom(CustomMod) ->
+    CustomMod.
diff --git a/deps/rabbit/src/rabbit_password_hashing_md5.erl b/deps/rabbit/src/rabbit_password_hashing_md5.erl
new file mode 100644 (file)
index 0000000..75d8e4e
--- /dev/null
@@ -0,0 +1,28 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+%% Legacy hashing implementation, only used as a last resort when
+%% #internal_user.hashing_algorithm is md5 or undefined (the case in
+%% pre-3.6.0 user records).
+
+-module(rabbit_password_hashing_md5).
+
+-behaviour(rabbit_password_hashing).
+
+-export([hash/1]).
+
+hash(Binary) ->
+    erlang:md5(Binary).
diff --git a/deps/rabbit/src/rabbit_password_hashing_sha256.erl b/deps/rabbit/src/rabbit_password_hashing_sha256.erl
new file mode 100644 (file)
index 0000000..5df4d93
--- /dev/null
@@ -0,0 +1,24 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_password_hashing_sha256).
+
+-behaviour(rabbit_password_hashing).
+
+-export([hash/1]).
+
+hash(Binary) ->
+    crypto:hash(sha256, Binary).
diff --git a/deps/rabbit/src/rabbit_password_hashing_sha512.erl b/deps/rabbit/src/rabbit_password_hashing_sha512.erl
new file mode 100644 (file)
index 0000000..5a4b960
--- /dev/null
@@ -0,0 +1,24 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_password_hashing_sha512).
+
+-behaviour(rabbit_password_hashing).
+
+-export([hash/1]).
+
+hash(Binary) ->
+    crypto:hash(sha512, Binary).
diff --git a/deps/rabbit/src/rabbit_pbe.erl b/deps/rabbit/src/rabbit_pbe.erl
new file mode 100644 (file)
index 0000000..f4998d4
--- /dev/null
@@ -0,0 +1,194 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_pbe).
+
+-export([supported_ciphers/0, supported_hashes/0, default_cipher/0, default_hash/0, default_iterations/0]).
+-export([encrypt_term/5, decrypt_term/5]).
+-export([encrypt/5, decrypt/5]).
+
+%% Supported ciphers and hashes
+
+supported_ciphers() ->
+    proplists:get_value(ciphers, crypto:supports())
+        -- [aes_ctr, aes_ecb, des_ecb, blowfish_ecb, rc4, aes_gcm].
+
+supported_hashes() ->
+    proplists:get_value(hashs, crypto:supports())
+        -- [md4, ripemd160].
+
+%% Default encryption parameters (keep those in sync with rabbit.app.src)
+default_cipher() ->
+    aes_cbc256.
+
+default_hash() ->
+    sha512.
+
+default_iterations() ->
+    1000.
+
+%% Encryption/decryption of arbitrary Erlang terms.
+
+encrypt_term(Cipher, Hash, Iterations, PassPhrase, Term) ->
+    encrypt(Cipher, Hash, Iterations, PassPhrase, term_to_binary(Term)).
+
+decrypt_term(Cipher, Hash, Iterations, PassPhrase, Base64Binary) ->
+    binary_to_term(decrypt(Cipher, Hash, Iterations, PassPhrase, Base64Binary)).
+
+%% The cipher for encryption is from the list of supported ciphers.
+%% The hash for generating the key from the passphrase is from the list
+%% of supported hashes. See crypto:supports/0 to obtain both lists.
+%% The key is generated by applying the hash N times with N >= 1.
+%%
+%% The encrypt/5 function returns a base64 binary and the decrypt/5
+%% function accepts that same base64 binary.
+
+-spec encrypt(crypto:block_cipher(), crypto:hash_algorithms(),
+    pos_integer(), iodata(), binary()) -> binary().
+encrypt(Cipher, Hash, Iterations, PassPhrase, ClearText) ->
+    Salt = crypto:strong_rand_bytes(16),
+    Ivec = crypto:strong_rand_bytes(iv_length(Cipher)),
+    Key = make_key(Cipher, Hash, Iterations, PassPhrase, Salt),
+    Binary = crypto:block_encrypt(Cipher, Key, Ivec, pad(Cipher, ClearText)),
+    base64:encode(<< Salt/binary, Ivec/binary, Binary/binary >>).
+
+-spec decrypt(crypto:block_cipher(), crypto:hash_algorithms(),
+    pos_integer(), iodata(), binary()) -> binary().
+decrypt(Cipher, Hash, Iterations, PassPhrase, Base64Binary) ->
+    IvLength = iv_length(Cipher),
+    << Salt:16/binary, Ivec:IvLength/binary, Binary/bits >> = base64:decode(Base64Binary),
+    Key = make_key(Cipher, Hash, Iterations, PassPhrase, Salt),
+    unpad(crypto:block_decrypt(Cipher, Key, Ivec, Binary)).
+
+%% Generate a key from a passphrase.
+
+make_key(Cipher, Hash, Iterations, PassPhrase, Salt) ->
+    Key = pbdkdf2(PassPhrase, Salt, Iterations, key_length(Cipher),
+        fun crypto:hmac/4, Hash, hash_length(Hash)),
+    if
+        Cipher =:= des3_cbc; Cipher =:= des3_cbf; Cipher =:= des3_cfb; Cipher =:= des_ede3 ->
+            << A:8/binary, B:8/binary, C:8/binary >> = Key,
+            [A, B, C];
+        true ->
+            Key
+    end.
+
+%% Functions to pad/unpad input to a multiplier of block size.
+
+pad(Cipher, Data) ->
+    BlockSize = block_size(Cipher),
+    N = BlockSize - (byte_size(Data) rem BlockSize),
+    Pad = list_to_binary(lists:duplicate(N, N)),
+    <<Data/binary, Pad/binary>>.
+
+unpad(Data) ->
+    N = binary:last(Data),
+    binary:part(Data, 0, byte_size(Data) - N).
+
+%% These functions are necessary because the current Erlang crypto interface
+%% is lacking interfaces to the following OpenSSL functions:
+%%
+%% * int EVP_MD_size(const EVP_MD *md);
+%% * int EVP_CIPHER_iv_length(const EVP_CIPHER *e);
+%% * int EVP_CIPHER_key_length(const EVP_CIPHER *e);
+%% * int EVP_CIPHER_block_size(const EVP_CIPHER *e);
+
+hash_length(md4) -> 16;
+hash_length(md5) -> 16;
+hash_length(sha) -> 20;
+hash_length(sha224) -> 28;
+hash_length(sha256) -> 32;
+hash_length(sha384) -> 48;
+hash_length(sha512) -> 64.
+
+iv_length(des_cbc) -> 8;
+iv_length(des_cfb) -> 8;
+iv_length(des3_cbc) -> 8;
+iv_length(des3_cbf) -> 8;
+iv_length(des3_cfb) -> 8;
+iv_length(des_ede3) -> 8;
+iv_length(blowfish_cbc) -> 8;
+iv_length(blowfish_cfb64) -> 8;
+iv_length(blowfish_ofb64) -> 8;
+iv_length(rc2_cbc) -> 8;
+iv_length(aes_cbc) -> 16;
+iv_length(aes_cbc128) -> 16;
+iv_length(aes_cfb8) -> 16;
+iv_length(aes_cfb128) -> 16;
+iv_length(aes_cbc256) -> 16;
+iv_length(aes_ige256) -> 32.
+
+key_length(des_cbc) -> 8;
+key_length(des_cfb) -> 8;
+key_length(des3_cbc) -> 24;
+key_length(des3_cbf) -> 24;
+key_length(des3_cfb) -> 24;
+key_length(des_ede3) -> 24;
+key_length(blowfish_cbc) -> 16;
+key_length(blowfish_cfb64) -> 16;
+key_length(blowfish_ofb64) -> 16;
+key_length(rc2_cbc) -> 16;
+key_length(aes_cbc) -> 16;
+key_length(aes_cbc128) -> 16;
+key_length(aes_cfb8) -> 16;
+key_length(aes_cfb128) -> 16;
+key_length(aes_cbc256) -> 32;
+key_length(aes_ige256) -> 16.
+
+block_size(aes_cbc256) -> 32;
+block_size(aes_cbc128) -> 32;
+block_size(aes_ige256) -> 32;
+block_size(aes_cbc) -> 32;
+block_size(_) -> 8.
+
+%% The following was taken from OTP's lib/public_key/src/pubkey_pbe.erl
+%%
+%% This is an undocumented interface to password-based encryption algorithms.
+%% These functions have been copied here to stay compatible with R16B03.
+
+%%--------------------------------------------------------------------
+-spec pbdkdf2(string(), iodata(), integer(), integer(), fun(), atom(), integer())
+            -> binary().
+%%
+%% Description: Implements password based decryption key derive function 2.
+%% Exported mainly for testing purposes.
+%%--------------------------------------------------------------------
+pbdkdf2(Password, Salt, Count, DerivedKeyLen, Prf, PrfHash, PrfOutputLen)->
+    NumBlocks = ceiling(DerivedKeyLen / PrfOutputLen),
+    NumLastBlockOctets = DerivedKeyLen - (NumBlocks - 1) * PrfOutputLen ,
+    blocks(NumBlocks, NumLastBlockOctets, 1, Password, Salt,
+          Count, Prf, PrfHash, PrfOutputLen, <<>>).
+
+blocks(1, N, Index, Password, Salt, Count, Prf, PrfHash, PrfLen, Acc) ->
+    <<XorSum:N/binary, _/binary>> = xor_sum(Password, Salt, Count, Index, Prf, PrfHash, PrfLen),
+    <<Acc/binary, XorSum/binary>>;
+blocks(NumBlocks, N, Index, Password, Salt, Count, Prf, PrfHash, PrfLen, Acc) ->
+    XorSum = xor_sum(Password, Salt, Count, Index, Prf, PrfHash, PrfLen),
+    blocks(NumBlocks -1, N, Index +1, Password, Salt, Count, Prf, PrfHash,
+          PrfLen, <<Acc/binary, XorSum/binary>>).
+
+xor_sum(Password, Salt, Count, Index, Prf, PrfHash, PrfLen) ->
+    Result = Prf(PrfHash, Password, [Salt,<<Index:32/unsigned-big-integer>>], PrfLen),
+    do_xor_sum(Prf, PrfHash, PrfLen, Result, Password, Count-1, Result).
+
+do_xor_sum(_, _, _, _, _, 0, Acc) ->
+    Acc;
+do_xor_sum(Prf, PrfHash, PrfLen, Prev, Password, Count, Acc) ->
+    Result = Prf(PrfHash, Password, Prev, PrfLen),
+    do_xor_sum(Prf, PrfHash, PrfLen, Result, Password, Count-1, crypto:exor(Acc, Result)).
+
+ceiling(Float) ->
+    erlang:round(Float + 0.5).
similarity index 88%
rename from rabbitmq-server/src/rabbit_plugins.erl
rename to deps/rabbit/src/rabbit_plugins.erl
index 329703234640be1e3d12afb6bbb229521095cde5..4d8966f7e27adadcd37fd50da7fcc4096b2f9712 100644 (file)
 -module(rabbit_plugins).
 -include("rabbit.hrl").
 
--export([setup/0, active/0, read_enabled/1, list/1, dependencies/3]).
+-export([setup/0, active/0, read_enabled/1, list/1, list/2, dependencies/3]).
 -export([ensure/1]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-type plugin_name() :: atom().
 
--type(plugin_name() :: atom()).
-
--spec(setup/0 :: () -> [plugin_name()]).
--spec(active/0 :: () -> [plugin_name()]).
--spec(list/1 :: (string()) -> [#plugin{}]).
--spec(read_enabled/1 :: (file:filename()) -> [plugin_name()]).
--spec(dependencies/3 :: (boolean(), [plugin_name()], [#plugin{}]) ->
-                             [plugin_name()]).
--spec(ensure/1  :: (string()) -> {'ok', [atom()], [atom()]} | {error, any()}).
--endif.
+-spec setup() -> [plugin_name()].
+-spec active() -> [plugin_name()].
+-spec list(string()) -> [#plugin{}].
+-spec list(string(), boolean()) -> [#plugin{}].
+-spec read_enabled(file:filename()) -> [plugin_name()].
+-spec dependencies(boolean(), [plugin_name()], [#plugin{}]) ->
+                             [plugin_name()].
+-spec ensure(string()) -> {'ok', [atom()], [atom()]} | {error, any()}.
 
 %%----------------------------------------------------------------------------
 
@@ -87,14 +85,31 @@ active() ->
 
 %% @doc Get the list of plugins which are ready to be enabled.
 list(PluginsDir) ->
+    list(PluginsDir, false).
+
+list(PluginsDir, IncludeRequiredDeps) ->
     EZs = [{ez, EZ} || EZ <- filelib:wildcard("*.ez", PluginsDir)],
     FreeApps = [{app, App} ||
                    App <- filelib:wildcard("*/ebin/*.app", PluginsDir)],
+    %% We load the "rabbit" application to be sure we can get the
+    %% "applications" key. This is required for rabbitmq-plugins for
+    %% instance.
+    application:load(rabbit),
+    {ok, RabbitDeps} = application:get_key(rabbit, applications),
     {AvailablePlugins, Problems} =
         lists:foldl(fun ({error, EZ, Reason}, {Plugins1, Problems1}) ->
                             {Plugins1, [{EZ, Reason} | Problems1]};
-                        (Plugin = #plugin{}, {Plugins1, Problems1}) ->
-                            {[Plugin|Plugins1], Problems1}
+                        (Plugin = #plugin{name = Name}, {Plugins1, Problems1}) ->
+                            %% Applications RabbitMQ depends on (eg.
+                            %% "rabbit_common") can't be considered
+                            %% plugins, otherwise rabbitmq-plugins would
+                            %% list them and the user may believe he can
+                            %% disable them.
+                            case IncludeRequiredDeps orelse
+                              not lists:member(Name, RabbitDeps) of
+                                true  -> {[Plugin|Plugins1], Problems1};
+                                false -> {Plugins1, Problems1}
+                            end
                     end, {[], []},
                     [plugin_info(PluginsDir, Plug) || Plug <- EZs ++ FreeApps]),
     case Problems of
similarity index 97%
rename from rabbitmq-server/src/rabbit_plugins_main.erl
rename to deps/rabbit/src/rabbit_plugins_main.erl
index a4d5490c0959dce1948691c682c2bc43514cfa5b..ff516268c6e58e2e76c90802027b8c7407bf6acc 100644 (file)
          {enable, [?OFFLINE_DEF, ?ONLINE_DEF]},
          {disable, [?OFFLINE_DEF, ?ONLINE_DEF]},
          {set, [?OFFLINE_DEF, ?ONLINE_DEF]},
-         {sync, []}]).
+         {sync, []},
+         {help, []}]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start/0 :: () -> no_return()).
--spec(stop/0 :: () -> 'ok').
-
--endif.
+-spec start() -> no_return().
+-spec stop() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -147,7 +144,10 @@ action(disable, Node, ToDisable0, Opts, State = #cli{all      = All,
     action_change(Opts, Node, Implicit, NewImplicit, State);
 
 action(sync, Node, [], _Opts, State) ->
-    sync(Node, true, State).
+    sync(Node, true, State);
+
+action(help, _Node, _Args, _Opts, _State) ->
+    io:format("~s", [rabbit_plugins_usage:usage()]).
 
 %%----------------------------------------------------------------------------
 
@@ -169,7 +169,7 @@ format_plugins(Node, Pattern, Opts, #cli{all      = All,
 
     EnabledImplicitly = Implicit -- Enabled,
     {StatusMsg, Running} =
-        case rabbit_cli:rpc_call(Node, rabbit_plugins, active, []) of
+        case rabbit_misc:rpc_call(Node, rabbit_plugins, active, []) of
             {badrpc, _} -> {"[failed to contact ~s - status not shown]", []};
             Active      -> {"* = running on ~s", Active}
         end,
@@ -275,7 +275,7 @@ sync(Node, ForceOnline, #cli{file = File}) ->
 
 rpc_call(Node, Online, Mod, Fun, Args) ->
     io:format("~nApplying plugin configuration to ~s...", [Node]),
-    case rabbit_cli:rpc_call(Node, Mod, Fun, Args) of
+    case rabbit_misc:rpc_call(Node, Mod, Fun, Args) of
         {ok, [], []} ->
             io:format(" nothing to do.~n", []);
         {ok, Start, []} ->
similarity index 87%
rename from rabbitmq-server/src/rabbit_policies.erl
rename to deps/rabbit/src/rabbit_policies.erl
index 65f3801e3e668c7e39e9956541a79f0fd91482a5..c7d4c99f370184f4bf26908a07e083ac45ebc11f 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_policies).
+
+%% Provides built-in policy parameter
+%% validation functions.
+
 -behaviour(rabbit_policy_validator).
 
 -include("rabbit.hrl").
@@ -35,7 +39,8 @@ register() ->
                           {policy_validator, <<"message-ttl">>},
                           {policy_validator, <<"expires">>},
                           {policy_validator, <<"max-length">>},
-                          {policy_validator, <<"max-length-bytes">>}]],
+                          {policy_validator, <<"max-length-bytes">>},
+                          {policy_validator, <<"queue-mode">>}]],
     ok.
 
 validate_policy(Terms) ->
@@ -83,4 +88,11 @@ validate_policy0(<<"max-length-bytes">>, Value)
   when is_integer(Value), Value >= 0 ->
     ok;
 validate_policy0(<<"max-length-bytes">>, Value) ->
-    {error, "~p is not a valid maximum length in bytes", [Value]}.
+    {error, "~p is not a valid maximum length in bytes", [Value]};
+
+validate_policy0(<<"queue-mode">>, <<"default">>) ->
+    ok;
+validate_policy0(<<"queue-mode">>, <<"lazy">>) ->
+    ok;
+validate_policy0(<<"queue-mode">>, Value) ->
+    {error, "~p is not a valid queue-mode value", [Value]}.
similarity index 90%
rename from rabbitmq-server/src/rabbit_policy.erl
rename to deps/rabbit/src/rabbit_policy.erl
index 5bf5483272432e7930c56f54440dd4a191d48bbe..a9caadf97289ca0e85c2d04c6bc40c0fcda6b701 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_policy).
 
+%% Policies is a way to apply optional arguments ("x-args")
+%% to exchanges and queues in bulk, using name matching.
+%%
+%% Only one policy can apply to a given queue or exchange
+%% at a time. Priorities help determine what policy should
+%% take precedence.
+%%
+%% Policies build on runtime parameters. Policy-driven parameters
+%% are well known and therefore validated.
+%%
+%% See also:
+%%
+%%  * rabbit_runtime_parameters
+%%  * rabbit_policies
+%%  * rabbit_registry
+
 %% TODO specs
 
 -behaviour(rabbit_runtime_parameter).
@@ -29,7 +45,7 @@
 -export([name/1, get/2, get_arg/3, set/1]).
 -export([validate/5, notify/4, notify_clear/3]).
 -export([parse_set/6, set/6, delete/2, lookup/2, list/0, list/1,
-         list_formatted/1, info_keys/0]).
+         list_formatted/1, list_formatted/3, info_keys/0]).
 
 -rabbit_boot_step({?MODULE,
                    [{description, "policy parameters"},
@@ -170,6 +186,10 @@ list(VHost) ->
 list_formatted(VHost) ->
     order_policies(list0(VHost, fun format/1)).
 
+list_formatted(VHost, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map(AggregatorPid, Ref,
+                                     fun(P) -> P end, list_formatted(VHost)).
+
 list0(VHost, DefnFun) ->
     [p(P, DefnFun) || P <- rabbit_runtime_parameters:list(VHost, <<"policy">>)].
 
@@ -201,11 +221,11 @@ validate(_VHost, <<"policy">>, Name, Term, _User) ->
       Name, policy_validation(), Term).
 
 notify(VHost, <<"policy">>, Name, Term) ->
-    rabbit_event:notify(policy_set, [{name, Name} | Term]),
+    rabbit_event:notify(policy_set, [{name, Name}, {vhost, VHost} | Term]),
     update_policies(VHost).
 
 notify_clear(VHost, <<"policy">>, Name) ->
-    rabbit_event:notify(policy_cleared, [{name, Name}]),
+    rabbit_event:notify(policy_cleared, [{name, Name}, {vhost, VHost}]),
     update_policies(VHost).
 
 %%----------------------------------------------------------------------------
@@ -222,8 +242,10 @@ update_policies(VHost) ->
                  fun() ->
                          [mnesia:lock({table, T}, write) || T <- Tabs], %% [1]
                          case catch list(VHost) of
-                             {error, {no_such_vhost, _}} ->
-                                 ok; %% [2]
+                             {'EXIT', {throw, {error, {no_such_vhost, _}}}} ->
+                                 {[], []}; %% [2]
+                             {'EXIT', Exit} ->
+                                 exit(Exit);
                              Policies ->
                                  {[update_exchange(X, Policies) ||
                                       X <- rabbit_exchange:list(VHost)],
@@ -254,7 +276,9 @@ update_queue(Q = #amqqueue{name = QName, policy = OldPolicy}, Policies) ->
         NewPolicy -> case rabbit_amqqueue:update(
                        QName, fun(Q1) ->
                                       rabbit_queue_decorator:set(
-                                        Q1#amqqueue{policy = NewPolicy})
+                                        Q1#amqqueue{policy = NewPolicy,
+                                            policy_version =
+                                            Q1#amqqueue.policy_version + 1 })
                               end) of
                          #amqqueue{} = Q1 -> {Q, Q1};
                          not_found        -> {Q, Q }
similarity index 84%
rename from rabbitmq-server/src/rabbit_prelaunch.erl
rename to deps/rabbit/src/rabbit_prelaunch.erl
index 13455abb0a8831bdbef0905207db4e3c7c7cb5ee..569a8d6c5a082eea048f08d0093fb3a5168e0a35 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_prelaunch).
 %% Specs
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start/0 :: () -> no_return()).
--spec(stop/0 :: () -> 'ok').
-
--endif.
+-spec start() -> no_return().
+-spec stop() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -113,16 +109,25 @@ dist_port_use_check(NodeHost) ->
     case os:getenv("RABBITMQ_DIST_PORT") of
         false   -> ok;
         PortStr -> Port = list_to_integer(PortStr),
-                   case gen_tcp:listen(Port, [inet, {reuseaddr, true}]) of
-                       {ok, Sock} -> gen_tcp:close(Sock);
-                       {error, _} -> dist_port_use_check_fail(Port, NodeHost)
-                   end
+                  dist_port_use_check_ipv4(NodeHost, Port)
+    end.
+
+dist_port_use_check_ipv4(NodeHost, Port) ->
+    case gen_tcp:listen(Port, [inet, {reuseaddr, true}]) of
+       {ok, Sock} -> gen_tcp:close(Sock);
+       {error, einval} -> dist_port_use_check_ipv6(NodeHost, Port);
+       {error, _} -> dist_port_use_check_fail(Port, NodeHost)
+    end.
+
+dist_port_use_check_ipv6(NodeHost, Port) ->
+    case gen_tcp:listen(Port, [inet6, {reuseaddr, true}]) of
+       {ok, Sock} -> gen_tcp:close(Sock);
+       {error, _} -> dist_port_use_check_fail(Port, NodeHost)
     end.
 
--ifdef(use_specs).
--spec(dist_port_use_check_fail/2 :: (non_neg_integer(), string()) ->
-                                         no_return()).
--endif.
+-spec dist_port_use_check_fail(non_neg_integer(), string()) ->
+                                         no_return().
+
 dist_port_use_check_fail(Port, Host) ->
     {ok, Names} = rabbit_nodes:names(Host),
     case [N || {N, P} <- Names, P =:= Port] of
similarity index 94%
rename from rabbitmq-server/src/rabbit_prequeue.erl
rename to deps/rabbit/src/rabbit_prequeue.erl
index af96ea9f6fd2a45c69d3b0efa9509e0afc6707d3..5b2c24acab141bbbe66baaf37604582d4150bb19 100644 (file)
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([start_mode/0]).
 
--type(start_mode() :: 'declare' | 'recovery' | 'slave').
-
--spec(start_link/3 :: (rabbit_types:amqqueue(), start_mode(), pid())
-                      -> rabbit_types:ok_pid_or_error()).
+-type start_mode() :: 'declare' | 'recovery' | 'slave'.
 
--endif.
+-spec start_link(rabbit_types:amqqueue(), start_mode(), pid())
+                      -> rabbit_types:ok_pid_or_error().
 
 %%----------------------------------------------------------------------------
 
similarity index 80%
rename from rabbitmq-server/src/rabbit_priority_queue.erl
rename to deps/rabbit/src/rabbit_priority_queue.erl
index 206d674abc87d19915a2c60f9d7d4e4edba3e8ea..b7a3afd129561b32509785f70112b26229fa7243 100644 (file)
 -export([init/3, terminate/2, delete_and_terminate/2, delete_crashed/1,
          purge/1, purge_acks/1,
          publish/6, publish_delivered/5, discard/4, drain_confirmed/1,
+         batch_publish/4, batch_publish_delivered/4,
          dropwhile/2, fetchwhile/4, fetch/2, drop/2, ack/2, requeue/2,
          ackfold/4, fold/3, len/1, is_empty/1, depth/1,
          set_ram_duration_target/2, ram_duration/1, needs_timeout/1, timeout/1,
          handle_pre_hibernate/1, resume/1, msg_rates/1,
-         info/2, invoke/3, is_duplicate/2]).
+         info/2, invoke/3, is_duplicate/2, set_queue_mode/2,
+         zip_msgs_and_acks/4]).
 
--record(state, {bq, bqss}).
+-record(state, {bq, bqss, max_priority}).
 -record(passthrough, {bq, bqs}).
 
 %% See 'note on suffixes' below
@@ -124,7 +126,7 @@ collapse_recovery(QNames, DupNames, Recovery) ->
     [dict:fetch(Name, NameToTerms) || Name <- QNames].
 
 priorities(#amqqueue{arguments = Args}) ->
-    Ints = [long, short, signedint, byte],
+    Ints = [long, short, signedint, byte, unsignedbyte, unsignedshort, unsignedint],
     case rabbit_misc:table_lookup(Args, <<"x-max-priority">>) of
         {Type, Max} -> case lists:member(Type, Ints) of
                            false -> none;
@@ -155,7 +157,8 @@ init(Q, Recover, AsyncCallback) ->
                                     [{P, Init(P, Term)} || {P, Term} <- PsTerms]
                        end,
                 #state{bq   = BQ,
-                       bqss = BQSs}
+                       bqss = BQSs,
+                       max_priority = hd(Ps)}
     end.
 %% [0] collapse_recovery has the effect of making a list of recovery
 %% terms in priority order, even for non priority queues. It's easier
@@ -203,6 +206,18 @@ publish(Msg, MsgProps, IsDelivered, ChPid, Flow,
         State = #passthrough{bq = BQ, bqs = BQS}) ->
     ?passthrough1(publish(Msg, MsgProps, IsDelivered, ChPid, Flow, BQS)).
 
+batch_publish(Publishes, ChPid, Flow, State = #state{bq = BQ, bqss = [{MaxP, _} |_]}) ->
+    PubDict = partition_publish_batch(Publishes, MaxP),
+    lists:foldl(
+      fun ({Priority, Pubs}, St) ->
+              pick1(fun (_P, BQSN) ->
+                            BQ:batch_publish(Pubs, ChPid, Flow, BQSN)
+                    end, Priority, St)
+      end, State, orddict:to_list(PubDict));
+batch_publish(Publishes, ChPid, Flow,
+              State = #passthrough{bq = BQ, bqs = BQS}) ->
+    ?passthrough1(batch_publish(Publishes, ChPid, Flow, BQS)).
+
 publish_delivered(Msg, MsgProps, ChPid, Flow, State = #state{bq = BQ}) ->
     pick2(fun (P, BQSN) ->
                   {AckTag, BQSN1} = BQ:publish_delivered(
@@ -213,6 +228,25 @@ publish_delivered(Msg, MsgProps, ChPid, Flow,
                   State = #passthrough{bq = BQ, bqs = BQS}) ->
     ?passthrough2(publish_delivered(Msg, MsgProps, ChPid, Flow, BQS)).
 
+batch_publish_delivered(Publishes, ChPid, Flow, State = #state{bq = BQ, bqss = [{MaxP, _} |_]}) ->
+    PubDict = partition_publish_delivered_batch(Publishes, MaxP),
+    {PrioritiesAndAcks, State1} =
+        lists:foldl(
+          fun ({Priority, Pubs}, {PriosAndAcks, St}) ->
+                  {PriosAndAcks1, St1} =
+                      pick2(fun (P, BQSN) ->
+                                    {AckTags, BQSN1} =
+                                        BQ:batch_publish_delivered(
+                                          Pubs, ChPid, Flow, BQSN),
+                                    {priority_on_acktags(P, AckTags), BQSN1}
+                            end, Priority, St),
+                  {[PriosAndAcks1 | PriosAndAcks], St1}
+          end, {[], State}, orddict:to_list(PubDict)),
+    {lists:reverse(PrioritiesAndAcks), State1};
+batch_publish_delivered(Publishes, ChPid, Flow,
+                        State = #passthrough{bq = BQ, bqs = BQS}) ->
+    ?passthrough2(batch_publish_delivered(Publishes, ChPid, Flow, BQS)).
+
 %% TODO this is a hack. The BQ api does not give us enough information
 %% here - if we had the Msg we could look at its priority and forward
 %% to the appropriate sub-BQ. But we don't so we are stuck.
@@ -371,11 +405,12 @@ msg_rates(#state{bq = BQ, bqss = BQSs}) ->
           end, {0.0, 0.0}, BQSs);
 msg_rates(#passthrough{bq = BQ, bqs = BQS}) ->
     BQ:msg_rates(BQS).
-
 info(backing_queue_status, #state{bq = BQ, bqss = BQSs}) ->
     fold0(fun (P, BQSN, Acc) ->
                   combine_status(P, BQ:info(backing_queue_status, BQSN), Acc)
           end, nothing, BQSs);
+info(head_message_timestamp, #state{bq = BQ, bqss = BQSs}) ->
+    find_head_message_timestamp(BQ, BQSs, '');
 info(Item, #state{bq = BQ, bqss = BQSs}) ->
     fold0(fun (_P, BQSN, Acc) ->
                   Acc + BQ:info(Item, BQSN)
@@ -385,6 +420,8 @@ info(Item, #passthrough{bq = BQ, bqs = BQS}) ->
 
 invoke(Mod, {P, Fun}, State = #state{bq = BQ}) ->
     pick1(fun (_P, BQSN) -> BQ:invoke(Mod, Fun, BQSN) end, P, State);
+invoke(Mod, Fun, State = #state{bq = BQ, max_priority = P}) ->
+    pick1(fun (_P, BQSN) -> BQ:invoke(Mod, Fun, BQSN) end, P, State);
 invoke(Mod, Fun, State = #passthrough{bq = BQ, bqs = BQS}) ->
     ?passthrough1(invoke(Mod, Fun, BQS)).
 
@@ -393,6 +430,23 @@ is_duplicate(Msg, State = #state{bq = BQ}) ->
 is_duplicate(Msg, State = #passthrough{bq = BQ, bqs = BQS}) ->
     ?passthrough2(is_duplicate(Msg, BQS)).
 
+set_queue_mode(Mode, State = #state{bq = BQ}) ->
+    foreach1(fun (_P, BQSN) -> BQ:set_queue_mode(Mode, BQSN) end, State);
+set_queue_mode(Mode, State = #passthrough{bq = BQ, bqs = BQS}) ->
+    ?passthrough1(set_queue_mode(Mode, BQS)).
+
+zip_msgs_and_acks(Msgs, AckTags, Accumulator, #state{bqss = [{MaxP, _} |_]}) ->
+    MsgsByPriority = partition_publish_delivered_batch(Msgs, MaxP),
+    lists:foldl(fun (Acks, MAs) ->
+                        {P, _AckTag} = hd(Acks),
+                        Pubs = orddict:fetch(P, MsgsByPriority),
+                        MAs0 = zip_msgs_and_acks(Pubs, Acks),
+                        MAs ++ MAs0
+                end, Accumulator, AckTags);
+zip_msgs_and_acks(Msgs, AckTags, Accumulator,
+                  #passthrough{bq = BQ, bqs = BQS}) ->
+    BQ:zip_msgs_and_acks(Msgs, AckTags, Accumulator, BQS).
+
 %%----------------------------------------------------------------------------
 
 bq() ->
@@ -432,13 +486,14 @@ foreach1(_Fun, [], BQSAcc) ->
 
 %% For a given thing, just go to its BQ
 pick1(Fun, Prioritisable, #state{bqss = BQSs} = State) ->
-    {P, BQSN} = priority(Prioritisable, BQSs),
+    {P, BQSN} = priority_bq(Prioritisable, BQSs),
     a(State#state{bqss = bq_store(P, Fun(P, BQSN), BQSs)}).
 
 %% Fold over results
 fold2(Fun, Acc, State = #state{bqss = BQSs}) ->
     {Res, BQSs1} = fold2(Fun, Acc, BQSs, []),
     {Res, a(State#state{bqss = BQSs1})}.
+
 fold2(Fun, Acc, [{P, BQSN} | Rest], BQSAcc) ->
     {Acc1, BQSN1} = Fun(P, BQSN, Acc),
     fold2(Fun, Acc1, Rest, [{P, BQSN1} | BQSAcc]);
@@ -480,7 +535,7 @@ fold_by_acktags2(Fun, AckTags, State) ->
 
 %% For a given thing, just go to its BQ
 pick2(Fun, Prioritisable, #state{bqss = BQSs} = State) ->
-    {P, BQSN} = priority(Prioritisable, BQSs),
+    {P, BQSN} = priority_bq(Prioritisable, BQSs),
     {Res, BQSN1} = Fun(P, BQSN),
     {Res, a(State#state{bqss = bq_store(P, BQSN1, BQSs)})}.
 
@@ -511,8 +566,8 @@ findfold3(Fun, Acc, NotFound, [{P, BQSN} | Rest], BQSAcc) ->
 findfold3(_Fun, Acc, NotFound, [], BQSAcc) ->
     {NotFound, Acc, lists:reverse(BQSAcc)}.
 
-bq_fetch(P, [])               -> exit({not_found, P});
-bq_fetch(P, [{P,  BQSN} | _]) -> BQSN;
+bq_fetch(P, []) -> exit({not_found, P});
+bq_fetch(P, [{P,  BQSN} | _]) -> {P, BQSN};
 bq_fetch(P, [{_, _BQSN} | T]) -> bq_fetch(P, T).
 
 bq_store(P, BQS, BQSs) ->
@@ -530,25 +585,41 @@ a(State = #state{bqss = BQSs}) ->
     end.
 
 %%----------------------------------------------------------------------------
-
-priority(P, BQSs) when is_integer(P) ->
-    {P, bq_fetch(P, BQSs)};
-priority(#basic_message{content = Content}, BQSs) ->
-    priority1(rabbit_binary_parser:ensure_content_decoded(Content), BQSs).
-
-priority1(_Content, [{P, BQSN}]) ->
-    {P, BQSN};
-priority1(Content = #content{properties = Props},
-         [{P, BQSN} | Rest]) ->
+partition_publish_batch(Publishes, MaxP) ->
+    partition_publishes(
+      Publishes, fun ({Msg, _, _}) -> Msg end, MaxP).
+
+partition_publish_delivered_batch(Publishes, MaxP) ->
+    partition_publishes(
+      Publishes, fun ({Msg, _}) -> Msg end, MaxP).
+
+partition_publishes(Publishes, ExtractMsg, MaxP) ->
+    Partitioned =
+        lists:foldl(fun (Pub, Dict) ->
+                            Msg = ExtractMsg(Pub),
+                            rabbit_misc:orddict_cons(priority(Msg, MaxP), Pub, Dict)
+                    end, orddict:new(), Publishes),
+    orddict:map(fun (_P, RevPubs) ->
+                        lists:reverse(RevPubs)
+                end, Partitioned).
+
+
+priority_bq(Priority, [{MaxP, _} | _] = BQSs) ->
+    bq_fetch(priority(Priority, MaxP), BQSs).
+
+%% Messages with a priority which is higher than the queue's maximum are treated
+%% as if they were published with the maximum priority.
+priority(undefined, _MaxP) ->
+    0;
+priority(Priority, MaxP) when is_integer(Priority), Priority =< MaxP ->
+    Priority;
+priority(Priority, MaxP) when is_integer(Priority), Priority > MaxP ->
+    MaxP;
+priority(#basic_message{content = Content}, MaxP) ->
+    priority(rabbit_binary_parser:ensure_content_decoded(Content), MaxP);
+priority(#content{properties = Props}, MaxP) ->
     #'P_basic'{priority = Priority0} = Props,
-    Priority = case Priority0 of
-                   undefined                    -> 0;
-                   _ when is_integer(Priority0) -> Priority0
-               end,
-    case Priority >= P of
-        true  -> {P, BQSN};
-        false -> priority1(Content, Rest)
-    end.
+    priority(Priority0, MaxP).
 
 add_maybe_infinity(infinity, _) -> infinity;
 add_maybe_infinity(_, infinity) -> infinity;
@@ -579,6 +650,32 @@ combine_status(P, New, Old) ->
 
 cse(infinity, _)            -> infinity;
 cse(_, infinity)            -> infinity;
+%% queue modes
+cse(_, default)             -> default;
+cse(default, _)             -> default;
+cse(_, lazy)                -> lazy;
+cse(lazy, _)                -> lazy;
+%% numerical stats
 cse(A, B) when is_number(A) -> A + B;
 cse({delta, _, _, _}, _)    -> {delta, todo, todo, todo};
 cse(A, B)                   -> exit({A, B}).
+
+%% When asked about 'head_message_timestamp' for this priority queue, we
+%% walk all the backing queues, starting by the highest priority. Once a
+%% backing queue having messages (ready or unacknowledged) is found, its
+%% 'head_message_timestamp' is returned even if it is null.
+
+find_head_message_timestamp(BQ, [{_, BQSN} | Rest], Timestamp) ->
+    MsgCount = BQ:len(BQSN) + BQ:info(messages_unacknowledged_ram, BQSN),
+    if
+        MsgCount =/= 0 -> BQ:info(head_message_timestamp, BQSN);
+        true           -> find_head_message_timestamp(BQ, Rest, Timestamp)
+    end;
+find_head_message_timestamp(_, [], Timestamp) ->
+    Timestamp.
+
+zip_msgs_and_acks(Pubs, AckTags) ->
+    lists:zipwith(
+      fun ({#basic_message{ id = Id }, _Props}, AckTag) ->
+                  {Id, AckTag}
+      end, Pubs, AckTags).
similarity index 96%
rename from rabbitmq-server/src/rabbit_queue_consumers.erl
rename to deps/rabbit/src/rabbit_queue_consumers.erl
index ae8481aaf8a4fbc1dfaac59f77ac20a3d537d1d5..a8002398e7c5f85e5697990fd35a0087844b8ad6 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_queue_consumers).
@@ -49,8 +49,6 @@
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -type time_micros() :: non_neg_integer().
 -type ratio() :: float().
 -type state() :: #state{consumers ::priority_queue:q(),
              state()) -> 'unchanged' | {'unblocked', state()}.
 -spec utilisation(state()) -> ratio().
 
--endif.
-
 %%----------------------------------------------------------------------------
 
 new() -> #state{consumers = priority_queue:new(),
-                use       = {active, now_micros(), 1.0}}.
+                use       = {active,
+                             time_compat:monotonic_time(micro_seconds),
+                             1.0}}.
 
 max_active_priority(#state{consumers = Consumers}) ->
     priority_queue:highest(Consumers).
@@ -279,7 +277,9 @@ subtract_acks([T | TL] = AckTags, Prefix, CTagCounts, AckQ) ->
             subtract_acks(TL, Prefix,
                           orddict:update_counter(CTag, 1, CTagCounts), QTail);
         {{value, V}, QTail} ->
-            subtract_acks(AckTags, [V | Prefix], CTagCounts, QTail)
+            subtract_acks(AckTags, [V | Prefix], CTagCounts, QTail);
+        {empty, _} -> 
+            subtract_acks([], Prefix, CTagCounts, AckQ)
     end.
 
 possibly_unblock(Update, ChPid, State) ->
@@ -346,9 +346,9 @@ drain_mode(true)  -> drain;
 drain_mode(false) -> manual.
 
 utilisation(#state{use = {active, Since, Avg}}) ->
-    use_avg(now_micros() - Since, 0, Avg);
+    use_avg(time_compat:monotonic_time(micro_seconds) - Since, 0, Avg);
 utilisation(#state{use = {inactive, Since, Active, Avg}}) ->
-    use_avg(Active, now_micros() - Since, Avg).
+    use_avg(Active, time_compat:monotonic_time(micro_seconds) - Since, Avg).
 
 %%----------------------------------------------------------------------------
 
@@ -455,14 +455,14 @@ update_use({inactive, _, _, _}   = CUInfo, inactive) ->
 update_use({active,   _, _}      = CUInfo,   active) ->
     CUInfo;
 update_use({active,   Since,         Avg}, inactive) ->
-    Now = now_micros(),
+    Now = time_compat:monotonic_time(micro_seconds),
     {inactive, Now, Now - Since, Avg};
 update_use({inactive, Since, Active, Avg},   active) ->
-    Now = now_micros(),
+    Now = time_compat:monotonic_time(micro_seconds),
     {active, Now, use_avg(Active, Now - Since, Avg)}.
 
+use_avg(0, 0, Avg) ->
+    Avg;
 use_avg(Active, Inactive, Avg) ->
     Time = Inactive + Active,
     rabbit_misc:moving_average(Time, ?USE_AVG_HALF_LIFE, Active / Time, Avg).
-
-now_micros() -> timer:now_diff(now(), {0,0,0}).
similarity index 94%
rename from rabbitmq-server/src/rabbit_queue_index.erl
rename to deps/rabbit/src/rabbit_queue_index.erl
index 176f65b18b6b95564a15379b824439dcf507b419..6a14854882382fab12ceb9da5c4ccfb453a29bd9 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_queue_index).
 %% simplifies and clarifies the code.
 %%
 %% For notes on Clean Shutdown and startup, see documentation in
-%% variable_queue.
+%% rabbit_variable_queue.
 %%
 %%----------------------------------------------------------------------------
 
 -rabbit_upgrade({store_msg_size, local, [avoid_zeroes]}).
 -rabbit_upgrade({store_msg,      local, [store_msg_size]}).
 
--ifdef(use_specs).
-
--type(hdl() :: ('undefined' | any())).
--type(segment() :: ('undefined' |
+-type hdl() :: ('undefined' | any()).
+-type segment() :: ('undefined' |
                     #segment { num                :: non_neg_integer(),
                                path               :: file:filename(),
-                               journal_entries    :: array:array(),
-                               entries_to_segment :: array:array(),
+                               journal_entries    :: ?ARRAY_TYPE(),
+                               entries_to_segment :: ?ARRAY_TYPE(),
                                unacked            :: non_neg_integer()
-                             })).
--type(seq_id() :: integer()).
--type(seg_dict() :: {dict:dict(), [segment()]}).
--type(on_sync_fun() :: fun ((gb_sets:set()) -> ok)).
--type(qistate() :: #qistate { dir                 :: file:filename(),
+                             }).
+-type seq_id() :: integer().
+-type seg_dict() :: {?DICT_TYPE(), [segment()]}.
+-type on_sync_fun() :: fun ((?GB_SET_TYPE()) -> ok).
+-type qistate() :: #qistate { dir                 :: file:filename(),
                               segments            :: 'undefined' | seg_dict(),
                               journal_handle      :: hdl(),
                               dirty_count         :: integer(),
                               max_journal_entries :: non_neg_integer(),
                               on_sync             :: on_sync_fun(),
                               on_sync_msg         :: on_sync_fun(),
-                              unconfirmed         :: gb_sets:set(),
-                              unconfirmed_msg     :: gb_sets:set(),
+                              unconfirmed         :: ?GB_SET_TYPE(),
+                              unconfirmed_msg     :: ?GB_SET_TYPE(),
                               pre_publish_cache   :: list(),
                               delivered_cache     :: list()
-                            }).
--type(contains_predicate() :: fun ((rabbit_types:msg_id()) -> boolean())).
--type(walker(A) :: fun ((A) -> 'finished' |
-                               {rabbit_types:msg_id(), non_neg_integer(), A})).
--type(shutdown_terms() :: [term()] | 'non_clean_shutdown').
-
--spec(erase/1 :: (rabbit_amqqueue:name()) -> 'ok').
--spec(reset_state/1 :: (qistate()) -> qistate()).
--spec(init/3 :: (rabbit_amqqueue:name(),
-                 on_sync_fun(), on_sync_fun()) -> qistate()).
--spec(recover/6 :: (rabbit_amqqueue:name(), shutdown_terms(), boolean(),
+                            }.
+-type contains_predicate() :: fun ((rabbit_types:msg_id()) -> boolean()).
+-type walker(A) :: fun ((A) -> 'finished' |
+                               {rabbit_types:msg_id(), non_neg_integer(), A}).
+-type shutdown_terms() :: [term()] | 'non_clean_shutdown'.
+
+-spec erase(rabbit_amqqueue:name()) -> 'ok'.
+-spec reset_state(qistate()) -> qistate().
+-spec init(rabbit_amqqueue:name(),
+                 on_sync_fun(), on_sync_fun()) -> qistate().
+-spec recover(rabbit_amqqueue:name(), shutdown_terms(), boolean(),
                     contains_predicate(),
                     on_sync_fun(), on_sync_fun()) ->
                         {'undefined' | non_neg_integer(),
-                         'undefined' | non_neg_integer(), qistate()}).
--spec(terminate/2 :: ([any()], qistate()) -> qistate()).
--spec(delete_and_terminate/1 :: (qistate()) -> qistate()).
--spec(publish/6 :: (rabbit_types:msg_id(), seq_id(),
+                         'undefined' | non_neg_integer(), qistate()}.
+-spec terminate([any()], qistate()) -> qistate().
+-spec delete_and_terminate(qistate()) -> qistate().
+-spec publish(rabbit_types:msg_id(), seq_id(),
                     rabbit_types:message_properties(), boolean(),
-                    non_neg_integer(), qistate()) -> qistate()).
--spec(deliver/2 :: ([seq_id()], qistate()) -> qistate()).
--spec(ack/2 :: ([seq_id()], qistate()) -> qistate()).
--spec(sync/1 :: (qistate()) -> qistate()).
--spec(needs_sync/1 :: (qistate()) -> 'confirms' | 'other' | 'false').
--spec(flush/1 :: (qistate()) -> qistate()).
--spec(read/3 :: (seq_id(), seq_id(), qistate()) ->
+                    non_neg_integer(), qistate()) -> qistate().
+-spec deliver([seq_id()], qistate()) -> qistate().
+-spec ack([seq_id()], qistate()) -> qistate().
+-spec sync(qistate()) -> qistate().
+-spec needs_sync(qistate()) -> 'confirms' | 'other' | 'false'.
+-spec flush(qistate()) -> qistate().
+-spec read(seq_id(), seq_id(), qistate()) ->
                      {[{rabbit_types:msg_id(), seq_id(),
                         rabbit_types:message_properties(),
-                        boolean(), boolean()}], qistate()}).
--spec(next_segment_boundary/1 :: (seq_id()) -> seq_id()).
--spec(bounds/1 :: (qistate()) ->
-                       {non_neg_integer(), non_neg_integer(), qistate()}).
--spec(start/1 :: ([rabbit_amqqueue:name()]) -> {[[any()]], {walker(A), A}}).
-
--spec(add_queue_ttl/0 :: () -> 'ok').
+                        boolean(), boolean()}], qistate()}.
+-spec next_segment_boundary(seq_id()) -> seq_id().
+-spec bounds(qistate()) ->
+                       {non_neg_integer(), non_neg_integer(), qistate()}.
+-spec start([rabbit_amqqueue:name()]) -> {[[any()]], {walker(A), A}}.
 
--endif.
+-spec add_queue_ttl() -> 'ok'.
 
 
 %%----------------------------------------------------------------------------
@@ -306,24 +302,9 @@ delete_and_terminate(State) ->
     State1.
 
 pre_publish(MsgOrId, SeqId, MsgProps, IsPersistent, IsDelivered, JournalSizeHint,
-            State = #qistate{unconfirmed       = UC,
-                             unconfirmed_msg   = UCM,
-                             pre_publish_cache = PPC,
+            State = #qistate{pre_publish_cache = PPC,
                              delivered_cache   = DC}) ->
-    MsgId = case MsgOrId of
-                #basic_message{id = Id} -> Id;
-                Id when is_binary(Id)   -> Id
-            end,
-    ?MSG_ID_BYTES = size(MsgId),
-
-    State1 =
-        case {MsgProps#message_properties.needs_confirming, MsgOrId} of
-            {true,  MsgId} -> UC1  = gb_sets:add_element(MsgId, UC),
-                              State#qistate{unconfirmed     = UC1};
-            {true,  _}     -> UCM1 = gb_sets:add_element(MsgId, UCM),
-                              State#qistate{unconfirmed_msg = UCM1};
-            {false, _}     -> State
-        end,
+    State1 = maybe_needs_confirming(MsgProps, MsgOrId, State),
 
     {Bin, MsgBin} = create_pub_record_body(MsgOrId, MsgProps),
 
@@ -377,23 +358,10 @@ flush_delivered_cache(State = #qistate{delivered_cache = DC}) ->
     State1 = deliver(lists:reverse(DC), State),
     State1#qistate{delivered_cache = []}.
 
-publish(MsgOrId, SeqId, MsgProps, IsPersistent, JournalSizeHint,
-        State = #qistate{unconfirmed     = UC,
-                         unconfirmed_msg = UCM}) ->
-    MsgId = case MsgOrId of
-                #basic_message{id = Id} -> Id;
-                Id when is_binary(Id)   -> Id
-            end,
-    ?MSG_ID_BYTES = size(MsgId),
+publish(MsgOrId, SeqId, MsgProps, IsPersistent, JournalSizeHint, State) ->
     {JournalHdl, State1} =
         get_journal_handle(
-          case {MsgProps#message_properties.needs_confirming, MsgOrId} of
-              {true,  MsgId} -> UC1  = gb_sets:add_element(MsgId, UC),
-                                State#qistate{unconfirmed     = UC1};
-              {true,  _}     -> UCM1 = gb_sets:add_element(MsgId, UCM),
-                                State#qistate{unconfirmed_msg = UCM1};
-              {false, _}     -> State
-          end),
+          maybe_needs_confirming(MsgProps, MsgOrId, State)),
     file_handle_cache_stats:update(queue_index_journal_write),
     {Bin, MsgBin} = create_pub_record_body(MsgOrId, MsgProps),
     ok = file_handle_cache:append(
@@ -407,6 +375,22 @@ publish(MsgOrId, SeqId, MsgProps, IsPersistent, JournalSizeHint,
       JournalSizeHint,
       add_to_journal(SeqId, {IsPersistent, Bin, MsgBin}, State1)).
 
+maybe_needs_confirming(MsgProps, MsgOrId,
+        State = #qistate{unconfirmed     = UC,
+                         unconfirmed_msg = UCM}) ->
+    MsgId = case MsgOrId of
+                #basic_message{id = Id} -> Id;
+                Id when is_binary(Id)   -> Id
+            end,
+    ?MSG_ID_BYTES = size(MsgId),
+    case {MsgProps#message_properties.needs_confirming, MsgOrId} of
+      {true,  MsgId} -> UC1  = gb_sets:add_element(MsgId, UC),
+                        State#qistate{unconfirmed     = UC1};
+      {true,  _}     -> UCM1 = gb_sets:add_element(MsgId, UCM),
+                        State#qistate{unconfirmed_msg = UCM1};
+      {false, _}     -> State
+    end.
+
 deliver(SeqIds, State) ->
     deliver_or_ack(del, SeqIds, State).
 
@@ -828,8 +812,9 @@ append_journal_to_segment(#segment { journal_entries = JEntries,
         _ ->
             file_handle_cache_stats:update(queue_index_write),
 
-            {ok, Hdl} = file_handle_cache:open(Path, ?WRITE_MODE,
-                                               [{write_buffer, infinity}]),
+            {ok, Hdl} = file_handle_cache:open_with_absolute_path(
+                          Path, ?WRITE_MODE,
+                          [{write_buffer, infinity}]),
             %% the file_handle_cache also does a list reverse, so this
             %% might not be required here, but before we were doing a
             %% sparse_foldr, a lists:reverse/1 seems to be the correct
@@ -844,8 +829,8 @@ get_journal_handle(State = #qistate { journal_handle = undefined,
                                       dir = Dir }) ->
     Path = filename:join(Dir, ?JOURNAL_FILENAME),
     ok = rabbit_file:ensure_dir(Path),
-    {ok, Hdl} = file_handle_cache:open(Path, ?WRITE_MODE,
-                                       [{write_buffer, infinity}]),
+    {ok, Hdl} = file_handle_cache:open_with_absolute_path(
+                  Path, ?WRITE_MODE, [{write_buffer, infinity}]),
     {Hdl, State #qistate { journal_handle = Hdl }};
 get_journal_handle(State = #qistate { journal_handle = Hdl }) ->
     {Hdl, State}.
@@ -1070,7 +1055,8 @@ load_segment(KeepAcked, #segment { path = Path }) ->
         false -> Empty;
         true  -> Size = rabbit_file:file_size(Path),
                  file_handle_cache_stats:update(queue_index_read),
-                 {ok, Hdl} = file_handle_cache:open(Path, ?READ_MODE, []),
+                 {ok, Hdl} = file_handle_cache:open_with_absolute_path(
+                               Path, ?READ_MODE, []),
                  {ok, 0} = file_handle_cache:position(Hdl, bof),
                  {ok, SegBin} = file_handle_cache:read(Hdl, Size),
                  ok = file_handle_cache:close(Hdl),
@@ -1395,10 +1381,11 @@ transform_file(Path, Fun) when is_function(Fun)->
     case rabbit_file:file_size(Path) of
         0    -> ok;
         Size -> {ok, PathTmpHdl} =
-                    file_handle_cache:open(PathTmp, ?WRITE_MODE,
-                                           [{write_buffer, infinity}]),
+                    file_handle_cache:open_with_absolute_path(
+                      PathTmp, ?WRITE_MODE,
+                      [{write_buffer, infinity}]),
 
-                {ok, PathHdl} = file_handle_cache:open(
+                {ok, PathHdl} = file_handle_cache:open_with_absolute_path(
                                   Path, ?READ_MODE, [{read_buffer, Size}]),
                 {ok, Content} = file_handle_cache:read(PathHdl, Size),
                 ok = file_handle_cache:close(PathHdl),
diff --git a/deps/rabbit/src/rabbit_queue_location_client_local.erl b/deps/rabbit/src/rabbit_queue_location_client_local.erl
new file mode 100644 (file)
index 0000000..cd7dfdd
--- /dev/null
@@ -0,0 +1,40 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%% License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_queue_location_client_local).
+-behaviour(rabbit_queue_master_locator).
+
+-include("rabbit.hrl").
+
+-export([description/0, queue_master_location/1]).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "locate queue master client local"},
+                    {mfa,         {rabbit_registry, register,
+                                   [queue_master_locator,
+                                    <<"client-local">>, ?MODULE]}},
+                    {requires,    rabbit_registry},
+                    {enables,     kernel_ready}]}).
+
+
+%%---------------------------------------------------------------------------
+%% Queue Master Location Callbacks
+%%---------------------------------------------------------------------------
+
+description() ->
+    [{description, <<"Locate queue master node as the client local node">>}].
+
+queue_master_location(#amqqueue{}) ->   {ok, node()}.
diff --git a/deps/rabbit/src/rabbit_queue_location_min_masters.erl b/deps/rabbit/src/rabbit_queue_location_min_masters.erl
new file mode 100644 (file)
index 0000000..bc112a3
--- /dev/null
@@ -0,0 +1,79 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%% License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_queue_location_min_masters).
+-behaviour(rabbit_queue_master_locator).
+
+-include("rabbit.hrl").
+
+-export([description/0, queue_master_location/1]).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "locate queue master min bound queues"},
+                    {mfa,         {rabbit_registry, register,
+                                   [queue_master_locator,
+                                    <<"min-masters">>, ?MODULE]}},
+                    {requires,    rabbit_registry},
+                    {enables,     kernel_ready}]}).
+
+%%---------------------------------------------------------------------------
+%% Queue Master Location Callbacks
+%%---------------------------------------------------------------------------
+
+description() ->
+    [{description,
+      <<"Locate queue master node from cluster node with least bound queues">>}].
+
+queue_master_location(#amqqueue{}) ->
+    Cluster            = rabbit_queue_master_location_misc:all_nodes(),
+    VHosts             = rabbit_vhost:list(),
+    BoundQueueMasters  = get_bound_queue_masters_per_vhost(VHosts, []),
+    {_Count, MinMaster}= get_min_master(Cluster, BoundQueueMasters),
+    {ok, MinMaster}.
+
+%%---------------------------------------------------------------------------
+%% Private helper functions
+%%---------------------------------------------------------------------------
+get_min_master(Cluster, BoundQueueMasters) ->
+    lists:min([ {count_masters(Node, BoundQueueMasters), Node} ||
+                  Node <- Cluster ]).
+
+count_masters(Node, Masters) ->
+    length([ X || X <- Masters, X == Node ]).
+
+get_bound_queue_masters_per_vhost([], Acc) ->
+    lists:flatten(Acc);
+get_bound_queue_masters_per_vhost([VHost|RemVHosts], Acc) ->
+    Bindings          = rabbit_binding:list(VHost),
+    BoundQueueMasters = get_queue_master_per_binding(VHost, Bindings, []),
+    get_bound_queue_masters_per_vhost(RemVHosts, [BoundQueueMasters|Acc]).
+
+
+get_queue_master_per_binding(_VHost, [], BoundQueueNodes) -> BoundQueueNodes;
+get_queue_master_per_binding(VHost, [#binding{destination=
+                                                  #resource{kind=queue,
+                                                            name=QueueName}}|
+                                     RemBindings],
+                             QueueMastersAcc) ->
+    QueueMastersAcc0 = case rabbit_queue_master_location_misc:lookup_master(
+                              QueueName, VHost) of
+                           {ok, Master} when is_atom(Master) ->
+                               [Master|QueueMastersAcc];
+                           _ -> QueueMastersAcc
+                       end,
+    get_queue_master_per_binding(VHost, RemBindings, QueueMastersAcc0);
+get_queue_master_per_binding(VHost, [_|RemBindings], QueueMastersAcc) ->
+    get_queue_master_per_binding(VHost, RemBindings, QueueMastersAcc).
diff --git a/deps/rabbit/src/rabbit_queue_location_random.erl b/deps/rabbit/src/rabbit_queue_location_random.erl
new file mode 100644 (file)
index 0000000..2579cbb
--- /dev/null
@@ -0,0 +1,44 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%% License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_queue_location_random).
+-behaviour(rabbit_queue_master_locator).
+
+-include("rabbit.hrl").
+
+-export([description/0, queue_master_location/1]).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "locate queue master random"},
+                    {mfa,         {rabbit_registry, register,
+                                   [queue_master_locator,
+                                    <<"random">>, ?MODULE]}},
+                    {requires,    rabbit_registry},
+                    {enables,     kernel_ready}]}).
+
+%%---------------------------------------------------------------------------
+%% Queue Master Location Callbacks
+%%---------------------------------------------------------------------------
+
+description() ->
+    [{description,
+      <<"Locate queue master node from cluster in a random manner">>}].
+
+queue_master_location(#amqqueue{}) ->
+    Cluster    = rabbit_queue_master_location_misc:all_nodes(),
+    RandomPos  = erlang:phash2(time_compat:monotonic_time(), length(Cluster)),
+    MasterNode = lists:nth(RandomPos + 1, Cluster),
+    {ok, MasterNode}.
diff --git a/deps/rabbit/src/rabbit_queue_location_validator.erl b/deps/rabbit/src/rabbit_queue_location_validator.erl
new file mode 100644 (file)
index 0000000..c5aad50
--- /dev/null
@@ -0,0 +1,71 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%% License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_queue_location_validator).
+-behaviour(rabbit_policy_validator).
+
+-include("rabbit.hrl").
+
+-export([validate_policy/1, validate_strategy/1]).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "Queue location policy validation"},
+                    {mfa, {rabbit_registry, register,
+                           [policy_validator,
+                            <<"queue-master-locator">>,
+                            ?MODULE]}},
+                   {requires, rabbit_registry},
+                   {enables, recovery}]}).
+
+validate_policy(KeyList) ->
+    case proplists:lookup(<<"queue-master-locator">> , KeyList) of
+        {_, Strategy} -> validate_strategy(Strategy);
+        _             -> {error, "queue-master-locator undefined"}
+    end.
+
+validate_strategy(Strategy) ->
+    case module(Strategy) of
+        R = {ok, _M} -> R;
+        _            ->
+            {error, "~p invalid queue-master-locator value", [Strategy]}
+    end.
+
+policy(Policy, Q) ->
+    case rabbit_policy:get(Policy, Q) of
+        undefined -> none;
+        P         -> P
+    end.
+
+module(#amqqueue{} = Q) ->
+    case policy(<<"queue-master-locator">>, Q) of
+        undefined -> no_location_strategy;
+        Mode      -> module(Mode)
+    end;
+
+module(Strategy) when is_binary(Strategy) ->
+    case rabbit_registry:binary_to_type(Strategy) of
+        {error, not_found} -> no_location_strategy;
+        T ->
+            case rabbit_registry:lookup_module(queue_master_locator, T) of
+                {ok, Module} ->
+                    case code:which(Module) of
+                        non_existing -> no_location_strategy;
+                        _            -> {ok, Module}
+                    end;
+                _            ->
+                    no_location_strategy
+            end
+    end.
diff --git a/deps/rabbit/src/rabbit_queue_master_location_misc.erl b/deps/rabbit/src/rabbit_queue_master_location_misc.erl
new file mode 100644 (file)
index 0000000..3d5b2cd
--- /dev/null
@@ -0,0 +1,95 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%% License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_queue_master_location_misc).
+
+-include("rabbit.hrl").
+
+-export([lookup_master/2,
+         lookup_queue/2,
+         get_location/1,
+         get_location_mod_by_config/1,
+         get_location_mod_by_args/1,
+         get_location_mod_by_policy/1,
+         all_nodes/0]).
+
+lookup_master(QueueNameBin, VHostPath) when is_binary(QueueNameBin),
+                                            is_binary(VHostPath) ->
+    Queue = rabbit_misc:r(VHostPath, queue, QueueNameBin),
+    case rabbit_amqqueue:lookup(Queue) of
+        {ok, #amqqueue{pid = Pid}} when is_pid(Pid) ->
+            {ok, node(Pid)};
+        Error -> Error
+    end.
+
+lookup_queue(QueueNameBin, VHostPath) when is_binary(QueueNameBin),
+                                           is_binary(VHostPath) ->
+    Queue = rabbit_misc:r(VHostPath, queue, QueueNameBin),
+    case rabbit_amqqueue:lookup(Queue) of
+        Reply = {ok, #amqqueue{}} -> Reply;
+        Error                     -> Error
+    end.
+
+get_location(Queue=#amqqueue{})->
+    Reply1 = case get_location_mod_by_args(Queue) of
+                 _Err1 = {error, _} ->
+                     case get_location_mod_by_policy(Queue) of
+                         _Err2 = {error, _} ->
+                             case get_location_mod_by_config(Queue) of
+                                 Err3 = {error, _}      -> Err3;
+                                 Reply0 = {ok, _Module} -> Reply0
+                             end;
+                         Reply0 = {ok, _Module} -> Reply0
+                     end;
+                 Reply0 = {ok, _Module} -> Reply0
+             end,
+
+    case Reply1 of
+        {ok, CB} -> CB:queue_master_location(Queue);
+        Error    -> Error
+    end.
+
+get_location_mod_by_args(#amqqueue{arguments=Args}) ->
+    case proplists:lookup(<<"x-queue-master-locator">> , Args) of
+        {<<"x-queue-master-locator">> , Strategy}  ->
+            case rabbit_queue_location_validator:validate_strategy(Strategy) of
+                Reply = {ok, _CB} -> Reply;
+                Error             -> Error
+            end;
+        _ -> {error, "x-queue-master-locator undefined"}
+    end.
+
+get_location_mod_by_policy(Queue=#amqqueue{}) ->
+    case rabbit_policy:get(<<"queue-master-locator">> , Queue) of
+        undefined ->  {error, "queue-master-locator policy undefined"};
+        Strategy  ->
+            case rabbit_queue_location_validator:validate_strategy(Strategy) of
+                Reply = {ok, _CB} -> Reply;
+                Error             -> Error
+            end
+    end.
+
+get_location_mod_by_config(#amqqueue{}) ->
+    case application:get_env(rabbit, queue_master_locator) of
+        {ok, Strategy} ->
+            case rabbit_queue_location_validator:validate_strategy(Strategy) of
+                Reply = {ok, _CB} -> Reply;
+                Error             -> Error
+            end;
+        _ -> {error, "queue_master_locator undefined"}
+    end.
+
+all_nodes()  -> rabbit_mnesia:cluster_nodes(running).
similarity index 90%
rename from rabbitmq-server/src/rabbit_recovery_terms.erl
rename to deps/rabbit/src/rabbit_recovery_terms.erl
index 114029651b0e83463e1d082ab737d259a654cba2..f6f94eca45b1e36eed213544579734bb7f646b7a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% We use a gen_server simply so that during the terminate/2 call
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start() -> rabbit_types:ok_or_error(term())).
--spec(stop() -> rabbit_types:ok_or_error(term())).
--spec(store(file:filename(), term()) -> rabbit_types:ok_or_error(term())).
--spec(read(file:filename()) -> rabbit_types:ok_or_error2(term(), not_found)).
--spec(clear() -> 'ok').
-
--endif. % use_specs
+-spec start() -> rabbit_types:ok_or_error(term()).
+-spec stop() -> rabbit_types:ok_or_error(term()).
+-spec store(file:filename(), term()) -> rabbit_types:ok_or_error(term()).
+-spec read(file:filename()) -> rabbit_types:ok_or_error2(term(), not_found).
+-spec clear() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
similarity index 91%
rename from rabbitmq-server/src/rabbit_registry.erl
rename to deps/rabbit/src/rabbit_registry.erl
index fc016e718ea1d8c4acb5816cd696728cb5e576e3..0428c3533fe52af6e1439d85857e430c55c8fc7a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_registry).
 -define(SERVER, ?MODULE).
 -define(ETS_NAME, ?MODULE).
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(register/3 :: (atom(), binary(), atom()) -> 'ok').
--spec(unregister/2 :: (atom(), binary()) -> 'ok').
--spec(binary_to_type/1 ::
-        (binary()) -> atom() | rabbit_types:error('not_found')).
--spec(lookup_module/2 ::
-        (atom(), atom()) -> rabbit_types:ok_or_error2(atom(), 'not_found')).
--spec(lookup_all/1 :: (atom()) -> [{atom(), atom()}]).
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec register(atom(), binary(), atom()) -> 'ok'.
+-spec unregister(atom(), binary()) -> 'ok'.
+-spec binary_to_type
+        (binary()) -> atom() | rabbit_types:error('not_found').
+-spec lookup_module
+        (atom(), atom()) -> rabbit_types:ok_or_error2(atom(), 'not_found').
+-spec lookup_all(atom()) -> [{atom(), atom()}].
 
 %%---------------------------------------------------------------------------
 
@@ -133,7 +129,8 @@ class_module(exchange_decorator)  -> rabbit_exchange_decorator;
 class_module(queue_decorator)     -> rabbit_queue_decorator;
 class_module(policy_validator)    -> rabbit_policy_validator;
 class_module(ha_mode)             -> rabbit_mirror_queue_mode;
-class_module(channel_interceptor) -> rabbit_channel_interceptor.
+class_module(channel_interceptor) -> rabbit_channel_interceptor;
+class_module(queue_master_locator)-> rabbit_queue_master_locator.
 
 %%---------------------------------------------------------------------------
 
diff --git a/deps/rabbit/src/rabbit_resource_monitor_misc.erl b/deps/rabbit/src/rabbit_resource_monitor_misc.erl
new file mode 100644 (file)
index 0000000..56faefe
--- /dev/null
@@ -0,0 +1,47 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+
+-module(rabbit_resource_monitor_misc).
+
+-export([parse_information_unit/1]).
+
+-spec parse_information_unit(integer() | string()) ->
+          {ok, integer()} | {error, parse_error}.
+
+parse_information_unit(Value) when is_integer(Value) -> {ok, Value};
+parse_information_unit(Value) when is_list(Value) ->
+    case re:run(Value,
+                "^(?<VAL>[0-9]+)(?<UNIT>kB|KB|MB|GB|kb|mb|gb|Kb|Mb|Gb|kiB|KiB|MiB|GiB|kib|mib|gib|KIB|MIB|GIB|k|K|m|M|g|G)?$",
+                [{capture, all_but_first, list}]) of
+       {match, [[], _]} ->
+            {ok, list_to_integer(Value)};
+        {match, [Num]} ->
+            {ok, list_to_integer(Num)};
+        {match, [Num, Unit]} ->
+            Multiplier = case Unit of
+                             KiB when KiB =:= "k";  KiB =:= "kiB"; KiB =:= "K"; KiB =:= "KIB"; KiB =:= "kib" -> 1024;
+                             MiB when MiB =:= "m";  MiB =:= "MiB"; MiB =:= "M"; MiB =:= "MIB"; MiB =:= "mib" -> 1024*1024;
+                             GiB when GiB =:= "g";  GiB =:= "GiB"; GiB =:= "G"; GiB =:= "GIB"; GiB =:= "gib" -> 1024*1024*1024;
+                             KB  when KB  =:= "KB"; KB  =:= "kB"; KB =:= "kb"; KB =:= "Kb"  -> 1000;
+                             MB  when MB  =:= "MB"; MB  =:= "mB"; MB =:= "mb"; MB =:= "Mb"  -> 1000000;
+                             GB  when GB  =:= "GB"; GB  =:= "gB"; GB =:= "gb"; GB =:= "Gb"  -> 1000000000
+                         end,
+            {ok, list_to_integer(Num) * Multiplier};
+        nomatch ->
+                                                % log error
+            {error, parse_error}
+    end.
similarity index 81%
rename from rabbitmq-server/src/rabbit_restartable_sup.erl
rename to deps/rabbit/src/rabbit_restartable_sup.erl
index 516eea91e94e7c8db6f6b69fef8cdf2986a1f3ea..196025918827075c5569c5c8407161f4f642a49f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_restartable_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/3 :: (atom(), rabbit_types:mfargs(), boolean()) ->
-                           rabbit_types:ok_pid_or_error()).
-
--endif.
+-spec start_link(atom(), rabbit_types:mfargs(), boolean()) ->
+                           rabbit_types:ok_pid_or_error().
 
 %%----------------------------------------------------------------------------
 
@@ -45,4 +41,4 @@ init([{Mod, _F, _A} = Fun, Delay]) ->
           [{Mod, Fun, case Delay of
                           true  -> {transient, 1};
                           false -> transient
-                      end, ?MAX_WAIT, worker, [Mod]}]}}.
+                      end, ?WORKER_WAIT, worker, [Mod]}]}}.
similarity index 90%
rename from rabbitmq-server/src/rabbit_router.erl
rename to deps/rabbit/src/rabbit_router.erl
index 9692167c6d90bdff61c59ec07614f71b908c1053..d4390ac4d85a56ba86c881eeb467e940745d7393 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_router).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([routing_key/0, match_result/0]).
 
--type(routing_key() :: binary()).
--type(match_result() :: [rabbit_types:binding_destination()]).
+-type routing_key() :: binary().
+-type match_result() :: [rabbit_types:binding_destination()].
 
--spec(match_bindings/2 :: (rabbit_types:binding_source(),
+-spec match_bindings(rabbit_types:binding_source(),
                            fun ((rabbit_types:binding()) -> boolean())) ->
-    match_result()).
--spec(match_routing_key/2 :: (rabbit_types:binding_source(),
+    match_result().
+-spec match_routing_key(rabbit_types:binding_source(),
                              [routing_key()] | ['_']) ->
-    match_result()).
-
--endif.
+    match_result().
 
 %%----------------------------------------------------------------------------
 
similarity index 72%
rename from rabbitmq-server/src/rabbit_runtime_parameters.erl
rename to deps/rabbit/src/rabbit_runtime_parameters.erl
index fafd598bb731ebe013127d90233bba27b0ba540a..97f78da8ba5c3197bcdbcfc7c0f38b3152d095fb 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_runtime_parameters).
 
+%% Runtime parameters are bits of configuration that are
+%% set, as the name implies, at runtime and not in the config file.
+%%
+%% The benefits of storing some bits of configuration at runtime vary:
+%%
+%%  * Some parameters are vhost-specific
+%%  * Others are specific to individual nodes
+%%  * ...or even queues, exchanges, etc
+%%
+%% The most obvious use case for runtime parameters is policies but
+%% there are others:
+%% 
+%% * Plugin-specific parameters that only make sense at runtime,
+%%   e.g. Federation and Shovel link settings
+%% * Exchange and queue decorators
+%%
+%% Parameters are grouped by components, e.g. <<"policy">> or <<"shovel">>.
+%% Components are mapped to modules that perform validation.
+%% Runtime parameter values are then looked up by the modules that
+%% need to use them.
+%%
+%% Parameters are stored in Mnesia and can be global. Their changes
+%% are broadcasted over rabbit_event.
+%%
+%% See also:
+%%
+%%  * rabbit_policies
+%%  * rabbit_policy
+%%  * rabbit_registry
+%%  * rabbit_event
+
 -include("rabbit.hrl").
 
 -export([parse_set/5, set/5, set_any/5, clear/3, clear_any/3, list/0, list/1,
-         list_component/1, list/2, list_formatted/1, lookup/3,
-         value/3, value/4, info_keys/0]).
+         list_component/1, list/2, list_formatted/1, list_formatted/3,
+         lookup/3, value/3, value/4, info_keys/0, clear_component/1]).
 
 -export([set_global/2, value_global/1, value_global/2]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(ok_or_error_string() :: 'ok' | {'error_string', string()}).
--type(ok_thunk_or_error_string() :: ok_or_error_string() | fun(() -> 'ok')).
-
--spec(parse_set/5 :: (rabbit_types:vhost(), binary(), binary(), string(),
-                      rabbit_types:user() | 'none') -> ok_or_error_string()).
--spec(set/5 :: (rabbit_types:vhost(), binary(), binary(), term(),
-                rabbit_types:user() | 'none') -> ok_or_error_string()).
--spec(set_any/5 :: (rabbit_types:vhost(), binary(), binary(), term(),
-                    rabbit_types:user() | 'none') -> ok_or_error_string()).
--spec(set_global/2 :: (atom(), term()) -> 'ok').
--spec(clear/3 :: (rabbit_types:vhost(), binary(), binary())
-                 -> ok_thunk_or_error_string()).
--spec(clear_any/3 :: (rabbit_types:vhost(), binary(), binary())
-                     -> ok_thunk_or_error_string()).
--spec(list/0 :: () -> [rabbit_types:infos()]).
--spec(list/1 :: (rabbit_types:vhost() | '_') -> [rabbit_types:infos()]).
--spec(list_component/1 :: (binary()) -> [rabbit_types:infos()]).
--spec(list/2 :: (rabbit_types:vhost() | '_', binary() | '_')
-                -> [rabbit_types:infos()]).
--spec(list_formatted/1 :: (rabbit_types:vhost()) -> [rabbit_types:infos()]).
--spec(lookup/3 :: (rabbit_types:vhost(), binary(), binary())
-                  -> rabbit_types:infos() | 'not_found').
--spec(value/3 :: (rabbit_types:vhost(), binary(), binary()) -> term()).
--spec(value/4 :: (rabbit_types:vhost(), binary(), binary(), term()) -> term()).
--spec(value_global/1 :: (atom()) -> term() | 'not_found').
--spec(value_global/2 :: (atom(), term()) -> term()).
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
-
--endif.
+-type ok_or_error_string() :: 'ok' | {'error_string', string()}.
+-type ok_thunk_or_error_string() :: ok_or_error_string() | fun(() -> 'ok').
+
+-spec parse_set(rabbit_types:vhost(), binary(), binary(), string(),
+                      rabbit_types:user() | 'none') -> ok_or_error_string().
+-spec set(rabbit_types:vhost(), binary(), binary(), term(),
+                rabbit_types:user() | 'none') -> ok_or_error_string().
+-spec set_any(rabbit_types:vhost(), binary(), binary(), term(),
+                    rabbit_types:user() | 'none') -> ok_or_error_string().
+-spec set_global(atom(), term()) -> 'ok'.
+-spec clear(rabbit_types:vhost(), binary(), binary())
+                 -> ok_thunk_or_error_string().
+-spec clear_any(rabbit_types:vhost(), binary(), binary())
+                     -> ok_thunk_or_error_string().
+-spec list() -> [rabbit_types:infos()].
+-spec list(rabbit_types:vhost() | '_') -> [rabbit_types:infos()].
+-spec list_component(binary()) -> [rabbit_types:infos()].
+-spec list(rabbit_types:vhost() | '_', binary() | '_')
+                -> [rabbit_types:infos()].
+-spec list_formatted(rabbit_types:vhost()) -> [rabbit_types:infos()].
+-spec list_formatted(rabbit_types:vhost(), reference(), pid()) -> 'ok'.
+-spec lookup(rabbit_types:vhost(), binary(), binary())
+                  -> rabbit_types:infos() | 'not_found'.
+-spec value(rabbit_types:vhost(), binary(), binary()) -> term().
+-spec value(rabbit_types:vhost(), binary(), binary(), term()) -> term().
+-spec value_global(atom()) -> term() | 'not_found'.
+-spec value_global(atom(), term()) -> term().
+-spec info_keys() -> rabbit_types:info_keys().
 
 %%---------------------------------------------------------------------------
 
@@ -139,6 +167,17 @@ clear(_, <<"policy">> , _) ->
 clear(VHost, Component, Name) ->
     clear_any(VHost, Component, Name).
 
+clear_component(Component) ->
+    case rabbit_runtime_parameters:list_component(Component) of
+        [] ->
+            ok;
+        Xs ->
+            [rabbit_runtime_parameters:clear(pget(vhost, X),
+                                             pget(component, X),
+                                             pget(name, X))|| X <- Xs],
+            ok
+    end.
+
 clear_any(VHost, Component, Name) ->
     Notify = fun () ->
                      case lookup_component(Component) of
@@ -198,6 +237,11 @@ list(VHost, Component) ->
 list_formatted(VHost) ->
     [pset(value, format(pget(value, P)), P) || P <- list(VHost)].
 
+list_formatted(VHost, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map(
+      AggregatorPid, Ref,
+      fun(P) -> pset(value, format(pget(value, P)), P) end, list(VHost)).
+
 lookup(VHost, Component, Name) ->
     case lookup0({VHost, Component, Name}, rabbit_misc:const(not_found)) of
         not_found -> not_found;
similarity index 97%
rename from rabbitmq-server/src/rabbit_sasl_report_file_h.erl
rename to deps/rabbit/src/rabbit_sasl_report_file_h.erl
index c6000a268ab99928a9db904046c2dcedd878beaa..9c6d7657f29898500b253552ade8882537dbed36 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_sasl_report_file_h).
@@ -36,6 +36,8 @@
 
 %% Used only when swapping handlers and performing
 %% log rotation
+init({{File, ""}, _}) ->
+    init(File);
 init({{File, Suffix}, []}) ->
     case rabbit_file:append_file(File, Suffix) of
         ok -> file:delete(File),
similarity index 95%
rename from rabbitmq-server/src/rabbit_ssl.erl
rename to deps/rabbit/src/rabbit_ssl.erl
index c07a913723ccf10b9702d1f4bb337dc4743cd25c..ac9fb204d0b194234956973499e4bcd3995e8d83 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_ssl).
 
 %%--------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([certificate/0]).
 
--type(certificate() :: binary()).
-
--spec(peer_cert_issuer/1        :: (certificate()) -> string()).
--spec(peer_cert_subject/1       :: (certificate()) -> string()).
--spec(peer_cert_validity/1      :: (certificate()) -> string()).
--spec(peer_cert_subject_items/2  ::
-        (certificate(), tuple()) -> [string()] | 'not_found').
--spec(peer_cert_auth_name/1 ::
-        (certificate()) -> binary() | 'not_found' | 'unsafe').
+-type certificate() :: binary().
 
--endif.
+-spec peer_cert_issuer(certificate()) -> string().
+-spec peer_cert_subject(certificate()) -> string().
+-spec peer_cert_validity(certificate()) -> string().
+-spec peer_cert_subject_items
+        (certificate(), tuple()) -> [string()] | 'not_found'.
+-spec peer_cert_auth_name
+        (certificate()) -> binary() | 'not_found' | 'unsafe'.
 
 %%--------------------------------------------------------------------------
 %% High-level functions used by reader
similarity index 77%
rename from rabbitmq-server/src/rabbit_sup.erl
rename to deps/rabbit/src/rabbit_sup.erl
index 537ff8d6601788543b391e0479e1dc1b40a604b4..ad70540e5b26c571ef4387ee821077ec5f274047 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(start_child/1 :: (atom()) -> 'ok').
--spec(start_child/2 :: (atom(), [any()]) -> 'ok').
--spec(start_child/3 :: (atom(), atom(), [any()]) -> 'ok').
--spec(start_supervisor_child/1 :: (atom()) -> 'ok').
--spec(start_supervisor_child/2 :: (atom(), [any()]) -> 'ok').
--spec(start_supervisor_child/3 :: (atom(), atom(), [any()]) -> 'ok').
--spec(start_restartable_child/1 :: (atom()) -> 'ok').
--spec(start_restartable_child/2 :: (atom(), [any()]) -> 'ok').
--spec(start_delayed_restartable_child/1 :: (atom()) -> 'ok').
--spec(start_delayed_restartable_child/2 :: (atom(), [any()]) -> 'ok').
--spec(stop_child/1 :: (atom()) -> rabbit_types:ok_or_error(any())).
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec start_child(atom()) -> 'ok'.
+-spec start_child(atom(), [any()]) -> 'ok'.
+-spec start_child(atom(), atom(), [any()]) -> 'ok'.
+-spec start_supervisor_child(atom()) -> 'ok'.
+-spec start_supervisor_child(atom(), [any()]) -> 'ok'.
+-spec start_supervisor_child(atom(), atom(), [any()]) -> 'ok'.
+-spec start_restartable_child(atom()) -> 'ok'.
+-spec start_restartable_child(atom(), [any()]) -> 'ok'.
+-spec start_delayed_restartable_child(atom()) -> 'ok'.
+-spec start_delayed_restartable_child(atom(), [any()]) -> 'ok'.
+-spec stop_child(atom()) -> rabbit_types:ok_or_error(any()).
 
 %%----------------------------------------------------------------------------
 
@@ -62,7 +58,7 @@ start_child(ChildId, Mod, Args) ->
     child_reply(supervisor:start_child(
                   ?SERVER,
                   {ChildId, {Mod, start_link, Args},
-                   transient, ?MAX_WAIT, worker, [Mod]})).
+                   transient, ?WORKER_WAIT, worker, [Mod]})).
 
 start_supervisor_child(Mod) -> start_supervisor_child(Mod, []).
 
similarity index 95%
rename from rabbitmq-server/src/rabbit_table.erl
rename to deps/rabbit/src/rabbit_table.erl
index a873a714e13a272e0cf5e567907c226d4b2eaca1..390909696499502b4aeb2f4f205d34890379f496 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_table).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(create/0 :: () -> 'ok').
--spec(create_local_copy/1 :: ('disc' | 'ram') -> 'ok').
--spec(wait_for_replicated/0 :: () -> 'ok').
--spec(wait/1 :: ([atom()]) -> 'ok').
--spec(wait_timeout/0 :: () -> non_neg_integer() | infinity).
--spec(force_load/0 :: () -> 'ok').
--spec(is_present/0 :: () -> boolean()).
--spec(is_empty/0 :: () -> boolean()).
--spec(needs_default_data/0 :: () -> boolean()).
--spec(check_schema_integrity/0 :: () -> rabbit_types:ok_or_error(any())).
--spec(clear_ram_only_tables/0 :: () -> 'ok').
-
--endif.
+-spec create() -> 'ok'.
+-spec create_local_copy('disc' | 'ram') -> 'ok'.
+-spec wait_for_replicated() -> 'ok'.
+-spec wait([atom()]) -> 'ok'.
+-spec wait_timeout() -> non_neg_integer() | infinity.
+-spec force_load() -> 'ok'.
+-spec is_present() -> boolean().
+-spec is_empty() -> boolean().
+-spec needs_default_data() -> boolean().
+-spec check_schema_integrity() -> rabbit_types:ok_or_error(any()).
+-spec clear_ram_only_tables() -> 'ok'.
 
 %%----------------------------------------------------------------------------
 %% Main interface
similarity index 88%
rename from rabbitmq-server/src/rabbit_trace.erl
rename to deps/rabbit/src/rabbit_trace.erl
index 49b16078fc772200799d1c3e771d36b06f79bf21..4bfd94e1e0022c3fd8cbf11bdc49994ae16c50cd 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_trace).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-type state() :: rabbit_types:exchange() | 'none'.
 
--type(state() :: rabbit_types:exchange() | 'none').
-
--spec(init/1 :: (rabbit_types:vhost()) -> state()).
--spec(enabled/1 :: (rabbit_types:vhost()) -> boolean()).
--spec(tap_in/6 :: (rabbit_types:basic_message(), [rabbit_amqqueue:name()],
+-spec init(rabbit_types:vhost()) -> state().
+-spec enabled(rabbit_types:vhost()) -> boolean().
+-spec tap_in(rabbit_types:basic_message(), [rabbit_amqqueue:name()],
                    binary(), rabbit_channel:channel_number(),
-                   rabbit_types:username(), state()) -> 'ok').
--spec(tap_out/5 :: (rabbit_amqqueue:qmsg(), binary(),
+                   rabbit_types:username(), state()) -> 'ok'.
+-spec tap_out(rabbit_amqqueue:qmsg(), binary(),
                     rabbit_channel:channel_number(),
-                    rabbit_types:username(), state()) -> 'ok').
-
--spec(start/1 :: (rabbit_types:vhost()) -> 'ok').
--spec(stop/1 :: (rabbit_types:vhost()) -> 'ok').
+                    rabbit_types:username(), state()) -> 'ok'.
 
--endif.
+-spec start(rabbit_types:vhost()) -> 'ok'.
+-spec stop(rabbit_types:vhost()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
similarity index 95%
rename from rabbitmq-server/src/rabbit_upgrade.erl
rename to deps/rabbit/src/rabbit_upgrade.erl
index daf39b8acc0b8711fe5b3ad30a00dc0e0c1e658b..f88b7cc73fcb46d5d0e546afb8bf6a5e7123feaf 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_upgrade).
 
 %% -------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(maybe_upgrade_mnesia/0 :: () -> 'ok').
--spec(maybe_upgrade_local/0 :: () -> 'ok' |
-                                     'version_not_available' |
-                                     'starting_from_scratch').
-
--endif.
+-spec maybe_upgrade_mnesia() -> 'ok'.
+-spec maybe_upgrade_local() ->
+          'ok' |
+          'version_not_available' |
+          'starting_from_scratch'.
 
 %% -------------------------------------------------------------------
 
@@ -100,7 +97,12 @@ ensure_backup_taken() ->
                      false -> ok = take_backup();
                      _     -> ok
                  end;
-        true  -> throw({error, previous_upgrade_failed})
+        true  ->
+          error("Found lock file at ~s.
+            Either previous upgrade is in progress or has failed.
+            Database backup path: ~s",
+            [lock_filename(), backup_dir()]),
+          throw({error, previous_upgrade_failed})
     end.
 
 take_backup() ->
@@ -108,7 +110,7 @@ take_backup() ->
     case rabbit_mnesia:copy_db(BackupDir) of
         ok         -> info("upgrades: Mnesia dir backed up to ~p~n",
                            [BackupDir]);
-        {error, E} -> throw({could_not_back_up_mnesia_dir, E})
+        {error, E} -> throw({could_not_back_up_mnesia_dir, E, BackupDir})
     end.
 
 ensure_backup_removed() ->
similarity index 85%
rename from rabbitmq-server/src/rabbit_upgrade_functions.erl
rename to deps/rabbit/src/rabbit_upgrade_functions.erl
index 4eced3f32fa934be79ad556f8d3f2d5f5538a524..8609a0e424846a019df7f7412f60e2352e2ab6a3 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_upgrade_functions).
 -rabbit_upgrade({down_slave_nodes,      mnesia, [queue_decorators]}).
 -rabbit_upgrade({queue_state,           mnesia, [down_slave_nodes]}).
 -rabbit_upgrade({recoverable_slaves,    mnesia, [queue_state]}).
+-rabbit_upgrade({policy_version,        mnesia, [recoverable_slaves]}).
+-rabbit_upgrade({user_password_hashing, mnesia, [hash_passwords]}).
 
 %% -------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(remove_user_scope/0     :: () -> 'ok').
--spec(hash_passwords/0        :: () -> 'ok').
--spec(add_ip_to_listener/0    :: () -> 'ok').
--spec(internal_exchanges/0    :: () -> 'ok').
--spec(user_to_internal_user/0 :: () -> 'ok').
--spec(topic_trie/0            :: () -> 'ok').
--spec(semi_durable_route/0    :: () -> 'ok').
--spec(exchange_event_serial/0 :: () -> 'ok').
--spec(trace_exchanges/0       :: () -> 'ok').
--spec(user_admin_to_tags/0    :: () -> 'ok').
--spec(ha_mirrors/0            :: () -> 'ok').
--spec(gm/0                    :: () -> 'ok').
--spec(exchange_scratch/0      :: () -> 'ok').
--spec(mirrored_supervisor/0   :: () -> 'ok').
--spec(topic_trie_node/0       :: () -> 'ok').
--spec(runtime_parameters/0    :: () -> 'ok').
--spec(policy/0                :: () -> 'ok').
--spec(sync_slave_pids/0       :: () -> 'ok').
--spec(no_mirror_nodes/0       :: () -> 'ok').
--spec(gm_pids/0               :: () -> 'ok').
--spec(exchange_decorators/0   :: () -> 'ok').
--spec(policy_apply_to/0       :: () -> 'ok').
--spec(queue_decorators/0      :: () -> 'ok').
--spec(internal_system_x/0     :: () -> 'ok').
--spec(cluster_name/0          :: () -> 'ok').
--spec(down_slave_nodes/0      :: () -> 'ok').
--spec(queue_state/0           :: () -> 'ok').
--spec(recoverable_slaves/0    :: () -> 'ok').
-
--endif.
+-spec remove_user_scope() -> 'ok'.
+-spec hash_passwords() -> 'ok'.
+-spec add_ip_to_listener() -> 'ok'.
+-spec internal_exchanges() -> 'ok'.
+-spec user_to_internal_user() -> 'ok'.
+-spec topic_trie() -> 'ok'.
+-spec semi_durable_route() -> 'ok'.
+-spec exchange_event_serial() -> 'ok'.
+-spec trace_exchanges() -> 'ok'.
+-spec user_admin_to_tags() -> 'ok'.
+-spec ha_mirrors() -> 'ok'.
+-spec gm() -> 'ok'.
+-spec exchange_scratch() -> 'ok'.
+-spec mirrored_supervisor() -> 'ok'.
+-spec topic_trie_node() -> 'ok'.
+-spec runtime_parameters() -> 'ok'.
+-spec policy() -> 'ok'.
+-spec sync_slave_pids() -> 'ok'.
+-spec no_mirror_nodes() -> 'ok'.
+-spec gm_pids() -> 'ok'.
+-spec exchange_decorators() -> 'ok'.
+-spec policy_apply_to() -> 'ok'.
+-spec queue_decorators() -> 'ok'.
+-spec internal_system_x() -> 'ok'.
+-spec cluster_name() -> 'ok'.
+-spec down_slave_nodes() -> 'ok'.
+-spec queue_state() -> 'ok'.
+-spec recoverable_slaves() -> 'ok'.
+-spec user_password_hashing() -> 'ok'.
 
 %%--------------------------------------------------------------------
 
@@ -103,11 +102,15 @@ remove_user_scope() ->
       end,
       [user_vhost, permission]).
 
+%% this is an early migration that hashes passwords using MD5,
+%% only relevant to those migrating from 2.1.1.
+%% all users created after in 3.6.0 or later will use SHA-256 (unless configured
+%% otherwise)
 hash_passwords() ->
     transform(
       rabbit_user,
       fun ({user, Username, Password, IsAdmin}) ->
-              Hash = rabbit_auth_backend_internal:hash_password(Password),
+              Hash = rabbit_auth_backend_internal:hash_password(rabbit_password_hashing_md5, Password),
               {user, Username, Hash, IsAdmin}
       end,
       [username, password_hash, is_admin]).
@@ -431,6 +434,35 @@ recoverable_slaves(Table) ->
        sync_slave_pids, recoverable_slaves, policy, gm_pids, decorators,
        state]).
 
+policy_version() ->
+    ok = policy_version(rabbit_queue),
+    ok = policy_version(rabbit_durable_queue).
+
+policy_version(Table) ->
+    transform(
+      Table,
+      fun ({amqqueue, Name, Durable, AutoDelete, ExclusiveOwner, Arguments,
+            Pid, SlavePids, SyncSlavePids, DSN, Policy, GmPids, Decorators,
+            State}) ->
+              {amqqueue, Name, Durable, AutoDelete, ExclusiveOwner, Arguments,
+               Pid, SlavePids, SyncSlavePids, DSN, Policy, GmPids, Decorators,
+               State, 0}
+      end,
+      [name, durable, auto_delete, exclusive_owner, arguments, pid, slave_pids,
+       sync_slave_pids, recoverable_slaves, policy, gm_pids, decorators, state,
+       policy_version]).
+
+%% Prior to 3.6.0, passwords were hashed using MD5, this populates
+%% existing records with said default.  Users created with 3.6.0+ will
+%% have internal_user.hashing_algorithm populated by the internal
+%% authn backend.
+user_password_hashing() ->
+    transform(
+      rabbit_user,
+      fun ({internal_user, Username, Hash, Tags}) ->
+              {internal_user, Username, Hash, Tags, rabbit_password_hashing_md5}
+      end,
+      [username, password_hash, tags, hashing_algorithm]).
 
 %%--------------------------------------------------------------------
 
@@ -452,8 +484,8 @@ create(Tab, TabDef) ->
 %% Dumb replacement for rabbit_exchange:declare that does not require
 %% the exchange type registry or worker pool to be running by dint of
 %% not validating anything and assuming the exchange type does not
-%% require serialisation.
-%% NB: this assumes the pre-exchange-scratch-space format
+%% require serialisation.  NB: this assumes the
+%% pre-exchange-scratch-space format
 declare_exchange(XName, Type) ->
     X = {exchange, XName, Type, true, false, false, []},
     ok = mnesia:dirty_write(rabbit_durable_exchange, X).
similarity index 80%
rename from rabbitmq-server/src/rabbit_variable_queue.erl
rename to deps/rabbit/src/rabbit_variable_queue.erl
index a0e71c69de85043169629820d5d9f93c9b7a6ec5..dd9225614666c163de9e2f6fc4a12d206b79be8e 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_variable_queue).
 
 -export([init/3, terminate/2, delete_and_terminate/2, delete_crashed/1,
          purge/1, purge_acks/1,
-         publish/6, publish_delivered/5, discard/4, drain_confirmed/1,
+         publish/6, publish_delivered/5,
+         batch_publish/4, batch_publish_delivered/4,
+         discard/4, drain_confirmed/1,
          dropwhile/2, fetchwhile/4, fetch/2, drop/2, ack/2, requeue/2,
          ackfold/4, fold/3, len/1, is_empty/1, depth/1,
          set_ram_duration_target/2, ram_duration/1, needs_timeout/1, timeout/1,
          handle_pre_hibernate/1, resume/1, msg_rates/1,
-         info/2, invoke/3, is_duplicate/2, multiple_routing_keys/0]).
+         info/2, invoke/3, is_duplicate/2, set_queue_mode/2,
+         zip_msgs_and_acks/4,  multiple_routing_keys/0]).
 
 -export([start/1, stop/0]).
 
           disk_read_count,
           disk_write_count,
 
-          io_batch_size
+          io_batch_size,
+
+          %% default queue or lazy queue
+          mode,
+          %% number of reduce_memory_usage executions, once it
+          %% reaches a threshold the queue will manually trigger a runtime GC
+               %% see: maybe_execute_gc/1
+          memory_reduction_run_count
         }).
 
 -record(rates, { in, out, ack_in, ack_out, timestamp }).
 
 -rabbit_upgrade({multiple_routing_keys, local, []}).
 
--ifdef(use_specs).
-
--type(timestamp() :: {non_neg_integer(), non_neg_integer(), non_neg_integer()}).
--type(seq_id()  :: non_neg_integer()).
+-type seq_id()  :: non_neg_integer().
 
--type(rates() :: #rates { in        :: float(),
+-type rates() :: #rates { in        :: float(),
                           out       :: float(),
                           ack_in    :: float(),
                           ack_out   :: float(),
-                          timestamp :: timestamp()}).
+                          timestamp :: rabbit_types:timestamp()}.
 
--type(delta() :: #delta { start_seq_id :: non_neg_integer(),
+-type delta() :: #delta { start_seq_id :: non_neg_integer(),
                           count        :: non_neg_integer(),
-                          end_seq_id   :: non_neg_integer() }).
+                          end_seq_id   :: non_neg_integer() }.
 
 %% The compiler (rightfully) complains that ack() and state() are
 %% unused. For this reason we duplicate a -spec from
 %% warnings. The problem here is that we can't parameterise the BQ
 %% behaviour by these two types as we would like to. We still leave
 %% these here for documentation purposes.
--type(ack() :: seq_id()).
--type(state() :: #vqstate {
+-type ack() :: seq_id().
+-type state() :: #vqstate {
              q1                    :: ?QUEUE:?QUEUE(),
              q2                    :: ?QUEUE:?QUEUE(),
              delta                 :: delta(),
              out_counter           :: non_neg_integer(),
              in_counter            :: non_neg_integer(),
              rates                 :: rates(),
-             msgs_on_disk          :: gb_sets:set(),
-             msg_indices_on_disk   :: gb_sets:set(),
-             unconfirmed           :: gb_sets:set(),
-             confirmed             :: gb_sets:set(),
+             msgs_on_disk          :: ?GB_SET_TYPE(),
+             msg_indices_on_disk   :: ?GB_SET_TYPE(),
+             unconfirmed           :: ?GB_SET_TYPE(),
+             confirmed             :: ?GB_SET_TYPE(),
              ack_out_counter       :: non_neg_integer(),
              ack_in_counter        :: non_neg_integer(),
              disk_read_count       :: non_neg_integer(),
              disk_write_count      :: non_neg_integer(),
 
-             io_batch_size         :: pos_integer()}).
+             io_batch_size         :: pos_integer(),
+             mode                  :: 'default' | 'lazy',
+             memory_reduction_run_count :: non_neg_integer()}.
 %% Duplicated from rabbit_backing_queue
--spec(ack/2 :: ([ack()], state()) -> {[rabbit_guid:guid()], state()}).
-
--spec(multiple_routing_keys/0 :: () -> 'ok').
+-spec ack([ack()], state()) -> {[rabbit_guid:guid()], state()}.
 
--endif.
+-spec multiple_routing_keys() -> 'ok'.
 
 -define(BLANK_DELTA, #delta { start_seq_id = undefined,
                               count        = 0,
 %% rabbit_amqqueue_process need fairly fresh rates.
 -define(MSGS_PER_RATE_CALC, 100).
 
+
+%% we define the garbage collector threshold
+%% it needs to tune the GC calls inside `reduce_memory_use`
+%% see: rabbitmq-server-973 and `maybe_execute_gc` function
+-define(DEFAULT_EXPLICIT_GC_RUN_OP_THRESHOLD, 250).
+-define(EXPLICIT_GC_RUN_OP_THRESHOLD,
+    case get(explicit_gc_run_operation_threshold) of
+        undefined ->
+            Val = rabbit_misc:get_env(rabbit, lazy_queue_explicit_gc_run_operation_threshold,
+                ?DEFAULT_EXPLICIT_GC_RUN_OP_THRESHOLD),
+            put(explicit_gc_run_operation_threshold, Val),
+            Val;
+        Val       -> Val
+    end).
+
 %%----------------------------------------------------------------------------
 %% Public API
 %%----------------------------------------------------------------------------
@@ -550,7 +572,7 @@ delete_crashed(#amqqueue{name = QName}) ->
     ok = rabbit_queue_index:erase(QName).
 
 purge(State = #vqstate { len = Len }) ->
-    case is_pending_ack_empty(State) of
+    case is_pending_ack_empty(State) and is_unconfirmed_empty(State) of
         true ->
             {Len, purge_and_index_reset(State)};
         false ->
@@ -559,52 +581,32 @@ purge(State = #vqstate { len = Len }) ->
 
 purge_acks(State) -> a(purge_pending_ack(false, State)).
 
-publish(Msg = #basic_message { is_persistent = IsPersistent, id = MsgId },
-        MsgProps = #message_properties { needs_confirming = NeedsConfirming },
-        IsDelivered, _ChPid, _Flow,
-        State = #vqstate { q1 = Q1, q3 = Q3, q4 = Q4,
-                           qi_embed_msgs_below = IndexMaxSize,
-                           next_seq_id         = SeqId,
-                           in_counter          = InCount,
-                           durable             = IsDurable,
-                           unconfirmed         = UC }) ->
-    IsPersistent1 = IsDurable andalso IsPersistent,
-    MsgStatus = msg_status(IsPersistent1, IsDelivered, SeqId, Msg, MsgProps, IndexMaxSize),
-    {MsgStatus1, State1} = maybe_write_to_disk(false, false, MsgStatus, State),
-    State2 = case ?QUEUE:is_empty(Q3) of
-                 false -> State1 #vqstate { q1 = ?QUEUE:in(m(MsgStatus1), Q1) };
-                 true  -> State1 #vqstate { q4 = ?QUEUE:in(m(MsgStatus1), Q4) }
-             end,
-    InCount1 = InCount + 1,
-    UC1 = gb_sets_maybe_insert(NeedsConfirming, MsgId, UC),
-    State3 = stats({1, 0}, {none, MsgStatus1},
-                   State2#vqstate{ next_seq_id = SeqId + 1,
-                                   in_counter  = InCount1,
-                                   unconfirmed = UC1 }),
-    a(reduce_memory_use(maybe_update_rates(State3))).
-
-publish_delivered(Msg = #basic_message { is_persistent = IsPersistent,
-                                         id = MsgId },
-                  MsgProps = #message_properties {
-                    needs_confirming = NeedsConfirming },
-                  _ChPid, _Flow,
-                  State = #vqstate { qi_embed_msgs_below = IndexMaxSize,
-                                     next_seq_id         = SeqId,
-                                     out_counter         = OutCount,
-                                     in_counter          = InCount,
-                                     durable             = IsDurable,
-                                     unconfirmed         = UC }) ->
-    IsPersistent1 = IsDurable andalso IsPersistent,
-    MsgStatus = msg_status(IsPersistent1, true, SeqId, Msg, MsgProps, IndexMaxSize),
-    {MsgStatus1, State1} = maybe_write_to_disk(false, false, MsgStatus, State),
-    State2 = record_pending_ack(m(MsgStatus1), State1),
-    UC1 = gb_sets_maybe_insert(NeedsConfirming, MsgId, UC),
-    State3 = stats({0, 1}, {none, MsgStatus1},
-                   State2 #vqstate { next_seq_id      = SeqId    + 1,
-                                     out_counter      = OutCount + 1,
-                                     in_counter       = InCount  + 1,
-                                     unconfirmed      = UC1 }),
-    {SeqId, a(reduce_memory_use(maybe_update_rates(State3)))}.
+publish(Msg, MsgProps, IsDelivered, ChPid, Flow, State) ->
+    State1 =
+        publish1(Msg, MsgProps, IsDelivered, ChPid, Flow,
+                 fun maybe_write_to_disk/4,
+                 State),
+    a(reduce_memory_use(maybe_update_rates(State1))).
+
+batch_publish(Publishes, ChPid, Flow, State) ->
+    {ChPid, Flow, State1} =
+        lists:foldl(fun batch_publish1/2, {ChPid, Flow, State}, Publishes),
+    State2 = ui(State1),
+    a(reduce_memory_use(maybe_update_rates(State2))).
+
+publish_delivered(Msg, MsgProps, ChPid, Flow, State) ->
+    {SeqId, State1} =
+        publish_delivered1(Msg, MsgProps, ChPid, Flow,
+                           fun maybe_write_to_disk/4,
+                           State),
+    {SeqId, a(reduce_memory_use(maybe_update_rates(State1)))}.
+
+batch_publish_delivered(Publishes, ChPid, Flow, State) ->
+    {ChPid, Flow, SeqIds, State1} =
+        lists:foldl(fun batch_publish_delivered1/2,
+                    {ChPid, Flow, [], State}, Publishes),
+    State2 = ui(State1),
+    {lists:reverse(SeqIds), a(reduce_memory_use(maybe_update_rates(State2)))}.
 
 discard(_MsgId, _ChPid, _Flow, State) -> State.
 
@@ -651,25 +653,28 @@ ack([], State) ->
 %% optimisation: this head is essentially a partial evaluation of the
 %% general case below, for the single-ack case.
 ack([SeqId], State) ->
-    {#msg_status { msg_id        = MsgId,
-                   is_persistent = IsPersistent,
-                   msg_in_store  = MsgInStore,
-                   index_on_disk = IndexOnDisk },
-     State1 = #vqstate { index_state       = IndexState,
-                         msg_store_clients = MSCState,
-                         ack_out_counter   = AckOutCount }} =
-        remove_pending_ack(true, SeqId, State),
-    IndexState1 = case IndexOnDisk of
-                      true  -> rabbit_queue_index:ack([SeqId], IndexState);
-                      false -> IndexState
-                  end,
-    case MsgInStore of
-        true  -> ok = msg_store_remove(MSCState, IsPersistent, [MsgId]);
-        false -> ok
-    end,
-    {[MsgId],
-     a(State1 #vqstate { index_state      = IndexState1,
-                         ack_out_counter  = AckOutCount + 1 })};
+    case remove_pending_ack(true, SeqId, State) of
+        {none, _} ->
+            State;
+        {#msg_status { msg_id        = MsgId,
+                       is_persistent = IsPersistent,
+                       msg_in_store  = MsgInStore,
+                       index_on_disk = IndexOnDisk },
+         State1 = #vqstate { index_state       = IndexState,
+                             msg_store_clients = MSCState,
+                             ack_out_counter   = AckOutCount }} ->
+            IndexState1 = case IndexOnDisk of
+                              true  -> rabbit_queue_index:ack([SeqId], IndexState);
+                              false -> IndexState
+                          end,
+            case MsgInStore of
+                true  -> ok = msg_store_remove(MSCState, IsPersistent, [MsgId]);
+                false -> ok
+            end,
+            {[MsgId],
+             a(State1 #vqstate { index_state      = IndexState1,
+                                 ack_out_counter  = AckOutCount + 1 })}
+    end;
 ack(AckTags, State) ->
     {{IndexOnDiskSeqIds, MsgIdsByStore, AllMsgIds},
      State1 = #vqstate { index_state       = IndexState,
@@ -677,8 +682,12 @@ ack(AckTags, State) ->
                          ack_out_counter   = AckOutCount }} =
         lists:foldl(
           fun (SeqId, {Acc, State2}) ->
-                  {MsgStatus, State3} = remove_pending_ack(true, SeqId, State2),
-                  {accumulate_ack(MsgStatus, Acc), State3}
+                  case remove_pending_ack(true, SeqId, State2) of
+                      {none, _} ->
+                          {Acc, State2};
+                      {MsgStatus, State3} ->
+                          {accumulate_ack(MsgStatus, Acc), State3}
+                  end
           end, {accumulate_ack_init(), State}, AckTags),
     IndexState1 = rabbit_queue_index:ack(IndexOnDiskSeqIds, IndexState),
     remove_msgs_by_id(MsgIdsByStore, MSCState),
@@ -686,7 +695,8 @@ ack(AckTags, State) ->
      a(State1 #vqstate { index_state      = IndexState1,
                          ack_out_counter  = AckOutCount + length(AckTags) })}.
 
-requeue(AckTags, #vqstate { delta      = Delta,
+requeue(AckTags, #vqstate { mode       = default,
+                            delta      = Delta,
                             q3         = Q3,
                             q4         = Q4,
                             in_counter = InCounter,
@@ -701,12 +711,29 @@ requeue(AckTags, #vqstate { delta      = Delta,
                                                   State2),
     MsgCount = length(MsgIds2),
     {MsgIds2, a(reduce_memory_use(
-                  maybe_update_rates(
+                  maybe_update_rates(ui(
                     State3 #vqstate { delta      = Delta1,
                                       q3         = Q3a,
                                       q4         = Q4a,
                                       in_counter = InCounter + MsgCount,
-                                      len        = Len + MsgCount })))}.
+                                      len        = Len + MsgCount }))))};
+requeue(AckTags, #vqstate { mode       = lazy,
+                            delta      = Delta,
+                            q3         = Q3,
+                            in_counter = InCounter,
+                            len        = Len } = State) ->
+    {SeqIds, Q3a, MsgIds, State1} = queue_merge(lists:sort(AckTags), Q3, [],
+                                                delta_limit(Delta),
+                                                fun publish_beta/2, State),
+    {Delta1, MsgIds1, State2}     = delta_merge(SeqIds, Delta, MsgIds,
+                                                State1),
+    MsgCount = length(MsgIds1),
+    {MsgIds1, a(reduce_memory_use(
+                  maybe_update_rates(ui(
+                    State2 #vqstate { delta      = Delta1,
+                                      q3         = Q3a,
+                                      in_counter = InCounter + MsgCount,
+                                      len        = Len + MsgCount }))))}.
 
 ackfold(MsgFun, Acc, State, AckTags) ->
     {AccN, StateN} =
@@ -770,7 +797,7 @@ update_rates(State = #vqstate{ in_counter      =     InCount,
                                                ack_in    =  AckInRate,
                                                ack_out   = AckOutRate,
                                                timestamp = TS }}) ->
-    Now = erlang:now(),
+    Now = time_compat:monotonic_time(),
 
     Rates = #rates { in        = update_rate(Now, TS,     InCount,     InRate),
                      out       = update_rate(Now, TS,    OutCount,    OutRate),
@@ -785,8 +812,13 @@ update_rates(State = #vqstate{ in_counter      =     InCount,
                    rates           = Rates }.
 
 update_rate(Now, TS, Count, Rate) ->
-    Time = timer:now_diff(Now, TS) / ?MICROS_PER_SECOND,
-    rabbit_misc:moving_average(Time, ?RATE_AVG_HALF_LIFE, Count / Time, Rate).
+    Time = time_compat:convert_time_unit(Now - TS, native, micro_seconds) /
+        ?MICROS_PER_SECOND,
+    if
+        Time == 0 -> Rate;
+        true      -> rabbit_misc:moving_average(Time, ?RATE_AVG_HALF_LIFE,
+                                                Count / Time, Rate)
+    end.
 
 ram_duration(State) ->
     State1 = #vqstate { rates = #rates { in      = AvgIngressRate,
@@ -854,12 +886,19 @@ info(message_bytes_ram, #vqstate{ram_bytes = RamBytes}) ->
     RamBytes;
 info(message_bytes_persistent, #vqstate{persistent_bytes = PersistentBytes}) ->
     PersistentBytes;
+info(head_message_timestamp, #vqstate{
+          q3               = Q3,
+          q4               = Q4,
+          ram_pending_ack  = RPA,
+          qi_pending_ack   = QPA}) ->
+          head_message_timestamp(Q3, Q4, RPA, QPA);
 info(disk_reads, #vqstate{disk_read_count = Count}) ->
     Count;
 info(disk_writes, #vqstate{disk_write_count = Count}) ->
     Count;
 info(backing_queue_status, #vqstate {
           q1 = Q1, q2 = Q2, delta = Delta, q3 = Q3, q4 = Q4,
+          mode             = Mode,
           len              = Len,
           target_ram_count = TargetRamCount,
           next_seq_id      = NextSeqId,
@@ -868,7 +907,8 @@ info(backing_queue_status, #vqstate {
                                       ack_in  = AvgAckIngressRate,
                                       ack_out = AvgAckEgressRate }}) ->
 
-    [ {q1                  , ?QUEUE:len(Q1)},
+    [ {mode                , Mode},
+      {q1                  , ?QUEUE:len(Q1)},
       {q2                  , ?QUEUE:len(Q2)},
       {delta               , Delta},
       {q3                  , ?QUEUE:len(Q3)},
@@ -888,11 +928,113 @@ invoke(      _,   _, State) -> State.
 
 is_duplicate(_Msg, State) -> {false, State}.
 
+set_queue_mode(Mode, State = #vqstate { mode = Mode }) ->
+    State;
+set_queue_mode(lazy, State = #vqstate {
+                                target_ram_count = TargetRamCount }) ->
+    %% To become a lazy queue we need to page everything to disk first.
+    State1 = convert_to_lazy(State),
+    %% restore the original target_ram_count
+    a(State1 #vqstate { mode = lazy, target_ram_count = TargetRamCount });
+set_queue_mode(default, State) ->
+    %% becoming a default queue means loading messages from disk like
+    %% when a queue is recovered.
+    a(maybe_deltas_to_betas(State #vqstate { mode = default }));
+set_queue_mode(_, State) ->
+    State.
+
+zip_msgs_and_acks(Msgs, AckTags, Accumulator, _State) ->
+    lists:foldl(fun ({{#basic_message{ id = Id }, _Props}, AckTag}, Acc) ->
+                        [{Id, AckTag} | Acc]
+                end, Accumulator, lists:zip(Msgs, AckTags)).
+
+convert_to_lazy(State) ->
+    State1 = #vqstate { delta = Delta, q3 = Q3, len = Len } =
+        set_ram_duration_target(0, State),
+    case Delta#delta.count + ?QUEUE:len(Q3) == Len of
+        true ->
+            State1;
+        false ->
+            %% When pushing messages to disk, we might have been
+            %% blocked by the msg_store, so we need to see if we have
+            %% to wait for more credit, and then keep paging messages.
+            %%
+            %% The amqqueue_process could have taken care of this, but
+            %% between the time it receives the bump_credit msg and
+            %% calls BQ:resume to keep paging messages to disk, some
+            %% other request may arrive to the BQ which at this moment
+            %% is not in a proper state for a lazy BQ (unless all
+            %% messages have been paged to disk already).
+            wait_for_msg_store_credit(),
+            convert_to_lazy(resume(State1))
+    end.
+
+wait_for_msg_store_credit() ->
+    case credit_flow:blocked() of
+        true  -> receive
+                     {bump_credit, Msg} ->
+                         credit_flow:handle_bump_msg(Msg)
+                 end;
+        false -> ok
+    end.
+
+%% Get the Timestamp property of the first msg, if present. This is
+%% the one with the oldest timestamp among the heads of the pending
+%% acks and unread queues.  We can't check disk_pending_acks as these
+%% are paged out - we assume some will soon be paged in rather than
+%% forcing it to happen.  Pending ack msgs are included as they are
+%% regarded as unprocessed until acked, this also prevents the result
+%% apparently oscillating during repeated rejects.  Q3 is only checked
+%% when Q4 is empty as any Q4 msg will be earlier.
+head_message_timestamp(Q3, Q4, RPA, QPA) ->
+    HeadMsgs = [ HeadMsgStatus#msg_status.msg ||
+                   HeadMsgStatus <-
+                       [ get_qs_head([Q4, Q3]),
+                         get_pa_head(RPA),
+                         get_pa_head(QPA) ],
+                   HeadMsgStatus /= undefined,
+                   HeadMsgStatus#msg_status.msg /= undefined ],
+
+    Timestamps =
+        [Timestamp || HeadMsg <- HeadMsgs,
+                      Timestamp <- [rabbit_basic:extract_timestamp(
+                                      HeadMsg#basic_message.content)],
+                      Timestamp /= undefined
+        ],
+
+    case Timestamps == [] of
+        true -> '';
+        false -> lists:min(Timestamps)
+    end.
+
+get_qs_head(Qs) ->
+    catch lists:foldl(
+            fun (Q, Acc) ->
+                    case get_q_head(Q) of
+                        undefined -> Acc;
+                        Val -> throw(Val)
+                    end
+            end, undefined, Qs).
+
+get_q_head(Q) ->
+    get_collection_head(Q, fun ?QUEUE:is_empty/1, fun ?QUEUE:peek/1).
+
+get_pa_head(PA) ->
+    get_collection_head(PA, fun gb_trees:is_empty/1, fun gb_trees:smallest/1).
+
+get_collection_head(Col, IsEmpty, GetVal) ->
+    case IsEmpty(Col) of
+        false ->
+            {_, MsgStatus} = GetVal(Col),
+            MsgStatus;
+        true  -> undefined
+    end.
+
 %%----------------------------------------------------------------------------
 %% Minor helpers
 %%----------------------------------------------------------------------------
-
 a(State = #vqstate { q1 = Q1, q2 = Q2, delta = Delta, q3 = Q3, q4 = Q4,
+                     mode             = default,
                      len              = Len,
                      bytes            = Bytes,
                      unacked_bytes    = UnackedBytes,
@@ -907,9 +1049,16 @@ a(State = #vqstate { q1 = Q1, q2 = Q2, delta = Delta, q3 = Q3, q4 = Q4,
     E4 = ?QUEUE:is_empty(Q4),
     LZ = Len == 0,
 
+    %% if q1 has messages then q3 cannot be empty. See publish/6.
     true = E1 or not E3,
+    %% if q2 has messages then we have messages in delta (paged to
+    %% disk). See push_alphas_to_betas/2.
     true = E2 or not ED,
+    %% if delta has messages then q3 cannot be empty. This is enforced
+    %% by paging, where min([?SEGMENT_ENTRY_COUNT, len(q3)]) messages
+    %% are always kept on RAM.
     true = ED or not E3,
+    %% if the queue length is 0, then q3 and q4 must be empty.
     true = LZ == (E3 and E4),
 
     true = Len             >= 0,
@@ -922,6 +1071,53 @@ a(State = #vqstate { q1 = Q1, q2 = Q2, delta = Delta, q3 = Q3, q4 = Q4,
     true = RamBytes        >= 0,
     true = RamBytes        =< Bytes + UnackedBytes,
 
+    State;
+a(State = #vqstate { q1 = Q1, q2 = Q2, delta = Delta, q3 = Q3, q4 = Q4,
+                     mode             = lazy,
+                     len              = Len,
+                     bytes            = Bytes,
+                     unacked_bytes    = UnackedBytes,
+                     persistent_count = PersistentCount,
+                     persistent_bytes = PersistentBytes,
+                     ram_msg_count    = RamMsgCount,
+                     ram_bytes        = RamBytes}) ->
+    E1 = ?QUEUE:is_empty(Q1),
+    E2 = ?QUEUE:is_empty(Q2),
+    ED = Delta#delta.count == 0,
+    E3 = ?QUEUE:is_empty(Q3),
+    E4 = ?QUEUE:is_empty(Q4),
+    LZ = Len == 0,
+    L3 = ?QUEUE:len(Q3),
+
+    %% q1 must always be empty, since q1 only gets messages during
+    %% publish, but for lazy queues messages go straight to delta.
+    true = E1,
+
+    %% q2 only gets messages from q1 when push_alphas_to_betas is
+    %% called for a non empty delta, which won't be the case for a
+    %% lazy queue. This means q2 must always be empty.
+    true = E2,
+
+    %% q4 must always be empty, since q1 only gets messages during
+    %% publish, but for lazy queues messages go straight to delta.
+    true = E4,
+
+    %% if the queue is empty, then delta is empty and q3 is empty.
+    true = LZ == (ED and E3),
+
+    %% There should be no messages in q1, q2, and q4
+    true = Delta#delta.count + L3 == Len,
+
+    true = Len             >= 0,
+    true = Bytes           >= 0,
+    true = UnackedBytes    >= 0,
+    true = PersistentCount >= 0,
+    true = PersistentBytes >= 0,
+    true = RamMsgCount     >= 0,
+    true = RamMsgCount     =< Len,
+    true = RamBytes        >= 0,
+    true = RamBytes        =< Bytes + UnackedBytes,
+
     State.
 
 d(Delta = #delta { start_seq_id = Start, count = Count, end_seq_id = End })
@@ -1114,7 +1310,7 @@ init(IsDurable, IndexState, DeltaCount, DeltaBytes, Terms,
                                     count        = DeltaCount1,
                                     end_seq_id   = NextSeqId })
             end,
-    Now = now(),
+    Now = time_compat:monotonic_time(),
     IoBatchSize = rabbit_misc:get_env(rabbit, msg_store_io_batch_size,
                                       ?IO_BATCH_SIZE),
 
@@ -1159,7 +1355,10 @@ init(IsDurable, IndexState, DeltaCount, DeltaBytes, Terms,
       disk_read_count     = 0,
       disk_write_count    = 0,
 
-      io_batch_size       = IoBatchSize },
+      io_batch_size       = IoBatchSize,
+
+      mode                = default,
+      memory_reduction_run_count = 0},
     a(maybe_deltas_to_betas(State)).
 
 blank_rates(Now) ->
@@ -1170,7 +1369,7 @@ blank_rates(Now) ->
              timestamp = Now}.
 
 in_r(MsgStatus = #msg_status { msg = undefined },
-     State = #vqstate { q3 = Q3, q4 = Q4 }) ->
+     State = #vqstate { mode = default, q3 = Q3, q4 = Q4 }) ->
     case ?QUEUE:is_empty(Q4) of
         true  -> State #vqstate { q3 = ?QUEUE:in_r(MsgStatus, Q3) };
         false -> {Msg, State1 = #vqstate { q4 = Q4a }} =
@@ -1179,10 +1378,24 @@ in_r(MsgStatus = #msg_status { msg = undefined },
                  stats(ready0, {MsgStatus, MsgStatus1},
                        State1 #vqstate { q4 = ?QUEUE:in_r(MsgStatus1, Q4a) })
     end;
-in_r(MsgStatus, State = #vqstate { q4 = Q4 }) ->
-    State #vqstate { q4 = ?QUEUE:in_r(MsgStatus, Q4) }.
+in_r(MsgStatus,
+     State = #vqstate { mode = default, q4 = Q4 }) ->
+    State #vqstate { q4 = ?QUEUE:in_r(MsgStatus, Q4) };
+%% lazy queues
+in_r(MsgStatus = #msg_status { seq_id = SeqId },
+     State = #vqstate { mode = lazy, q3 = Q3, delta = Delta}) ->
+    case ?QUEUE:is_empty(Q3) of
+        true  ->
+            {_MsgStatus1, State1} =
+                maybe_write_to_disk(true, true, MsgStatus, State),
+            State2 = stats(ready0, {MsgStatus, none}, State1),
+            Delta1 = expand_delta(SeqId, Delta),
+            State2 #vqstate{ delta = Delta1 };
+        false ->
+            State #vqstate { q3 = ?QUEUE:in_r(MsgStatus, Q3) }
+    end.
 
-queue_out(State = #vqstate { q4 = Q4 }) ->
+queue_out(State = #vqstate { mode = default, q4 = Q4 }) ->
     case ?QUEUE:out(Q4) of
         {empty, _Q4} ->
             case fetch_from_q3(State) of
@@ -1191,6 +1404,12 @@ queue_out(State = #vqstate { q4 = Q4 }) ->
             end;
         {{value, MsgStatus}, Q4a} ->
             {{value, MsgStatus}, State #vqstate { q4 = Q4a }}
+    end;
+%% lazy queues
+queue_out(State = #vqstate { mode = lazy }) ->
+    case fetch_from_q3(State) of
+        {empty, _State1} = Result     -> Result;
+        {loaded, {MsgStatus, State1}} -> {{value, MsgStatus}, State1}
     end.
 
 read_msg(#msg_status{msg           = undefined,
@@ -1210,11 +1429,13 @@ read_msg(MsgId, IsPersistent, State = #vqstate{msg_store_clients = MSCState,
 stats(Signs, Statuses, State) ->
     stats0(expand_signs(Signs), expand_statuses(Statuses), State).
 
-expand_signs(ready0)   -> {0, 0, true};
-expand_signs({A, B})   -> {A, B, false}.
+expand_signs(ready0)        -> {0, 0, true};
+expand_signs(lazy_pub)      -> {1, 0, true};
+expand_signs({A, B})        -> {A, B, false}.
 
 expand_statuses({none, A})    -> {false,         msg_in_ram(A), A};
 expand_statuses({B,    none}) -> {msg_in_ram(B), false,         B};
+expand_statuses({lazy, A})    -> {false        , false,         A};
 expand_statuses({B,    A})    -> {msg_in_ram(B), msg_in_ram(A), B}.
 
 %% In this function at least, we are religious: the variable name
@@ -1451,15 +1672,24 @@ reset_qi_state(State = #vqstate{index_state = IndexState}) ->
 is_pending_ack_empty(State) ->
     count_pending_acks(State) =:= 0.
 
+is_unconfirmed_empty(#vqstate { unconfirmed = UC }) ->
+    gb_sets:is_empty(UC).
+
 count_pending_acks(#vqstate { ram_pending_ack   = RPA,
                               disk_pending_ack  = DPA,
                               qi_pending_ack    = QPA }) ->
     gb_trees:size(RPA) + gb_trees:size(DPA) + gb_trees:size(QPA).
 
-purge_betas_and_deltas(DelsAndAcksFun, State = #vqstate { q3 = Q3 }) ->
+purge_betas_and_deltas(DelsAndAcksFun, State = #vqstate { mode = Mode }) ->
+    State0 = #vqstate { q3 = Q3 } =
+        case Mode of
+            lazy -> maybe_deltas_to_betas(DelsAndAcksFun, State);
+            _    -> State
+        end,
+
     case ?QUEUE:is_empty(Q3) of
-        true  -> State;
-        false -> State1 = remove_queue_entries(Q3, DelsAndAcksFun, State),
+        true  -> State0;
+        false -> State1 = remove_queue_entries(Q3, DelsAndAcksFun, State0),
                  purge_betas_and_deltas(DelsAndAcksFun,
                                         maybe_deltas_to_betas(
                                           DelsAndAcksFun,
@@ -1503,6 +1733,108 @@ process_delivers_and_acks_fun(_) ->
 %% Internal gubbins for publishing
 %%----------------------------------------------------------------------------
 
+publish1(Msg = #basic_message { is_persistent = IsPersistent, id = MsgId },
+         MsgProps = #message_properties { needs_confirming = NeedsConfirming },
+         IsDelivered, _ChPid, _Flow, PersistFun,
+         State = #vqstate { q1 = Q1, q3 = Q3, q4 = Q4,
+                            mode                = default,
+                            qi_embed_msgs_below = IndexMaxSize,
+                            next_seq_id         = SeqId,
+                            in_counter          = InCount,
+                            durable             = IsDurable,
+                            unconfirmed         = UC }) ->
+    IsPersistent1 = IsDurable andalso IsPersistent,
+    MsgStatus = msg_status(IsPersistent1, IsDelivered, SeqId, Msg, MsgProps, IndexMaxSize),
+    {MsgStatus1, State1} = PersistFun(false, false, MsgStatus, State),
+    State2 = case ?QUEUE:is_empty(Q3) of
+                 false -> State1 #vqstate { q1 = ?QUEUE:in(m(MsgStatus1), Q1) };
+                 true  -> State1 #vqstate { q4 = ?QUEUE:in(m(MsgStatus1), Q4) }
+             end,
+    InCount1 = InCount + 1,
+    UC1 = gb_sets_maybe_insert(NeedsConfirming, MsgId, UC),
+    stats({1, 0}, {none, MsgStatus1},
+          State2#vqstate{ next_seq_id = SeqId + 1,
+                          in_counter  = InCount1,
+                          unconfirmed = UC1 });
+publish1(Msg = #basic_message { is_persistent = IsPersistent, id = MsgId },
+             MsgProps = #message_properties { needs_confirming = NeedsConfirming },
+             IsDelivered, _ChPid, _Flow, PersistFun,
+             State = #vqstate { mode                = lazy,
+                                qi_embed_msgs_below = IndexMaxSize,
+                                next_seq_id         = SeqId,
+                                in_counter          = InCount,
+                                durable             = IsDurable,
+                                unconfirmed         = UC,
+                                delta               = Delta }) ->
+    IsPersistent1 = IsDurable andalso IsPersistent,
+    MsgStatus = msg_status(IsPersistent1, IsDelivered, SeqId, Msg, MsgProps, IndexMaxSize),
+    {MsgStatus1, State1} = PersistFun(true, true, MsgStatus, State),
+    Delta1 = expand_delta(SeqId, Delta),
+    UC1 = gb_sets_maybe_insert(NeedsConfirming, MsgId, UC),
+    stats(lazy_pub, {lazy, m(MsgStatus1)},
+          State1#vqstate{ delta       = Delta1,
+                          next_seq_id = SeqId + 1,
+                          in_counter  = InCount + 1,
+                          unconfirmed = UC1 }).
+
+batch_publish1({Msg, MsgProps, IsDelivered}, {ChPid, Flow, State}) ->
+    {ChPid, Flow, publish1(Msg, MsgProps, IsDelivered, ChPid, Flow,
+                           fun maybe_prepare_write_to_disk/4, State)}.
+
+publish_delivered1(Msg = #basic_message { is_persistent = IsPersistent,
+                                          id = MsgId },
+                   MsgProps = #message_properties {
+                                 needs_confirming = NeedsConfirming },
+                   _ChPid, _Flow, PersistFun,
+                   State = #vqstate { mode                = default,
+                                      qi_embed_msgs_below = IndexMaxSize,
+                                      next_seq_id         = SeqId,
+                                      out_counter         = OutCount,
+                                      in_counter          = InCount,
+                                      durable             = IsDurable,
+                                      unconfirmed         = UC }) ->
+    IsPersistent1 = IsDurable andalso IsPersistent,
+    MsgStatus = msg_status(IsPersistent1, true, SeqId, Msg, MsgProps, IndexMaxSize),
+    {MsgStatus1, State1} = PersistFun(false, false, MsgStatus, State),
+    State2 = record_pending_ack(m(MsgStatus1), State1),
+    UC1 = gb_sets_maybe_insert(NeedsConfirming, MsgId, UC),
+    State3 = stats({0, 1}, {none, MsgStatus1},
+                   State2 #vqstate { next_seq_id      = SeqId    + 1,
+                                     out_counter      = OutCount + 1,
+                                     in_counter       = InCount  + 1,
+                                     unconfirmed      = UC1 }),
+    {SeqId, State3};
+publish_delivered1(Msg = #basic_message { is_persistent = IsPersistent,
+                                          id = MsgId },
+                   MsgProps = #message_properties {
+                                 needs_confirming = NeedsConfirming },
+                   _ChPid, _Flow, PersistFun,
+                   State = #vqstate { mode                = lazy,
+                                      qi_embed_msgs_below = IndexMaxSize,
+                                      next_seq_id         = SeqId,
+                                      out_counter         = OutCount,
+                                      in_counter          = InCount,
+                                      durable             = IsDurable,
+                                      unconfirmed         = UC }) ->
+    IsPersistent1 = IsDurable andalso IsPersistent,
+    MsgStatus = msg_status(IsPersistent1, true, SeqId, Msg, MsgProps, IndexMaxSize),
+    {MsgStatus1, State1} = PersistFun(true, true, MsgStatus, State),
+    State2 = record_pending_ack(m(MsgStatus1), State1),
+    UC1 = gb_sets_maybe_insert(NeedsConfirming, MsgId, UC),
+    State3 = stats({0, 1}, {none, MsgStatus1},
+                   State2 #vqstate { next_seq_id      = SeqId    + 1,
+                                     out_counter      = OutCount + 1,
+                                     in_counter       = InCount  + 1,
+                                     unconfirmed      = UC1 }),
+    {SeqId, State3}.
+
+batch_publish_delivered1({Msg, MsgProps}, {ChPid, Flow, SeqIds, State}) ->
+    {SeqId, State1} =
+        publish_delivered1(Msg, MsgProps, ChPid, Flow,
+                           fun maybe_prepare_write_to_disk/4,
+                           State),
+    {ChPid, Flow, [SeqId | SeqIds], State1}.
+
 maybe_write_msg_to_disk(_Force, MsgStatus = #msg_status {
                                   msg_in_store = true }, State) ->
     {MsgStatus, State};
@@ -1673,8 +2005,12 @@ lookup_pending_ack(SeqId, #vqstate { ram_pending_ack  = RPA,
 
 %% First parameter = UpdateStats
 remove_pending_ack(true, SeqId, State) ->
-    {MsgStatus, State1} = remove_pending_ack(false, SeqId, State),
-    {MsgStatus, stats({0, -1}, {MsgStatus, none}, State1)};
+    case remove_pending_ack(false, SeqId, State) of
+        {none, _} ->
+            {none, State};
+        {MsgStatus, State1} ->
+            {MsgStatus, stats({0, -1}, {MsgStatus, none}, State1)}
+    end;
 remove_pending_ack(false, SeqId, State = #vqstate{ram_pending_ack  = RPA,
                                                   disk_pending_ack = DPA,
                                                   qi_pending_ack   = QPA}) ->
@@ -1686,9 +2022,13 @@ remove_pending_ack(false, SeqId, State = #vqstate{ram_pending_ack  = RPA,
                               DPA1 = gb_trees:delete(SeqId, DPA),
                               {V, State#vqstate{disk_pending_ack = DPA1}};
                           none ->
-                              QPA1 = gb_trees:delete(SeqId, QPA),
-                              {gb_trees:get(SeqId, QPA),
-                               State#vqstate{qi_pending_ack = QPA1}}
+                              case gb_trees:lookup(SeqId, QPA) of
+                                  {value, V} ->
+                                      QPA1 = gb_trees:delete(SeqId, QPA),
+                                      {V, State#vqstate{qi_pending_ack = QPA1}};
+                                  none ->
+                                      {none, State}
+                              end
                       end
     end.
 
@@ -1819,7 +2159,7 @@ publish_alpha(MsgStatus, State) ->
     {MsgStatus, stats({1, -1}, {MsgStatus, MsgStatus}, State)}.
 
 publish_beta(MsgStatus, State) ->
-    {MsgStatus1, State1} = maybe_write_to_disk(true, false, MsgStatus, State),
+    {MsgStatus1, State1} = maybe_prepare_write_to_disk(true, false, MsgStatus, State),
     MsgStatus2 = m(trim_msg_status(MsgStatus1)),
     {MsgStatus2, stats({1, -1}, {MsgStatus, MsgStatus2}, State1)}.
 
@@ -1839,11 +2179,15 @@ queue_merge([SeqId | Rest] = SeqIds, Q, Front, MsgIds,
                         Limit, PubFun, State);
         {_, _Q1} ->
             %% enqueue from the remaining list of sequence ids
-            {MsgStatus, State1} = msg_from_pending_ack(SeqId, State),
-            {#msg_status { msg_id = MsgId } = MsgStatus1, State2} =
-                PubFun(MsgStatus, State1),
-            queue_merge(Rest, Q, ?QUEUE:in(MsgStatus1, Front), [MsgId | MsgIds],
-                        Limit, PubFun, State2)
+            case msg_from_pending_ack(SeqId, State) of
+                {none, _} ->
+                    queue_merge(Rest, Q, Front, MsgIds, Limit, PubFun, State);
+                {MsgStatus, State1} ->
+                    {#msg_status { msg_id = MsgId } = MsgStatus1, State2} =
+                        PubFun(MsgStatus, State1),
+                    queue_merge(Rest, Q, ?QUEUE:in(MsgStatus1, Front), [MsgId | MsgIds],
+                                Limit, PubFun, State2)
+            end
     end;
 queue_merge(SeqIds, Q, Front, MsgIds,
             _Limit, _PubFun, State) ->
@@ -1852,22 +2196,28 @@ queue_merge(SeqIds, Q, Front, MsgIds,
 delta_merge([], Delta, MsgIds, State) ->
     {Delta, MsgIds, State};
 delta_merge(SeqIds, Delta, MsgIds, State) ->
-    lists:foldl(fun (SeqId, {Delta0, MsgIds0, State0}) ->
-                        {#msg_status { msg_id = MsgId } = MsgStatus, State1} =
-                            msg_from_pending_ack(SeqId, State0),
-                        {_MsgStatus, State2} =
-                            maybe_write_to_disk(true, true, MsgStatus, State1),
-                        {expand_delta(SeqId, Delta0), [MsgId | MsgIds0],
-                         stats({1, -1}, {MsgStatus, none}, State2)}
+    lists:foldl(fun (SeqId, {Delta0, MsgIds0, State0} = Acc) ->
+                        case msg_from_pending_ack(SeqId, State0) of
+                            {none, _} ->
+                                Acc;
+                        {#msg_status { msg_id = MsgId } = MsgStatus, State1} ->
+                                {_MsgStatus, State2} =
+                                    maybe_prepare_write_to_disk(true, true, MsgStatus, State1),
+                                {expand_delta(SeqId, Delta0), [MsgId | MsgIds0],
+                                 stats({1, -1}, {MsgStatus, none}, State2)}
+                        end
                 end, {Delta, MsgIds, State}, SeqIds).
 
 %% Mostly opposite of record_pending_ack/2
 msg_from_pending_ack(SeqId, State) ->
-    {#msg_status { msg_props = MsgProps } = MsgStatus, State1} =
-        remove_pending_ack(false, SeqId, State),
-    {MsgStatus #msg_status {
-       msg_props = MsgProps #message_properties { needs_confirming = false } },
-     State1}.
+    case remove_pending_ack(false, SeqId, State) of
+        {none, _} ->
+            {none, State};
+        {#msg_status { msg_props = MsgProps } = MsgStatus, State1} ->
+            {MsgStatus #msg_status {
+               msg_props = MsgProps #message_properties { needs_confirming = false } },
+             State1}
+    end.
 
 beta_limit(Q) ->
     case ?QUEUE:peek(Q) of
@@ -1960,9 +2310,18 @@ ifold(Fun, Acc, Its, State) ->
 %% Phase changes
 %%----------------------------------------------------------------------------
 
+maybe_execute_gc(State = #vqstate {memory_reduction_run_count = MRedRunCount}) ->
+    case MRedRunCount >= ?EXPLICIT_GC_RUN_OP_THRESHOLD of
+       true -> garbage_collect(),
+                State#vqstate{memory_reduction_run_count =  0};
+        false ->    State#vqstate{memory_reduction_run_count =  MRedRunCount + 1}
+
+    end.
+
 reduce_memory_use(State = #vqstate { target_ram_count = infinity }) ->
     State;
 reduce_memory_use(State = #vqstate {
+                    mode             = default,
                     ram_pending_ack  = RPA,
                     ram_msg_count    = RamMsgCount,
                     target_ram_count = TargetRamCount,
@@ -2008,7 +2367,30 @@ reduce_memory_use(State = #vqstate {
         end,
     %% See rabbitmq-server-290 for the reasons behind this GC call.
     garbage_collect(),
-    State3.
+    State3;
+%% When using lazy queues, there are no alphas, so we don't need to
+%% call push_alphas_to_betas/2.
+reduce_memory_use(State = #vqstate {
+                             mode = lazy,
+                             ram_pending_ack  = RPA,
+                             ram_msg_count    = RamMsgCount,
+                             target_ram_count = TargetRamCount }) ->
+    State1 = #vqstate { q3 = Q3 } =
+        case chunk_size(RamMsgCount + gb_trees:size(RPA), TargetRamCount) of
+            0  -> State;
+            S1 -> {_, State2} = limit_ram_acks(S1, State),
+                  State2
+        end,
+
+    State3 =
+        case chunk_size(?QUEUE:len(Q3),
+                        permitted_beta_count(State1)) of
+            0  ->
+                State1;
+            S2 ->
+                push_betas_to_deltas(S2, State1)
+        end,
+    maybe_execute_gc(State3).
 
 limit_ram_acks(0, State) ->
     {0, ui(State)};
@@ -2031,6 +2413,9 @@ limit_ram_acks(Quota, State = #vqstate { ram_pending_ack  = RPA,
 
 permitted_beta_count(#vqstate { len = 0 }) ->
     infinity;
+permitted_beta_count(#vqstate { mode             = lazy,
+                                target_ram_count = TargetRamCount}) ->
+    TargetRamCount;
 permitted_beta_count(#vqstate { target_ram_count = 0, q3 = Q3 }) ->
     lists:min([?QUEUE:len(Q3), rabbit_queue_index:next_segment_boundary(0)]);
 permitted_beta_count(#vqstate { q1               = Q1,
@@ -2048,7 +2433,8 @@ chunk_size(Current, Permitted)
 chunk_size(Current, Permitted) ->
     Current - Permitted.
 
-fetch_from_q3(State = #vqstate { q1    = Q1,
+fetch_from_q3(State = #vqstate { mode  = default,
+                                 q1    = Q1,
                                  q2    = Q2,
                                  delta = #delta { count = DeltaCount },
                                  q3    = Q3,
@@ -2078,6 +2464,19 @@ fetch_from_q3(State = #vqstate { q1    = Q1,
                              State1
                      end,
             {loaded, {MsgStatus, State2}}
+    end;
+%% lazy queues
+fetch_from_q3(State = #vqstate { mode  = lazy,
+                                 delta = #delta { count = DeltaCount },
+                                 q3    = Q3 }) ->
+    case ?QUEUE:out(Q3) of
+        {empty, _Q3} when DeltaCount =:= 0 ->
+            {empty, State};
+        {empty, _Q3} ->
+            fetch_from_q3(maybe_deltas_to_betas(State));
+        {{value, MsgStatus}, Q3a} ->
+            State1 = State #vqstate { q3 = Q3a },
+            {loaded, {MsgStatus, State1}}
     end.
 
 maybe_deltas_to_betas(State) ->
@@ -2186,7 +2585,8 @@ push_alphas_to_betas(Generator, Consumer, Quota, Q, State) ->
                  end
     end.
 
-push_betas_to_deltas(Quota, State = #vqstate { q2    = Q2,
+push_betas_to_deltas(Quota, State = #vqstate { mode  = default,
+                                               q2    = Q2,
                                                delta = Delta,
                                                q3    = Q3}) ->
     PushState = {Quota, Delta, State},
@@ -2201,8 +2601,22 @@ push_betas_to_deltas(Quota, State = #vqstate { q2    = Q2,
     {_, Delta1, State1} = PushState2,
     State1 #vqstate { q2    = Q2a,
                       delta = Delta1,
+                      q3    = Q3a };
+%% In the case of lazy queues we want to page as many messages as
+%% possible from q3.
+push_betas_to_deltas(Quota, State = #vqstate { mode  = lazy,
+                                               delta = Delta,
+                                               q3    = Q3}) ->
+    PushState = {Quota, Delta, State},
+    {Q3a, PushState1} = push_betas_to_deltas(
+                          fun ?QUEUE:out_r/1,
+                          fun (Q2MinSeqId) -> Q2MinSeqId end,
+                          Q3, PushState),
+    {_, Delta1, State1} = PushState1,
+    State1 #vqstate { delta = Delta1,
                       q3    = Q3a }.
 
+
 push_betas_to_deltas(Generator, LimitFun, Q, PushState) ->
     case ?QUEUE:is_empty(Q) of
         true ->
similarity index 77%
rename from rabbitmq-server/src/rabbit_version.erl
rename to deps/rabbit/src/rabbit_version.erl
index d3b2be1b5004ebadfcf8416c7d7ddfe05353577c..a27f0aca00525efcb7228f3e15fddfd0f3ff808c 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_version).
 
 -export([recorded/0, matches/2, desired/0, desired_for_scope/1,
          record_desired/0, record_desired_for_scope/1,
-         upgrades_required/1]).
+         upgrades_required/1, check_version_consistency/3,
+         check_version_consistency/4, check_otp_consistency/1,
+         version_error/3]).
 
 %% -------------------------------------------------------------------
--ifdef(use_specs).
 
 -export_type([scope/0, step/0]).
 
--type(scope() :: atom()).
--type(scope_version() :: [atom()]).
--type(step() :: {atom(), atom()}).
+-type scope() :: atom().
+-type scope_version() :: [atom()].
+-type step() :: {atom(), atom()}.
+
+-type version() :: [atom()].
+
+-spec recorded() -> rabbit_types:ok_or_error2(version(), any()).
+-spec matches([A], [A]) -> boolean().
+-spec desired() -> version().
+-spec desired_for_scope(scope()) -> scope_version().
+-spec record_desired() -> 'ok'.
+-spec record_desired_for_scope
+        (scope()) -> rabbit_types:ok_or_error(any()).
+-spec upgrades_required
+        (scope()) -> rabbit_types:ok_or_error2([step()], any()).
+-spec check_version_consistency
+        (string(), string(), string()) -> rabbit_types:ok_or_error(any()).
+-spec check_version_consistency
+        (string(), string(), string(), string()) ->
+                                          rabbit_types:ok_or_error(any()).
+-spec check_otp_consistency
+        (string()) -> rabbit_types:ok_or_error(any()).
 
--type(version() :: [atom()]).
-
--spec(recorded/0 :: () -> rabbit_types:ok_or_error2(version(), any())).
--spec(matches/2 :: ([A], [A]) -> boolean()).
--spec(desired/0 :: () -> version()).
--spec(desired_for_scope/1 :: (scope()) -> scope_version()).
--spec(record_desired/0 :: () -> 'ok').
--spec(record_desired_for_scope/1 ::
-        (scope()) -> rabbit_types:ok_or_error(any())).
--spec(upgrades_required/1 ::
-        (scope()) -> rabbit_types:ok_or_error2([step()], any())).
-
--endif.
 %% -------------------------------------------------------------------
 
 -define(VERSION_FILENAME, "schema_version").
@@ -173,3 +180,22 @@ categorise_by_scope(Version) when is_list(Version) ->
 dir() -> rabbit_mnesia:dir().
 
 schema_filename() -> filename:join(dir(), ?VERSION_FILENAME).
+
+%% --------------------------------------------------------------------
+
+check_version_consistency(This, Remote, Name) ->
+    check_version_consistency(This, Remote, Name, fun (A, B) -> A =:= B end).
+
+check_version_consistency(This, Remote, Name, Comp) ->
+    case Comp(This, Remote) of
+        true  -> ok;
+        false -> version_error(Name, This, Remote)
+    end.
+
+version_error(Name, This, Remote) ->
+    {error, {inconsistent_cluster,
+             rabbit_misc:format("~s version mismatch: local node is ~s, "
+                                "remote node ~s", [Name, This, Remote])}}.
+
+check_otp_consistency(Remote) ->
+    check_version_consistency(rabbit_misc:otp_release(), Remote, "OTP").
similarity index 84%
rename from rabbitmq-server/src/rabbit_vhost.erl
rename to deps/rabbit/src/rabbit_vhost.erl
index 9b627adf5d20daaeda772c0bae7b2075bd590039..df2f8423b48a298c6049629f1be727dbdb098b40 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_vhost).
 %%----------------------------------------------------------------------------
 
 -export([add/1, delete/1, exists/1, list/0, with/2, assert/1]).
--export([info/1, info/2, info_all/0, info_all/1]).
-
--ifdef(use_specs).
-
--spec(add/1 :: (rabbit_types:vhost()) -> 'ok').
--spec(delete/1 :: (rabbit_types:vhost()) -> 'ok').
--spec(exists/1 :: (rabbit_types:vhost()) -> boolean()).
--spec(list/0 :: () -> [rabbit_types:vhost()]).
--spec(with/2 :: (rabbit_types:vhost(), rabbit_misc:thunk(A)) -> A).
--spec(assert/1 :: (rabbit_types:vhost()) -> 'ok').
-
--spec(info/1 :: (rabbit_types:vhost()) -> rabbit_types:infos()).
--spec(info/2 :: (rabbit_types:vhost(), rabbit_types:info_keys())
-                -> rabbit_types:infos()).
--spec(info_all/0 :: () -> [rabbit_types:infos()]).
--spec(info_all/1 :: (rabbit_types:info_keys()) -> [rabbit_types:infos()]).
-
--endif.
+-export([info/1, info/2, info_all/0, info_all/1, info_all/2, info_all/3]).
+
+-spec add(rabbit_types:vhost()) -> 'ok'.
+-spec delete(rabbit_types:vhost()) -> 'ok'.
+-spec exists(rabbit_types:vhost()) -> boolean().
+-spec list() -> [rabbit_types:vhost()].
+-spec with(rabbit_types:vhost(), rabbit_misc:thunk(A)) -> A.
+-spec assert(rabbit_types:vhost()) -> 'ok'.
+
+-spec info(rabbit_types:vhost()) -> rabbit_types:infos().
+-spec info(rabbit_types:vhost(), rabbit_types:info_keys())
+                -> rabbit_types:infos().
+-spec info_all() -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:info_keys()) -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:info_keys(), reference(), pid()) ->
+                         'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -153,3 +151,8 @@ info(VHost, Items) -> infos(Items, VHost).
 
 info_all()      -> info_all(?INFO_KEYS).
 info_all(Items) -> [info(VHost, Items) || VHost <- list()].
+
+info_all(Ref, AggregatorPid)        -> info_all(?INFO_KEYS, Ref, AggregatorPid).
+info_all(Items, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map(
+       AggregatorPid, Ref, fun(VHost) -> info(VHost, Items) end, list()).
similarity index 85%
rename from rabbitmq-server/src/rabbit_vm.erl
rename to deps/rabbit/src/rabbit_vm.erl
index 534a8883e18237f67b458e52831fc4d7dbfeebe0..9c8732bb6b62bdfdd45baa2834598bc2930e2b97 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_vm).
 
--export([memory/0, binary/0]).
+-export([memory/0, binary/0, ets_tables_memory/1]).
 
 -define(MAGIC_PLUGINS, ["mochiweb", "webmachine", "cowboy", "sockjs",
                         "rfc4627_jsonrpc"]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(memory/0 :: () -> rabbit_types:infos()).
--spec(binary/0 :: () -> rabbit_types:infos()).
-
--endif.
+-spec memory() -> rabbit_types:infos().
+-spec binary() -> rabbit_types:infos().
+-spec ets_tables_memory(Owners) -> rabbit_types:infos()
+     when Owners :: all | OwnerProcessName | [OwnerProcessName],
+          OwnerProcessName :: atom().
 
 %%----------------------------------------------------------------------------
 
@@ -45,7 +44,7 @@ memory() ->
 
     Mnesia       = mnesia_memory(),
     MsgIndexETS  = ets_memory([msg_store_persistent, msg_store_transient]),
-    MgmtDbETS    = ets_memory([rabbit_mgmt_db]),
+    MgmtDbETS    = ets_memory([rabbit_mgmt_event_collector]),
 
     [{total,     Total},
      {processes, Processes},
@@ -118,12 +117,25 @@ mnesia_memory() ->
     end.
 
 ets_memory(OwnerNames) ->
+    lists:sum([V || {_K, V} <- ets_tables_memory(OwnerNames)]).
+
+ets_tables_memory(all) ->
+    [{ets:info(T, name), bytes(ets:info(T, memory))}
+     || T <- ets:all(),
+        is_atom(T)];
+ets_tables_memory(OwnerName) when is_atom(OwnerName) ->
+    ets_tables_memory([OwnerName]);
+ets_tables_memory(OwnerNames) when is_list(OwnerNames) ->
     Owners = [whereis(N) || N <- OwnerNames],
-    lists:sum([bytes(ets:info(T, memory)) || T <- ets:all(),
-                                             O <- [ets:info(T, owner)],
-                                             lists:member(O, Owners)]).
-
-bytes(Words) ->  Words * erlang:system_info(wordsize).
+    [{ets:info(T, name), bytes(ets:info(T, memory))}
+     || T <- ets:all(),
+        lists:member(ets:info(T, owner), Owners)].
+
+bytes(Words) ->  try
+                     Words * erlang:system_info(wordsize)
+                 catch
+                     _:_ -> 0
+                 end.
 
 interesting_sups() ->
     [[rabbit_amqqueue_sup_sup], conn_sups() | interesting_sups0()].
@@ -134,7 +146,18 @@ interesting_sups0() ->
     PluginProcs   = plugin_sups(),
     [MsgIndexProcs, MgmtDbProcs, PluginProcs].
 
-conn_sups()     -> [rabbit_tcp_client_sup, ssl_connection_sup, amqp_sup].
+conn_sups()     ->
+    Ranches = lists:flatten(ranch_server_sups()),
+    [amqp_sup|Ranches].
+
+ranch_server_sups() ->
+    try
+        ets:match(ranch_server, {{conns_sup, '_'}, '$1'})
+    catch
+        %% Ranch ETS table doesn't exist yet
+        error:badarg  -> []
+    end.
+
 conn_sups(With) -> [{Sup, With} || Sup <- conn_sups()].
 
 distinguishers() -> [{rabbit_amqqueue_sup_sup, fun queue_type/1} |
@@ -203,21 +226,19 @@ conn_type(PDict) ->
 
 %% NB: this code is non-rabbit specific.
 
--ifdef(use_specs).
--type(process() :: pid() | atom()).
--type(info_key() :: atom()).
--type(info_value() :: any()).
--type(info_item() :: {info_key(), info_value()}).
--type(accumulate() :: fun ((info_key(), info_value(), info_value()) ->
-                                  info_value())).
--type(distinguisher() :: fun (([{term(), term()}]) -> atom())).
--type(distinguishers() :: [{info_key(), distinguisher()}]).
--spec(sum_processes/3 :: ([process()], distinguishers(), [info_key()]) ->
-                              {[{process(), [info_item()]}], [info_item()]}).
--spec(sum_processes/4 :: ([process()], accumulate(), distinguishers(),
+-type process() :: pid() | atom().
+-type info_key() :: atom().
+-type info_value() :: any().
+-type info_item() :: {info_key(), info_value()}.
+-type accumulate() :: fun ((info_key(), info_value(), info_value()) ->
+                                  info_value()).
+-type distinguisher() :: fun (([{term(), term()}]) -> atom()).
+-type distinguishers() :: [{info_key(), distinguisher()}].
+-spec sum_processes([process()], distinguishers(), [info_key()]) ->
+                              {[{process(), [info_item()]}], [info_item()]}.
+-spec sum_processes([process()], accumulate(), distinguishers(),
                           [info_item()]) ->
-                              {[{process(), [info_item()]}], [info_item()]}).
--endif.
+                              {[{process(), [info_item()]}], [info_item()]}.
 
 sum_processes(Names, Distinguishers, Items) ->
     sum_processes(Names, fun (_, X, Y) -> X + Y end, Distinguishers,
similarity index 89%
rename from rabbitmq-server/src/supervised_lifecycle.erl
rename to deps/rabbit/src/supervised_lifecycle.erl
index 1010bbb51db52bcaf20fe2ae474c6abb9af4c4de..5b0f56dc26747551f1bfb91b9824ba91fdd6bc58 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% Invoke callbacks on startup and termination.
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/3 :: (atom(), rabbit_types:mfargs(), rabbit_types:mfargs()) ->
-                           rabbit_types:ok_pid_or_error()).
-
--endif.
+-spec start_link(atom(), rabbit_types:mfargs(), rabbit_types:mfargs()) ->
+          rabbit_types:ok_pid_or_error().
 
 %%----------------------------------------------------------------------------
 
diff --git a/deps/rabbit/src/tcp_listener.erl b/deps/rabbit/src/tcp_listener.erl
new file mode 100644 (file)
index 0000000..5f15592
--- /dev/null
@@ -0,0 +1,101 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(tcp_listener).
+
+%% Represents a running TCP listener (a process that listens for inbound
+%% TCP or TLS connections). Every protocol supported typically has one
+%% or two listeners, plain TCP and (optionally) TLS, but there can
+%% be more, e.g. when multiple network interfaces are involved.
+%%
+%% A listener has 6 properties (is a tuple of 6):
+%%
+%%  * IP address
+%%  * Port
+%%  * Node
+%%  * Label (human-friendly name, e.g. AMQP 0-9-1)
+%%  * Startup callback
+%%  * Shutdown callback
+%%
+%% Listeners use Ranch in embedded mode to accept and "bridge" client
+%% connections with protocol entry points such as rabbit_reader.
+%%
+%% Listeners are tracked in a Mnesia table so that they can be
+%%
+%%  * Shut down
+%%  * Listed (e.g. in the management UI)
+%%
+%% Every tcp_listener process has callbacks that are executed on start
+%% and termination. Those must take care of listener registration
+%% among other things.
+%%
+%% Listeners are supervised by tcp_listener_sup (one supervisor per protocol).
+%%
+%% See also rabbit_networking and tcp_listener_sup.
+
+-behaviour(gen_server).
+
+-export([start_link/5]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+         terminate/2, code_change/3]).
+
+-record(state, {on_startup, on_shutdown, label, ip, port}).
+
+%%----------------------------------------------------------------------------
+
+-type mfargs() :: {atom(), atom(), [any()]}.
+
+-spec start_link
+        (inet:ip_address(), inet:port_number(),
+         mfargs(), mfargs(), string()) ->
+                           rabbit_types:ok_pid_or_error().
+
+%%--------------------------------------------------------------------
+
+start_link(IPAddress, Port,
+           OnStartup, OnShutdown, Label) ->
+    gen_server:start_link(
+      ?MODULE, {IPAddress, Port,
+                OnStartup, OnShutdown, Label}, []).
+
+%%--------------------------------------------------------------------
+
+init({IPAddress, Port, {M,F,A} = OnStartup, OnShutdown, Label}) ->
+    process_flag(trap_exit, true),
+    error_logger:info_msg(
+      "started ~s on ~s:~p~n",
+      [Label, rabbit_misc:ntoab(IPAddress), Port]),
+    apply(M, F, A ++ [IPAddress, Port]),
+    {ok, #state{on_startup = OnStartup, on_shutdown = OnShutdown,
+                label = Label, ip=IPAddress, port=Port}}.
+
+handle_call(_Request, _From, State) ->
+    {noreply, State}.
+
+handle_cast(_Msg, State) ->
+    {noreply, State}.
+
+handle_info(_Info, State) ->
+    {noreply, State}.
+
+terminate(_Reason, #state{on_shutdown = {M,F,A}, label=Label, ip=IPAddress, port=Port}) ->
+    error_logger:info_msg("stopped ~s on ~s:~p~n",
+                          [Label, rabbit_misc:ntoab(IPAddress), Port]),
+    apply(M, F, A ++ [IPAddress, Port]).
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
diff --git a/deps/rabbit/src/tcp_listener_sup.erl b/deps/rabbit/src/tcp_listener_sup.erl
new file mode 100644 (file)
index 0000000..5ef652a
--- /dev/null
@@ -0,0 +1,62 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(tcp_listener_sup).
+
+%% Supervises TCP listeners. There is a separate supervisor for every
+%% protocol. In case of AMQP 0-9-1, it resides under rabbit_sup. Plugins
+%% that provide protocol support (e.g. STOMP) have an instance of this supervisor in their
+%% app supervision tree.
+%%
+%% See also rabbit_networking and tcp_listener.
+
+-behaviour(supervisor).
+
+-export([start_link/10]).
+
+-export([init/1]).
+
+%%----------------------------------------------------------------------------
+
+-type mfargs() :: {atom(), atom(), [any()]}.
+
+-spec start_link
+        (inet:ip_address(), inet:port_number(), module(), [gen_tcp:listen_option()],
+         module(), any(), mfargs(), mfargs(), integer(), string()) ->
+                           rabbit_types:ok_pid_or_error().
+
+%%----------------------------------------------------------------------------
+
+start_link(IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, OnShutdown,
+           ConcurrentAcceptorCount, Label) ->
+    supervisor:start_link(
+      ?MODULE, {IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, OnShutdown,
+                ConcurrentAcceptorCount, Label}).
+
+init({IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, OnShutdown,
+      ConcurrentAcceptorCount, Label}) ->
+    {ok, AckTimeout} = application:get_env(rabbit, ssl_handshake_timeout),
+    {ok, {{one_for_all, 10, 10}, [
+        ranch:child_spec({acceptor, IPAddress, Port}, ConcurrentAcceptorCount,
+            Transport, [{port, Port}, {ip, IPAddress},
+                {max_connections, infinity},
+                {ack_timeout, AckTimeout},
+                {connection_type, supervisor}|SocketOpts],
+            ProtoSup, ProtoOpts),
+        {tcp_listener, {tcp_listener, start_link,
+                        [IPAddress, Port,
+                         OnStartup, OnShutdown, Label]},
+         transient, 16#ffffffff, worker, [tcp_listener]}]}}.
similarity index 66%
rename from rabbitmq-server/src/truncate.erl
rename to deps/rabbit/src/truncate.erl
index 8feae35f88853ca5895dcab2209bd6be17741c94..a1586b0cb0152a255b264a01b4071a1a11c001ad 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(truncate).
 -record(params, {content, struct, content_dec, struct_dec}).
 
 -export([log_event/2, term/2]).
-%% exported for testing
--export([test/0]).
+
+-ifdef(TEST).
+-export([term_size/3]).
+-endif.
 
 log_event({Type, GL, {Pid, Format, Args}}, Params)
   when Type =:= error orelse
@@ -123,72 +125,3 @@ tuple_term_size(_T, M, I, S, _W) when I > S ->
     M;
 tuple_term_size(T, M, I, S, W) ->
     tuple_term_size(T, lim(term_size(element(I, T), M, W), 2 * W), I + 1, S, W).
-
-%%----------------------------------------------------------------------------
-
-test() ->
-    test_short_examples_exactly(),
-    test_term_limit(),
-    test_large_examples_for_size(),
-    ok.
-
-test_short_examples_exactly() ->
-    F = fun (Term, Exp) ->
-                Exp = term(Term, {1, {10, 10, 5, 5}}),
-                Term = term(Term, {100000, {10, 10, 5, 5}})
-        end,
-    FSmall = fun (Term, Exp) ->
-                     Exp = term(Term, {1, {2, 2, 2, 2}}),
-                     Term = term(Term, {100000, {2, 2, 2, 2}})
-             end,
-    F([], []),
-    F("h", "h"),
-    F("hello world", "hello w..."),
-    F([[h,e,l,l,o,' ',w,o,r,l,d]], [[h,e,l,l,o,'...']]),
-    F([a|b], [a|b]),
-    F(<<"hello">>, <<"hello">>),
-    F([<<"hello world">>], [<<"he...">>]),
-    F(<<1:1>>, <<1:1>>),
-    F(<<1:81>>, <<0:56, "...">>),
-    F({{{{a}}},{b},c,d,e,f,g,h,i,j,k}, {{{'...'}},{b},c,d,e,f,g,h,i,j,'...'}),
-    FSmall({a,30,40,40,40,40}, {a,30,'...'}),
-    FSmall([a,30,40,40,40,40], [a,30,'...']),
-    P = spawn(fun() -> receive die -> ok end end),
-    F([0, 0.0, <<1:1>>, F, P], [0, 0.0, <<1:1>>, F, P]),
-    P ! die,
-    R = make_ref(),
-    F([R], [R]),
-    ok.
-
-test_term_limit() ->
-    W = erlang:system_info(wordsize),
-    S = <<"abc">>,
-    1 = term_size(S, 4, W),
-    limit_exceeded = term_size(S, 3, W),
-    case 100 - term_size([S, S], 100, W) of
-        22 -> ok; %% 32 bit
-        38 -> ok  %% 64 bit
-    end,
-    case 100 - term_size([S, [S]], 100, W) of
-        30 -> ok; %% ditto
-        54 -> ok
-    end,
-    limit_exceeded = term_size([S, S], 6, W),
-    ok.
-
-test_large_examples_for_size() ->
-    %% Real world values
-    Shrink = fun(Term) -> term(Term, {1, {1000, 100, 50, 5}}) end,
-    TestSize = fun(Term) ->
-                       true = 5000000 < size(term_to_binary(Term)),
-                       true = 500000 > size(term_to_binary(Shrink(Term)))
-               end,
-    TestSize(lists:seq(1, 5000000)),
-    TestSize(recursive_list(1000, 10)),
-    TestSize(recursive_list(5000, 20)),
-    TestSize(gb_sets:from_list([I || I <- lists:seq(1, 1000000)])),
-    TestSize(gb_trees:from_orddict([{I, I} || I <- lists:seq(1, 1000000)])),
-    ok.
-
-recursive_list(S, 0) -> lists:seq(1, S);
-recursive_list(S, N) -> [recursive_list(S div N, N-1) || _ <- lists:seq(1, S)].
similarity index 87%
rename from rabbitmq-server/src/vm_memory_monitor.erl
rename to deps/rabbit/src/vm_memory_monitor.erl
index bf9a77c174e5aaf386959e38490659a045984748..6b043685bde64bb9b6abae7a58bec13d91b50c92 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% In practice Erlang shouldn't be allowed to grow to more than a half
 %% wrong. Scale by vm_memory_high_watermark in configuration to get a
 %% sensible value.
 -define(MEMORY_SIZE_FOR_UNKNOWN_OS, 1073741824).
+-define(DEFAULT_VM_MEMORY_HIGH_WATERMARK, 0.4).
 
 -record(state, {total_memory,
                 memory_limit,
-                memory_fraction,
+                memory_config_limit,
                 timeout,
                 timer,
                 alarmed,
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/1 :: (float()) -> rabbit_types:ok_pid_or_error()).
--spec(start_link/3 :: (float(), fun ((any()) -> 'ok'),
-                       fun ((any()) -> 'ok')) -> rabbit_types:ok_pid_or_error()).
--spec(get_total_memory/0 :: () -> (non_neg_integer() | 'unknown')).
--spec(get_vm_limit/0 :: () -> non_neg_integer()).
--spec(get_check_interval/0 :: () -> non_neg_integer()).
--spec(set_check_interval/1 :: (non_neg_integer()) -> 'ok').
--spec(get_vm_memory_high_watermark/0 :: () -> float()).
--spec(set_vm_memory_high_watermark/1 :: (float()) -> 'ok').
--spec(get_memory_limit/0 :: () -> non_neg_integer()).
-
--endif.
+-type vm_memory_high_watermark() :: (float() | {'absolute', integer() | string()}).
+-spec start_link(float()) -> rabbit_types:ok_pid_or_error().
+-spec start_link(float(), fun ((any()) -> 'ok'),
+                       fun ((any()) -> 'ok')) -> rabbit_types:ok_pid_or_error().
+-spec get_total_memory() -> (non_neg_integer() | 'unknown').
+-spec get_vm_limit() -> non_neg_integer().
+-spec get_check_interval() -> non_neg_integer().
+-spec set_check_interval(non_neg_integer()) -> 'ok'.
+-spec get_vm_memory_high_watermark() -> vm_memory_high_watermark().
+-spec set_vm_memory_high_watermark(vm_memory_high_watermark()) -> 'ok'.
+-spec get_memory_limit() -> non_neg_integer().
 
 %%----------------------------------------------------------------------------
 %% Public API
@@ -128,11 +126,12 @@ init([MemFraction, AlarmFuns]) ->
                      alarm_funs = AlarmFuns },
     {ok, set_mem_limits(State, MemFraction)}.
 
-handle_call(get_vm_memory_high_watermark, _From, State) ->
-    {reply, State#state.memory_fraction, State};
+handle_call(get_vm_memory_high_watermark, _From,
+           #state{memory_config_limit = MemLimit} = State) ->
+    {reply, MemLimit, State};
 
-handle_call({set_vm_memory_high_watermark, MemFraction}, _From, State) ->
-    {reply, ok, set_mem_limits(State, MemFraction)};
+handle_call({set_vm_memory_high_watermark, MemLimit}, _From, State) ->
+    {reply, ok, set_mem_limits(State, MemLimit)};
 
 handle_call(get_check_interval, _From, State) ->
     {reply, State#state.timeout, State};
@@ -166,7 +165,7 @@ code_change(_OldVsn, State, _Extra) ->
 %% Server Internals
 %%----------------------------------------------------------------------------
 
-set_mem_limits(State, MemFraction) ->
+set_mem_limits(State, MemLimit) ->
     case erlang:system_info(wordsize) of
         4 ->
             error_logger:warning_msg(
@@ -206,12 +205,31 @@ set_mem_limits(State, MemFraction) ->
             _ ->
                 TotalMemory
         end,
-    MemLim = trunc(MemFraction * UsableMemory),
+    MemLim = interpret_limit(parse_mem_limit(MemLimit), UsableMemory),
     error_logger:info_msg("Memory limit set to ~pMB of ~pMB total.~n",
                           [trunc(MemLim/?ONE_MB), trunc(TotalMemory/?ONE_MB)]),
     internal_update(State #state { total_memory    = TotalMemory,
                                    memory_limit    = MemLim,
-                                   memory_fraction = MemFraction}).
+                                   memory_config_limit = MemLimit}).
+
+interpret_limit({'absolute', MemLim}, UsableMemory) ->
+    erlang:min(MemLim, UsableMemory);
+interpret_limit(MemFraction, UsableMemory) ->
+    trunc(MemFraction * UsableMemory).
+
+
+parse_mem_limit({absolute, Limit}) ->
+    case rabbit_resource_monitor_misc:parse_information_unit(Limit) of
+        {ok, ParsedLimit} -> {absolute, ParsedLimit};
+        {error, parse_error} ->
+            rabbit_log:error("Unable to parse vm_memory_high_watermark value ~p", [Limit]),
+            ?DEFAULT_VM_MEMORY_HIGH_WATERMARK
+    end;
+parse_mem_limit(Relative) when is_float(Relative), Relative < 1 ->
+    Relative;
+parse_mem_limit(_) ->
+    ?DEFAULT_VM_MEMORY_HIGH_WATERMARK.
+
 
 internal_update(State = #state { memory_limit = MemLimit,
                                  alarmed      = Alarmed,
@@ -386,7 +404,7 @@ sysctl(Def) ->
 read_proc_file(File) ->
     {ok, IoDevice} = file:open(File, [read, raw]),
     Res = read_proc_file(IoDevice, []),
-    file:close(IoDevice),
+    _ = file:close(IoDevice),
     lists:flatten(lists:reverse(Res)).
 
 -define(BUFFER_SIZE, 1024).
similarity index 77%
rename from rabbitmq-server/src/worker_pool.erl
rename to deps/rabbit/src/worker_pool.erl
index 99b227e39275aedc605cd5141cff295fcbcda638..c0be486f5fbaf72df6f598d73ceddd5d9303a6f1 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(worker_pool).
 
 %% Generic worker pool manager.
 %%
-%% Submitted jobs are functions. They can be executed asynchronously
-%% (using worker_pool:submit/1, worker_pool:submit/2) or synchronously
+%% Submitted jobs are functions. They can be executed synchronously
+%% (using worker_pool:submit/1, worker_pool:submit/2) or asynchronously
 %% (using worker_pool:submit_async/1).
 %%
 %% We typically use the worker pool if we want to limit the maximum
 
 -behaviour(gen_server2).
 
--export([start_link/0, submit/1, submit/2, submit_async/1, ready/1,
-         idle/1]).
+-export([start_link/1,
+         submit/1, submit/2, submit/3,
+         submit_async/1, submit_async/2,
+         ready/2,
+         idle/2,
+         default_pool/0]).
 
 -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
          terminate/2, code_change/3]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-type mfargs() :: {atom(), atom(), [any()]}.
 
--type(mfargs() :: {atom(), atom(), [any()]}).
-
--spec(start_link/0 :: () -> {'ok', pid()} | {'error', any()}).
--spec(submit/1 :: (fun (() -> A) | mfargs()) -> A).
--spec(submit/2 :: (fun (() -> A) | mfargs(), 'reuse' | 'single') -> A).
--spec(submit_async/1 :: (fun (() -> any()) | mfargs()) -> 'ok').
--spec(ready/1 :: (pid()) -> 'ok').
--spec(idle/1 :: (pid()) -> 'ok').
-
--endif.
+-spec start_link(atom()) -> {'ok', pid()} | {'error', any()}.
+-spec submit(fun (() -> A) | mfargs()) -> A.
+-spec submit(fun (() -> A) | mfargs(), 'reuse' | 'single') -> A.
+-spec submit(atom(), fun (() -> A) | mfargs(), 'reuse' | 'single') -> A.
+-spec submit_async(fun (() -> any()) | mfargs()) -> 'ok'.
+-spec ready(atom(), pid()) -> 'ok'.
+-spec idle(atom(), pid()) -> 'ok'.
+-spec default_pool() -> atom().
 
 %%----------------------------------------------------------------------------
 
--define(SERVER, ?MODULE).
+-define(DEFAULT_POOL, ?MODULE).
 -define(HIBERNATE_AFTER_MIN, 1000).
 -define(DESIRED_HIBERNATE, 10000).
 
 
 %%----------------------------------------------------------------------------
 
-start_link() -> gen_server2:start_link({local, ?SERVER}, ?MODULE, [],
-                                       [{timeout, infinity}]).
+start_link(Name) -> gen_server2:start_link({local, Name}, ?MODULE, [],
+                                           [{timeout, infinity}]).
 
 submit(Fun) ->
-    submit(Fun, reuse).
+    submit(?DEFAULT_POOL, Fun, reuse).
 
 %% ProcessModel =:= single is for working around the mnesia_locker bug.
 submit(Fun, ProcessModel) ->
+    submit(?DEFAULT_POOL, Fun, ProcessModel).
+
+submit(Server, Fun, ProcessModel) ->
     case get(worker_pool_worker) of
         true -> worker_pool_worker:run(Fun);
-        _    -> Pid = gen_server2:call(?SERVER, {next_free, self()}, infinity),
+        _    -> Pid = gen_server2:call(Server, {next_free, self()}, infinity),
                 worker_pool_worker:submit(Pid, Fun, ProcessModel)
     end.
 
-submit_async(Fun) -> gen_server2:cast(?SERVER, {run_async, Fun}).
+submit_async(Fun) -> submit_async(?DEFAULT_POOL, Fun).
+
+submit_async(Server, Fun) -> gen_server2:cast(Server, {run_async, Fun}).
+
+ready(Server, WPid) -> gen_server2:cast(Server, {ready, WPid}).
 
-ready(WPid) -> gen_server2:cast(?SERVER, {ready, WPid}).
+idle(Server, WPid) -> gen_server2:cast(Server, {idle, WPid}).
 
-idle(WPid) -> gen_server2:cast(?SERVER, {idle, WPid}).
+default_pool() -> ?DEFAULT_POOL.
 
 %%----------------------------------------------------------------------------
 
diff --git a/deps/rabbit/src/worker_pool_sup.erl b/deps/rabbit/src/worker_pool_sup.erl
new file mode 100644 (file)
index 0000000..f4ed4d7
--- /dev/null
@@ -0,0 +1,56 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(worker_pool_sup).
+
+-behaviour(supervisor).
+
+-export([start_link/0, start_link/1, start_link/2]).
+
+-export([init/1]).
+
+%%----------------------------------------------------------------------------
+
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec start_link(non_neg_integer()) -> rabbit_types:ok_pid_or_error().
+-spec start_link(non_neg_integer(), atom())
+                   -> rabbit_types:ok_pid_or_error().
+
+%%----------------------------------------------------------------------------
+
+start_link() ->
+    start_link(erlang:system_info(schedulers)).
+
+start_link(WCount) ->
+    start_link(WCount, worker_pool:default_pool()).
+
+start_link(WCount, PoolName) ->
+    SupName = list_to_atom(atom_to_list(PoolName) ++ "_sup"),
+    supervisor:start_link({local, SupName}, ?MODULE, [WCount, PoolName]).
+
+%%----------------------------------------------------------------------------
+
+init([WCount, PoolName]) ->
+    %% we want to survive up to 1K of worker restarts per second,
+    %% e.g. when a large worker pool used for network connections
+    %% encounters a network failure. This is the case in the LDAP authentication
+    %% backend plugin.
+    {ok, {{one_for_one, 1000, 1},
+          [{worker_pool, {worker_pool, start_link, [PoolName]}, transient,
+            16#ffffffff, worker, [worker_pool]} |
+           [{N, {worker_pool_worker, start_link, [PoolName]}, transient,
+             16#ffffffff, worker, [worker_pool_worker]}
+            || N <- lists:seq(1, WCount)]]}}.
similarity index 80%
rename from rabbitmq-server/src/worker_pool_worker.erl
rename to deps/rabbit/src/worker_pool_worker.erl
index 6e66d8518eec573e2272749b581670701084408f..bd07f0d782ad7b68afa861ae9361504a62211367 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(worker_pool_worker).
@@ -23,7 +23,8 @@
 
 -behaviour(gen_server2).
 
--export([start_link/0, next_job_from/2, submit/3, submit_async/2, run/1]).
+-export([start_link/1, next_job_from/2, submit/3, submit_async/2,
+         run/1]).
 
 -export([set_maximum_since_use/2]).
 
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-type mfargs() :: {atom(), atom(), [any()]}.
 
--type(mfargs() :: {atom(), atom(), [any()]}).
-
--spec(start_link/0 :: () -> {'ok', pid()} | {'error', any()}).
--spec(next_job_from/2 :: (pid(), pid()) -> 'ok').
--spec(submit/3 :: (pid(), fun (() -> A) | mfargs(), 'reuse' | 'single') -> A).
--spec(submit_async/2 :: (pid(), fun (() -> any()) | mfargs()) -> 'ok').
--spec(run/1 :: (fun (() -> A)) -> A; (mfargs()) -> any()).
--spec(set_maximum_since_use/2 :: (pid(), non_neg_integer()) -> 'ok').
-
--endif.
+-spec start_link(atom) -> {'ok', pid()} | {'error', any()}.
+-spec next_job_from(pid(), pid()) -> 'ok'.
+-spec submit(pid(), fun (() -> A) | mfargs(), 'reuse' | 'single') -> A.
+-spec submit_async(pid(), fun (() -> any()) | mfargs()) -> 'ok'.
+-spec run(fun (() -> A)) -> A; (mfargs()) -> any().
+-spec set_maximum_since_use(pid(), non_neg_integer()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -52,8 +49,8 @@
 
 %%----------------------------------------------------------------------------
 
-start_link() ->
-    gen_server2:start_link(?MODULE, [], [{timeout, infinity}]).
+start_link(PoolName) ->
+    gen_server2:start_link(?MODULE, [PoolName], [{timeout, infinity}]).
 
 next_job_from(Pid, CPid) ->
     gen_server2:cast(Pid, {next_job_from, CPid}).
@@ -86,11 +83,12 @@ run(Fun, single) ->
 
 %%----------------------------------------------------------------------------
 
-init([]) ->
+init([PoolName]) ->
     ok = file_handle_cache:register_callback(?MODULE, set_maximum_since_use,
                                              [self()]),
-    ok = worker_pool:ready(self()),
+    ok = worker_pool:ready(PoolName, self()),
     put(worker_pool_worker, true),
+    put(worker_pool_name, PoolName),
     {ok, undefined, hibernate,
      {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
 
@@ -104,7 +102,7 @@ handle_call({submit, Fun, CPid, ProcessModel}, From, undefined) ->
 handle_call({submit, Fun, CPid, ProcessModel}, From, {from, CPid, MRef}) ->
     erlang:demonitor(MRef),
     gen_server2:reply(From, run(Fun, ProcessModel)),
-    ok = worker_pool:idle(self()),
+    ok = worker_pool:idle(get(worker_pool_name), self()),
     {noreply, undefined, hibernate};
 
 handle_call(Msg, _From, State) ->
@@ -116,12 +114,12 @@ handle_cast({next_job_from, CPid}, undefined) ->
 
 handle_cast({next_job_from, CPid}, {job, CPid, From, Fun, ProcessModel}) ->
     gen_server2:reply(From, run(Fun, ProcessModel)),
-    ok = worker_pool:idle(self()),
+    ok = worker_pool:idle(get(worker_pool_name), self()),
     {noreply, undefined, hibernate};
 
 handle_cast({submit_async, Fun}, undefined) ->
     run(Fun),
-    ok = worker_pool:idle(self()),
+    ok = worker_pool:idle(get(worker_pool_name), self()),
     {noreply, undefined, hibernate};
 
 handle_cast({set_maximum_since_use, Age}, State) ->
@@ -132,7 +130,7 @@ handle_cast(Msg, State) ->
     {stop, {unexpected_cast, Msg}, State}.
 
 handle_info({'DOWN', MRef, process, CPid, _Reason}, {from, CPid, MRef}) ->
-    ok = worker_pool:idle(self()),
+    ok = worker_pool:idle(get(worker_pool_name), self()),
     {noreply, undefined, hibernate};
 
 handle_info({'DOWN', _MRef, process, _Pid, _Reason}, State) ->
diff --git a/deps/rabbit_common/CODE_OF_CONDUCT.md b/deps/rabbit_common/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
similarity index 72%
rename from rabbitmq-server/plugins-src/eldap-wrapper/CONTRIBUTING.md
rename to deps/rabbit_common/CONTRIBUTING.md
index 69a4b4a437fdf25c45c200610d780c7a009146be..45bbcbe62e74c1a8682d2097db8eec955d177b9c 100644 (file)
@@ -20,22 +20,9 @@ If what you are going to work on is a substantial change, please first ask the c
 of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
 
 
-## (Brief) Code of Conduct
+## Code of Conduct
 
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
 
 
 ## Contributor Agreement
diff --git a/deps/rabbit_common/LICENSE b/deps/rabbit_common/LICENSE
new file mode 100644 (file)
index 0000000..f994905
--- /dev/null
@@ -0,0 +1,8 @@
+This package, the RabbitMQ commons library, is licensed under the MPL. For the
+MPL, please see LICENSE-MPL-RabbitMQ.
+
+The files 'ec_semver.erl' and 'ec_semver_parser.erl' are Copyright (c) 2011
+Erlware, LLC and licensed under a MIT license, see LICENSE-MIT-Erlware-Commons.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbit_common/LICENSE-MIT-Erlware-Commons b/deps/rabbit_common/LICENSE-MIT-Erlware-Commons
new file mode 100644 (file)
index 0000000..fc89c02
--- /dev/null
@@ -0,0 +1,21 @@
+Copyright (c) 2011 Erlware, LLC
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MPL-RabbitMQ
rename to deps/rabbit_common/LICENSE-MPL-RabbitMQ
index 0339c5377824da423a23a43f4989dd4936742e69..e163fccaf60cac14f2ae0cf6d0f2b73f8b83eac6 100644 (file)
@@ -447,7 +447,7 @@ EXHIBIT A -Mozilla Public License.
      The Original Code is RabbitMQ Management Plugin.
 
      The Initial Developer of the Original Code is GoPivotal, Inc.
-     Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.''
+     Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.''
 
      [NOTE: The text of this Exhibit A may differ slightly from the text of
      the notices in the Source Code files of the Original Code. You should
diff --git a/deps/rabbit_common/Makefile b/deps/rabbit_common/Makefile
new file mode 100644 (file)
index 0000000..678acef
--- /dev/null
@@ -0,0 +1,69 @@
+PROJECT = rabbit_common
+
+BUILD_DEPS = rabbitmq_codegen
+TEST_DEPS = mochiweb proper
+
+.DEFAULT_GOAL = all
+
+EXTRA_SOURCES += include/rabbit_framing.hrl                            \
+                src/rabbit_framing_amqp_0_8.erl                        \
+                src/rabbit_framing_amqp_0_9_1.erl
+
+.DEFAULT_GOAL = all
+$(PROJECT).d:: $(EXTRA_SOURCES)
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include mk/rabbitmq-components.mk
+include erlang.mk
+include mk/rabbitmq-build.mk
+include mk/rabbitmq-dist.mk
+include mk/rabbitmq-tools.mk
+
+# --------------------------------------------------------------------
+# Compilation.
+# --------------------------------------------------------------------
+
+# $(ERTS_VER) is set in `rabbitmq-build.mk` above.
+tls_atom_version_MAX_ERTS_VER = 6.0
+ifeq ($(call compare_version,$(ERTS_VER),$(tls_atom_version_MAX_ERTS_VER),<),true)
+RMQ_ERLC_OPTS += -Ddefine_tls_atom_version
+endif
+
+# --------------------------------------------------------------------
+# Framing sources generation.
+# --------------------------------------------------------------------
+
+PYTHON       ?= python
+CODEGEN       = $(CURDIR)/codegen.py
+CODEGEN_DIR  ?= $(DEPS_DIR)/rabbitmq_codegen
+CODEGEN_AMQP  = $(CODEGEN_DIR)/amqp_codegen.py
+
+AMQP_SPEC_JSON_FILES_0_8   = $(CODEGEN_DIR)/amqp-rabbitmq-0.8.json
+AMQP_SPEC_JSON_FILES_0_9_1 = $(CODEGEN_DIR)/amqp-rabbitmq-0.9.1.json   \
+                            $(CODEGEN_DIR)/credit_extension.json
+
+include/rabbit_framing.hrl:: $(CODEGEN) $(CODEGEN_AMQP) \
+    $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8)
+       $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \
+        $(PYTHON) $(CODEGEN) --ignore-conflicts header \
+        $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8) $@
+
+src/rabbit_framing_amqp_0_9_1.erl:: $(CODEGEN) $(CODEGEN_AMQP) \
+    $(AMQP_SPEC_JSON_FILES_0_9_1)
+       $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \
+        $(PYTHON) $(CODEGEN) body $(AMQP_SPEC_JSON_FILES_0_9_1) $@
+
+src/rabbit_framing_amqp_0_8.erl:: $(CODEGEN) $(CODEGEN_AMQP) \
+    $(AMQP_SPEC_JSON_FILES_0_8)
+       $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \
+        $(PYTHON) $(CODEGEN) body $(AMQP_SPEC_JSON_FILES_0_8) $@
+
+clean:: clean-extra-sources
+
+clean-extra-sources:
+       $(gen_verbose) rm -f $(EXTRA_SOURCES)
old mode 100644 (file)
new mode 100755 (executable)
similarity index 65%
rename from rabbitmq-server/codegen.py
rename to deps/rabbit_common/codegen.py
index fbc6f61..a0300ed
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 ##  The contents of this file are subject to the Mozilla Public License
 ##  Version 1.1 (the "License"); you may not use this file except in
 ##  compliance with the License. You may obtain a copy of the License
 ##
 
 from __future__ import nested_scopes
+from __future__ import print_function
 
 import sys
-sys.path.append("../rabbitmq-codegen")  # in case we're next to an experimental revision
-sys.path.append("codegen")              # in case we're building from a distribution package
 
 from amqp_codegen import *
 import string
@@ -35,13 +36,19 @@ def convertTable(d):
 
 erlangDefaultValueTypeConvMap = {
     bool : lambda x: str(x).lower(),
-    str : lambda x: "<<\"" + x + "\">>",
     int : lambda x: str(x),
     float : lambda x: str(x),
-    dict: convertTable,
-    unicode: lambda x: "<<\"" + x.encode("utf-8") + "\">>"
+    dict: convertTable
 }
 
+try:
+    _checkIfPython2 = unicode
+    erlangDefaultValueTypeConvMap[str] = lambda x: "<<\"" + x + "\">>"
+    erlangDefaultValueTypeConvMap[unicode] = lambda x: "<<\"" + x.encode("utf-8") + "\">>"
+except NameError:
+    erlangDefaultValueTypeConvMap[bytes] = lambda x: "<<\"" + x + "\">>"
+    erlangDefaultValueTypeConvMap[str] = lambda x: "<<\"" + x + "\">>"
+
 def erlangize(s):
     s = s.replace('-', '_')
     s = s.replace(' ', '_')
@@ -88,10 +95,10 @@ def prettyType(typeName, subTypes, typesPerLine = 4):
     sTs = multiLineFormat(subTypes,
                           "( ", " | ", "\n       | ", " )",
                           thingsPerLine = typesPerLine)
-    return "-type(%s ::\n       %s)." % (typeName, sTs)
+    return "-type %s ::\n       %s." % (typeName, sTs)
 
 def printFileHeader():
-    print """%%   Autogenerated code. Do not edit.
+    print("""%%   Autogenerated code. Do not edit.
 %%
 %%  The contents of this file are subject to the Mozilla Public License
 %%  Version 1.1 (the "License"); you may not use this file except in
@@ -107,7 +114,7 @@ def printFileHeader():
 %%
 %%  The Initial Developer of the Original Code is Pivotal Software, Inc.
 %%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%"""
+%%""")
 
 def genErl(spec):
     def erlType(domain):
@@ -126,30 +133,30 @@ def genErl(spec):
         return ', '.join([erlangize(f.name) + " = F" + str(f.index) for f in fields])
 
     def genLookupMethodName(m):
-        print "lookup_method_name({%d, %d}) -> %s;" % (m.klass.index, m.index, m.erlangName())
+        print("lookup_method_name({%d, %d}) -> %s;" % (m.klass.index, m.index, m.erlangName()))
 
     def genLookupClassName(c):
-        print "lookup_class_name(%d) -> %s;" % (c.index, c.erlangName())
+        print("lookup_class_name(%d) -> %s;" % (c.index, c.erlangName()))
 
     def genMethodId(m):
-        print "method_id(%s) -> {%d, %d};" % (m.erlangName(), m.klass.index, m.index)
+        print("method_id(%s) -> {%d, %d};" % (m.erlangName(), m.klass.index, m.index))
 
     def genMethodHasContent(m):
-        print "method_has_content(%s) -> %s;" % (m.erlangName(), str(m.hasContent).lower())
+        print("method_has_content(%s) -> %s;" % (m.erlangName(), str(m.hasContent).lower()))
 
     def genMethodIsSynchronous(m):
         hasNoWait = "nowait" in fieldNameList(m.arguments)
         if m.isSynchronous and hasNoWait:
-          print "is_method_synchronous(#%s{nowait = NoWait}) -> not(NoWait);" % (m.erlangName())
+          print("is_method_synchronous(#%s{nowait = NoWait}) -> not(NoWait);" % (m.erlangName()))
         else:
-          print "is_method_synchronous(#%s{}) -> %s;" % (m.erlangName(), str(m.isSynchronous).lower())
+          print("is_method_synchronous(#%s{}) -> %s;" % (m.erlangName(), str(m.isSynchronous).lower()))
 
     def genMethodFieldTypes(m):
         """Not currently used - may be useful in future?"""
-        print "method_fieldtypes(%s) -> %s;" % (m.erlangName(), fieldTypeList(m.arguments))
+        print("method_fieldtypes(%s) -> %s;" % (m.erlangName(), fieldTypeList(m.arguments)))
 
     def genMethodFieldNames(m):
-        print "method_fieldnames(%s) -> %s;" % (m.erlangName(), fieldNameList(m.arguments))
+        print("method_fieldnames(%s) -> %s;" % (m.erlangName(), fieldNameList(m.arguments)))
 
     def packMethodFields(fields):
         packed = []
@@ -192,22 +199,22 @@ def genErl(spec):
             type = erlType(f.domain)
             if type == 'bit':
                 for index in range(f.count()):
-                    print "  F%d = ((F%dBits band %d) /= 0)," % \
+                    print("  F%d = ((F%dBits band %d) /= 0)," % \
                           (f.index + index,
                            f.index,
-                           1 << index)
+                           1 << index))
             elif type == 'table':
-                print "  F%d = rabbit_binary_parser:parse_table(F%dTab)," % \
-                      (f.index, f.index)
+                print("  F%d = rabbit_binary_parser:parse_table(F%dTab)," % \
+                      (f.index, f.index))
             # We skip the check on content-bearing methods for
             # speed. This is a sanity check, not a security thing.
             elif type == 'shortstr' and not hasContent:
-                print "  rabbit_binary_parser:assert_utf8(F%d)," % (f.index)
+                print("  rabbit_binary_parser:assert_utf8(F%d)," % (f.index))
             else:
                 pass
 
     def genMethodRecord(m):
-        print "method_record(%s) -> #%s{};" % (m.erlangName(), m.erlangName())
+        print("method_record(%s) -> #%s{};" % (m.erlangName(), m.erlangName()))
 
     def genDecodeMethodFields(m):
         packedFields = packMethodFields(m.arguments)
@@ -217,9 +224,9 @@ def genErl(spec):
         else:
             restSeparator = ''
         recordConstructorExpr = '#%s{%s}' % (m.erlangName(), fieldMapList(m.arguments))
-        print "decode_method_fields(%s, <<%s>>) ->" % (m.erlangName(), binaryPattern)
+        print("decode_method_fields(%s, <<%s>>) ->" % (m.erlangName(), binaryPattern))
         genFieldPostprocessing(packedFields, m.hasContent)
-        print "  %s;" % (recordConstructorExpr,)
+        print("  %s;" % (recordConstructorExpr,))
 
     def genDecodeProperties(c):
         def presentBin(fields):
@@ -228,45 +235,45 @@ def genErl(spec):
         def writePropFieldLine(field):
             i = str(field.index)
             if field.domain == 'bit':
-                print "  {F%s, R%s} = {P%s =/= 0, R%s}," % \
-                    (i, str(field.index + 1), i, i)
+                print("  {F%s, R%s} = {P%s =/= 0, R%s}," % \
+                    (i, str(field.index + 1), i, i))
             else:
-                print "  {F%s, R%s} = if P%s =:= 0 -> {undefined, R%s}; true -> ?%s_VAL(R%s, L%s, V%s, X%s) end," % \
-                    (i, str(field.index + 1), i, i, erlType(field.domain).upper(), i, i, i, i)
+                print("  {F%s, R%s} = if P%s =:= 0 -> {undefined, R%s}; true -> ?%s_VAL(R%s, L%s, V%s, X%s) end," % \
+                    (i, str(field.index + 1), i, i, erlType(field.domain).upper(), i, i, i, i))
 
         if len(c.fields) == 0:
-            print "decode_properties(%d, <<>>) ->" % (c.index,)
+            print("decode_properties(%d, <<>>) ->" % (c.index,))
         else:
-            print ("decode_properties(%d, %s) ->" %
-                   (c.index, presentBin(c.fields)))
+            print(("decode_properties(%d, %s) ->" %
+                   (c.index, presentBin(c.fields))))
             for field in c.fields:
                 writePropFieldLine(field)
-            print "  <<>> = %s," % ('R' + str(len(c.fields)))
-        print "  #'P_%s'{%s};" % (erlangize(c.name), fieldMapList(c.fields))
+            print("  <<>> = %s," % ('R' + str(len(c.fields))))
+        print("  #'P_%s'{%s};" % (erlangize(c.name), fieldMapList(c.fields)))
 
     def genFieldPreprocessing(packed):
         for f in packed:
             type = erlType(f.domain)
             if type == 'bit':
-                print "  F%dBits = (%s)," % \
+                print("  F%dBits = (%s)," % \
                       (f.index,
                        ' bor '.join(['(bitvalue(F%d) bsl %d)' % (x.index, x.index - f.index)
-                                     for x in f.contents]))
+                                     for x in f.contents])))
             elif type == 'table':
-                print "  F%dTab = rabbit_binary_generator:generate_table(F%d)," % (f.index, f.index)
-                print "  F%dLen = size(F%dTab)," % (f.index, f.index)
+                print("  F%dTab = rabbit_binary_generator:generate_table(F%d)," % (f.index, f.index))
+                print("  F%dLen = size(F%dTab)," % (f.index, f.index))
             elif type == 'shortstr':
-                print "  F%dLen = shortstr_size(F%d)," % (f.index, f.index)
+                print("  F%dLen = shortstr_size(F%d)," % (f.index, f.index))
             elif type == 'longstr':
-                print "  F%dLen = size(F%d)," % (f.index, f.index)
+                print("  F%dLen = size(F%d)," % (f.index, f.index))
             else:
                 pass
 
     def genEncodeMethodFields(m):
         packedFields = packMethodFields(m.arguments)
-        print "encode_method_fields(#%s{%s}) ->" % (m.erlangName(), fieldMapList(m.arguments))
+        print("encode_method_fields(#%s{%s}) ->" % (m.erlangName(), fieldMapList(m.arguments)))
         genFieldPreprocessing(packedFields)
-        print "  <<%s>>;" % (', '.join([methodFieldFragment(f) for f in packedFields]))
+        print("  <<%s>>;" % (', '.join([methodFieldFragment(f) for f in packedFields])))
 
     def genEncodeProperties(c):
         def presentBin(fields):
@@ -275,21 +282,21 @@ def genErl(spec):
         def writePropFieldLine(field):
             i = str(field.index)
             if field.domain == 'bit':
-                print "  {P%s, R%s} = {F%s =:= 1, R%s}," % \
-                    (i, str(field.index + 1), i, i)
+                print("  {P%s, R%s} = {F%s =:= 1, R%s}," % \
+                    (i, str(field.index + 1), i, i))
             else:
-                print "  {P%s, R%s} = if F%s =:= undefined -> {0, R%s}; true -> {1, [?%s_PROP(F%s, L%s) | R%s]} end," % \
-                    (i, str(field.index + 1), i, i, erlType(field.domain).upper(), i, i, i)
+                print("  {P%s, R%s} = if F%s =:= undefined -> {0, R%s}; true -> {1, [?%s_PROP(F%s, L%s) | R%s]} end," % \
+                    (i, str(field.index + 1), i, i, erlType(field.domain).upper(), i, i, i))
 
-        print "encode_properties(#'P_%s'{%s}) ->" % (erlangize(c.name), fieldMapList(c.fields))
+        print("encode_properties(#'P_%s'{%s}) ->" % (erlangize(c.name), fieldMapList(c.fields)))
         if len(c.fields) == 0:
-            print "  <<>>;"
+            print("  <<>>;")
         else:
-            print "  R0 = [<<>>],"
+            print("  R0 = [<<>>],")
             for field in c.fields:
                 writePropFieldLine(field)
-            print "  list_to_binary([%s | lists:reverse(R%s)]);" % \
-                (presentBin(c.fields), str(len(c.fields)))
+            print("  list_to_binary([%s | lists:reverse(R%s)]);" % \
+                (presentBin(c.fields), str(len(c.fields))))
 
     def messageConstantClass(cls):
         # We do this because 0.8 uses "soft error" and 8.1 uses "soft-error".
@@ -304,13 +311,13 @@ def genErl(spec):
 
     def genLookupException1(c,hardErrorBoolStr):
         n = erlangConstantName(c)
-        print 'lookup_amqp_exception(%s) -> {%s, ?%s, <<"%s">>};' % \
-              (n.lower(), hardErrorBoolStr, n, n)
+        print('lookup_amqp_exception(%s) -> {%s, ?%s, <<"%s">>};' % \
+              (n.lower(), hardErrorBoolStr, n, n))
 
     def genAmqpException(c,v,cls):
         n = erlangConstantName(c)
-        print 'amqp_exception(?%s) -> %s;' % \
-            (n, n.lower())
+        print('amqp_exception(?%s) -> %s;' % \
+            (n, n.lower()))
 
     methods = spec.allMethods()
 
@@ -320,8 +327,8 @@ def genErl(spec):
         module = "%s_%d" % (module, spec.revision)
     if module == "rabbit_framing_amqp_8_0":
         module = "rabbit_framing_amqp_0_8"
-    print "-module(%s)." % module
-    print """-include("rabbit_framing.hrl").
+    print("-module(%s)." % module)
+    print("""-include("rabbit_framing.hrl").
 
 -export([version/0]).
 -export([lookup_method_name/1]).
@@ -339,27 +346,27 @@ def genErl(spec):
 -export([lookup_amqp_exception/1]).
 -export([amqp_exception/1]).
 
-"""
-    print "%% Various types"
-    print "-ifdef(use_specs)."
+""")
+    print("%% Various types")
 
-    print """-export_type([amqp_field_type/0, amqp_property_type/0,
+    print("""-export_type([amqp_field_type/0, amqp_property_type/0,
               amqp_table/0, amqp_array/0, amqp_value/0,
               amqp_method_name/0, amqp_method/0, amqp_method_record/0,
               amqp_method_field_name/0, amqp_property_record/0,
               amqp_exception/0, amqp_exception_code/0, amqp_class_id/0]).
 
--type(amqp_field_type() ::
+-type amqp_field_type() ::
       'longstr' | 'signedint' | 'decimal' | 'timestamp' |
+      'unsignedbyte' | 'unsignedshort' | 'unsignedint' |
       'table' | 'byte' | 'double' | 'float' | 'long' |
-      'short' | 'bool' | 'binary' | 'void' | 'array').
--type(amqp_property_type() ::
+      'short' | 'bool' | 'binary' | 'void' | 'array'.
+-type amqp_property_type() ::
       'shortstr' | 'longstr' | 'octet' | 'short' | 'long' |
-      'longlong' | 'timestamp' | 'bit' | 'table').
+      'longlong' | 'timestamp' | 'bit' | 'table'.
 
--type(amqp_table() :: [{binary(), amqp_field_type(), amqp_value()}]).
--type(amqp_array() :: [{amqp_field_type(), amqp_value()}]).
--type(amqp_value() :: binary() |    % longstr
+-type amqp_table() :: [{binary(), amqp_field_type(), amqp_value()}].
+-type amqp_array() :: [{amqp_field_type(), amqp_value()}].
+-type amqp_value() :: binary() |    % longstr
                       integer() |   % signedint
                       {non_neg_integer(), non_neg_integer()} | % decimal
                       amqp_table() |
@@ -371,57 +378,55 @@ def genErl(spec):
                       boolean() |   % bool
                       binary() |    % binary
                       'undefined' | % void
-                      non_neg_integer() % timestamp
-     ).
-"""
+                      non_neg_integer(). % timestamp
+""")
 
-    print prettyType("amqp_method_name()",
-                     [m.erlangName() for m in methods])
-    print prettyType("amqp_method()",
+    print(prettyType("amqp_method_name()",
+                     [m.erlangName() for m in methods]))
+    print(prettyType("amqp_method()",
                      ["{%s, %s}" % (m.klass.index, m.index) for m in methods],
-                     6)
-    print prettyType("amqp_method_record()",
-                     ["#%s{}" % (m.erlangName()) for m in methods])
+                     6))
+    print(prettyType("amqp_method_record()",
+                     ["#%s{}" % (m.erlangName()) for m in methods]))
     fieldNames = set()
     for m in methods:
-        fieldNames.update(m.arguments)
-    fieldNames = [erlangize(f.name) for f in fieldNames]
-    print prettyType("amqp_method_field_name()",
-                     fieldNames)
-    print prettyType("amqp_property_record()",
-                     ["#'P_%s'{}" % erlangize(c.name) for c in spec.allClasses()])
-    print prettyType("amqp_exception()",
-                     ["'%s'" % erlangConstantName(c).lower() for (c, v, cls) in spec.constants])
-    print prettyType("amqp_exception_code()",
-                     ["%i" % v for (c, v, cls) in spec.constants])
+        fieldNames.update([erlangize(f.name) for f in m.arguments])
+    fieldNames = [f for f in fieldNames]
+    fieldNames.sort()
+    print(prettyType("amqp_method_field_name()",
+                     fieldNames))
+    print(prettyType("amqp_property_record()",
+                     ["#'P_%s'{}" % erlangize(c.name) for c in spec.allClasses()]))
+    print(prettyType("amqp_exception()",
+                     ["'%s'" % erlangConstantName(c).lower() for (c, v, cls) in spec.constants]))
+    print(prettyType("amqp_exception_code()",
+                     ["%i" % v for (c, v, cls) in spec.constants]))
     classIds = set()
     for m in spec.allMethods():
         classIds.add(m.klass.index)
-    print prettyType("amqp_class_id()",
-                     ["%i" % ci for ci in classIds])
-    print prettyType("amqp_class_name()",
-                     ["%s" % c.erlangName() for c in spec.allClasses()])
-    print "-endif. % use_specs"
+    print(prettyType("amqp_class_id()",
+                     ["%i" % ci for ci in classIds]))
+    print(prettyType("amqp_class_name()",
+                     ["%s" % c.erlangName() for c in spec.allClasses()]))
 
-    print """
+    print("""
 %% Method signatures
--ifdef(use_specs).
--spec(version/0 :: () -> {non_neg_integer(), non_neg_integer(), non_neg_integer()}).
--spec(lookup_method_name/1 :: (amqp_method()) -> amqp_method_name()).
--spec(lookup_class_name/1 :: (amqp_class_id()) -> amqp_class_name()).
--spec(method_id/1 :: (amqp_method_name()) -> amqp_method()).
--spec(method_has_content/1 :: (amqp_method_name()) -> boolean()).
--spec(is_method_synchronous/1 :: (amqp_method_record()) -> boolean()).
--spec(method_record/1 :: (amqp_method_name()) -> amqp_method_record()).
--spec(method_fieldnames/1 :: (amqp_method_name()) -> [amqp_method_field_name()]).
--spec(decode_method_fields/2 ::
-        (amqp_method_name(), binary()) -> amqp_method_record() | rabbit_types:connection_exit()).
--spec(decode_properties/2 :: (non_neg_integer(), binary()) -> amqp_property_record()).
--spec(encode_method_fields/1 :: (amqp_method_record()) -> binary()).
--spec(encode_properties/1 :: (amqp_property_record()) -> binary()).
--spec(lookup_amqp_exception/1 :: (amqp_exception()) -> {boolean(), amqp_exception_code(), binary()}).
--spec(amqp_exception/1 :: (amqp_exception_code()) -> amqp_exception()).
--endif. % use_specs
+-spec version() -> {non_neg_integer(), non_neg_integer(), non_neg_integer()}.
+-spec lookup_method_name(amqp_method()) -> amqp_method_name().
+-spec lookup_class_name(amqp_class_id()) -> amqp_class_name().
+-spec method_id(amqp_method_name()) -> amqp_method().
+-spec method_has_content(amqp_method_name()) -> boolean().
+-spec is_method_synchronous(amqp_method_record()) -> boolean().
+-spec method_record(amqp_method_name()) -> amqp_method_record().
+-spec method_fieldnames(amqp_method_name()) -> [amqp_method_field_name()].
+-spec decode_method_fields(amqp_method_name(), binary()) ->
+          amqp_method_record() | rabbit_types:connection_exit().
+-spec decode_properties(non_neg_integer(), binary()) -> amqp_property_record().
+-spec encode_method_fields(amqp_method_record()) -> binary().
+-spec encode_properties(amqp_property_record()) -> binary().
+-spec lookup_amqp_exception(amqp_exception()) ->
+          {boolean(), amqp_exception_code(), binary()}.
+-spec amqp_exception(amqp_exception_code()) -> amqp_exception().
 
 bitvalue(true) -> 1;
 bitvalue(false) -> 0;
@@ -506,52 +511,52 @@ shortstr_size(S) ->
             T = rabbit_binary_generator:generate_table(X),
             <<(size(T)):32, T/binary>>
         end).
-"""
+""")
     version = "{%d, %d, %d}" % (spec.major, spec.minor, spec.revision)
     if version == '{8, 0, 0}': version = '{0, 8, 0}'
-    print "version() -> %s." % (version)
+    print("version() -> %s." % (version))
 
     for m in methods: genLookupMethodName(m)
-    print "lookup_method_name({_ClassId, _MethodId} = Id) -> exit({unknown_method_id, Id})."
+    print("lookup_method_name({_ClassId, _MethodId} = Id) -> exit({unknown_method_id, Id}).")
 
     for c in spec.allClasses(): genLookupClassName(c)
-    print "lookup_class_name(ClassId) -> exit({unknown_class_id, ClassId})."
+    print("lookup_class_name(ClassId) -> exit({unknown_class_id, ClassId}).")
 
     for m in methods: genMethodId(m)
-    print "method_id(Name) -> exit({unknown_method_name, Name})."
+    print("method_id(Name) -> exit({unknown_method_name, Name}).")
 
     for m in methods: genMethodHasContent(m)
-    print "method_has_content(Name) -> exit({unknown_method_name, Name})."
+    print("method_has_content(Name) -> exit({unknown_method_name, Name}).")
 
     for m in methods: genMethodIsSynchronous(m)
-    print "is_method_synchronous(Name) -> exit({unknown_method_name, Name})."
+    print("is_method_synchronous(Name) -> exit({unknown_method_name, Name}).")
 
     for m in methods: genMethodRecord(m)
-    print "method_record(Name) -> exit({unknown_method_name, Name})."
+    print("method_record(Name) -> exit({unknown_method_name, Name}).")
 
     for m in methods: genMethodFieldNames(m)
-    print "method_fieldnames(Name) -> exit({unknown_method_name, Name})."
+    print("method_fieldnames(Name) -> exit({unknown_method_name, Name}).")
 
     for m in methods: genDecodeMethodFields(m)
-    print "decode_method_fields(Name, BinaryFields) ->"
-    print "  rabbit_misc:frame_error(Name, BinaryFields)."
+    print("decode_method_fields(Name, BinaryFields) ->")
+    print("  rabbit_misc:frame_error(Name, BinaryFields).")
 
     for c in spec.allClasses(): genDecodeProperties(c)
-    print "decode_properties(ClassId, _BinaryFields) -> exit({unknown_class_id, ClassId})."
+    print("decode_properties(ClassId, _BinaryFields) -> exit({unknown_class_id, ClassId}).")
 
     for m in methods: genEncodeMethodFields(m)
-    print "encode_method_fields(Record) -> exit({unknown_method_name, element(1, Record)})."
+    print("encode_method_fields(Record) -> exit({unknown_method_name, element(1, Record)}).")
 
     for c in spec.allClasses(): genEncodeProperties(c)
-    print "encode_properties(Record) -> exit({unknown_properties_record, Record})."
+    print("encode_properties(Record) -> exit({unknown_properties_record, Record}).")
 
     for (c,v,cls) in spec.constants: genLookupException(c,v,cls)
-    print "lookup_amqp_exception(Code) ->"
-    print "  rabbit_log:warning(\"Unknown AMQP error code '~p'~n\", [Code]),"
-    print "  {true, ?INTERNAL_ERROR, <<\"INTERNAL_ERROR\">>}."
+    print("lookup_amqp_exception(Code) ->")
+    print("  rabbit_log:warning(\"Unknown AMQP error code '~p'~n\", [Code]),")
+    print("  {true, ?INTERNAL_ERROR, <<\"INTERNAL_ERROR\">>}.")
 
     for(c,v,cls) in spec.constants: genAmqpException(c,v,cls)
-    print "amqp_exception(_Code) -> undefined."
+    print("amqp_exception(_Code) -> undefined.")
 
 def genHrl(spec):
     def fieldNameList(fields):
@@ -559,7 +564,7 @@ def genHrl(spec):
 
     def fieldNameListDefaults(fields):
         def fillField(field):
-            result = erlangize(f.name)
+            result = erlangize(field.name)
             if field.defaultvalue != None:
                 conv_fn = erlangDefaultValueTypeConvMap[type(field.defaultvalue)]
                 result += ' = ' + conv_fn(field.defaultvalue)
@@ -569,18 +574,18 @@ def genHrl(spec):
     methods = spec.allMethods()
 
     printFileHeader()
-    print "-define(PROTOCOL_PORT, %d)." % (spec.port)
+    print("-define(PROTOCOL_PORT, %d)." % (spec.port))
 
     for (c,v,cls) in spec.constants:
-        print "-define(%s, %s)." % (erlangConstantName(c), v)
+        print("-define(%s, %s)." % (erlangConstantName(c), v))
 
-    print "%% Method field records."
+    print("%% Method field records.")
     for m in methods:
-        print "-record(%s, {%s})." % (m.erlangName(), fieldNameListDefaults(m.arguments))
+        print("-record(%s, {%s})." % (m.erlangName(), fieldNameListDefaults(m.arguments)))
 
-    print "%% Class property records."
+    print("%% Class property records.")
     for c in spec.allClasses():
-        print "-record('P_%s', {%s})." % (erlangize(c.name), fieldNameList(c.fields))
+        print("-record('P_%s', {%s})." % (erlangize(c.name), fieldNameList(c.fields)))
 
 
 def generateErl(specPath):
diff --git a/deps/rabbit_common/erlang.mk b/deps/rabbit_common/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simply writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards `in` and `beetween` for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = Zab protocol implementation in Erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# Print one package's metadata block, used by the `search' target below.
+# $(1) is the package key; reads the pkg_$(1)_* variables from the index.
+# The "Pkg name" line is printed only when the key differs from the app
+# name (core_eq is an erlang.mk helper defined elsewhere in this file).
+# The trailing blank line before endef is intentional: it separates
+# entries when pkg_print is expanded repeatedly inside a foreach.
+define pkg_print
+	$(verbose) printf "%s\n" \
+		$(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+		"App name:    $(pkg_$(1)_name)" \
+		"Description: $(pkg_$(1)_description)" \
+		"Home page:   $(pkg_$(1)_homepage)" \
+		"Fetch with:  $(pkg_$(1)_fetch)" \
+		"Repository:  $(pkg_$(1)_repo)" \
+		"Commit:      $(pkg_$(1)_commit)" \
+		""
+
+endef
+
+# `make search q=<string>' prints every package whose name or description
+# contains the string (case-insensitive, via the core_lc lowercase
+# helper); without q, every package in $(PACKAGES) is printed.
+search:
+ifdef q
+	$(foreach p,$(PACKAGES), \
+		$(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+			$(call pkg_print,$(p))))
+else
+	$(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Resolve a dependency's real name, repository and commit: an explicit
+# dep_<name> (or dep_<name>_commit) variable wins, otherwise fall back
+# to the pkg_* package index above. git:// GitHub URLs are rewritten to
+# https:// to avoid the unauthenticated git protocol.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+	$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# First-level directories under APPS_DIR (excluding APPS_DIR itself),
+# and one $(DEPS_DIR)/<name> directory per non-ignored dependency.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Make sure APPS_DIR and DEPS_DIR appear on ERL_LIBS, without adding
+# them twice if the caller's environment already has them.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+	ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+	ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# Build every application under apps/. When already recursing into a
+# single app (IS_APP=1) the target is a no-op to stop the recursion.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+	$(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+	$(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		mkdir -p $$dep/ebin || exit $$?; \
+	done
+# apps.log records the apps already built during this run, so each app
+# is built at most once even when reached through several paths.
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+			:; \
+		else \
+			echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+			$(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+		fi \
+	done
+endif
+
+# Fetch and build all dependencies. Set SKIP_DEPS to turn this into a
+# no-op (useful for offline or release builds).
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+	$(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+	$(verbose) mkdir -p $(ERLANG_MK_TMP)
+# deps.log guards against building the same dependency twice; a
+# dependency without any kind of Makefile is a hard error (exit 2).
+	$(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+		if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+			:; \
+		else \
+			echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+			if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+				$(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+			else \
+				echo "Error: No Makefile to build dependency $$dep."; \
+				exit 2; \
+			fi \
+		fi \
+	done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# Decide how to patch a freshly fetched dependency $(1) so it builds
+# under erlang.mk:
+#   - it ships its own erlang.mk: regenerate its .app.src and replace
+#     that erlang.mk with ours (dep_autopatch_erlang_mk);
+#   - it has a Makefile that includes ../*.mk, or that mentions rebar
+#     directly or via other *.mk files: full autopatch (dep_autopatch2);
+#   - it has a Makefile with no rebar traces: only regenerate the .app;
+#   - no Makefile at all: dep_autopatch2, unless there is no src/
+#     directory either, in which case a no-op Makefile is generated.
+define dep_autopatch
+	if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+		$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+		$(call dep_autopatch_erlang_mk,$(1)); \
+	elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+		if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+			$(call dep_autopatch2,$(1)); \
+		elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+			$(call dep_autopatch2,$(1)); \
+		elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+			$(call dep_autopatch2,$(1)); \
+		else \
+			$(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+		fi \
+	else \
+		if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+			$(call dep_autopatch_noop,$(1)); \
+		else \
+			$(call dep_autopatch2,$(1)); \
+		fi \
+	fi
+endef
+
+# Common autopatch path: evaluate/normalize the .app.src (and a
+# .app.src.script when present), then either run the full rebar-based
+# autopatch — fetching a local rebar first — if any rebar artifact
+# exists, or fall back to generating a plain erlang.mk Makefile.
+define dep_autopatch2
+	if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+		$(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+	fi; \
+	$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+	if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+		$(call dep_autopatch_fetch_rebar); \
+		$(call dep_autopatch_rebar,$(1)); \
+	else \
+		$(call dep_autopatch_gen,$(1)); \
+	fi
+endef
+
+# Give the dependency a do-nothing Makefile so the recursive $(MAKE)
+# invocation in the deps target still succeeds.
+define dep_autopatch_noop
+	printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default: the dependency's
+# own copy is replaced by a one-line include of this erlang.mk. Set
+# NO_AUTOPATCH_ERLANG_MK to keep the dependency's copy instead (the
+# define then becomes a shell no-op, `:').
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+	echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+		> $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+	:
+endef
+endif
+
+# Fallback for non-rebar deps: generate a minimal Makefile that builds
+# with debug_info and includes the top-level erlang.mk.
+define dep_autopatch_gen
+	printf "%s\n" \
+		"ERLC_OPTS = +debug_info" \
+		"include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build a private copy of rebar once, into
+# $(ERLANG_MK_TMP)/rebar, pinned to a known-good commit so autopatching
+# is reproducible regardless of rebar's upstream HEAD.
+define dep_autopatch_fetch_rebar
+	mkdir -p $(ERLANG_MK_TMP); \
+	if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+		git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+		cd $(ERLANG_MK_TMP)/rebar; \
+		git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+		$(MAKE); \
+		cd -; \
+	fi
+endef
+
+# Replace a rebar-based dependency's Makefile with one generated from
+# its rebar config (dep_autopatch_rebar.erl), keeping the original as
+# Makefile.orig.mk, and remove any prebuilt ebin/$(1).app.
+define dep_autopatch_rebar
+	if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+		mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+	fi; \
+	$(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+	rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl: Erlang snippet (run via $(call erlang,...)) that
+# regenerates the {modules, [...]} entry of the dependency's ebin/$1.app
+# from the .erl files actually present under $(DEPS_DIR)/$1/src.
+# No-op when the .app file does not exist.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluate the dynamic
+# src/$1.app.src.script with an empty binding set and write the resulting
+# term back as a static src/$1.app.src.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalise a dependency's app.src so erlang.mk
+# can manage it:
+# - read src/$1.app.src, falling back to ebin/$1.app when it is absent;
+# - empty the modules list (dep_autopatch_app rebuilds it later);
+# - rewrite the rebar-style atom vsn 'git' to the string "git";
+# - insert a default {registered, []} entry when missing;
+# - when the input was ebin/$1.app, delete it after writing the .app.src.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# dep_fetch_<method>: shell fragments that fetch one dependency into
+# $(DEPS_DIR)/<name>.
+
+# git: clone with checkout deferred (-n), then check out the pinned
+# commit/tag/branch.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# git-submodule: the dependency is vendored as a submodule of this repo.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# hg: clone without a working copy (-U), then update to the pinned rev.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# svn: plain checkout of the repository URL (no separate pinning step).
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# cp: recursive copy from a local path.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Download a package tarball from the hex.pm mirror and unpack its inner
+# contents.tar.gz into the dependency directory. $1 = package name,
+# $2 = version. Needs the ssl and inets OTP applications for httpc.
+# NOTE(review): s3.amazonaws.com/s3.hex.pm is the legacy hex.pm endpoint;
+# confirm it is still served (newer erlang.mk releases use repo.hex.pm).
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Last-resort fetch method: report the unresolved dependency on stderr and
+# abort (78 = EX_CONFIG from sysexits).
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch method name for dependency $1:
+# - dep_$1 defined: use its first word when a matching dep_fetch_* macro
+#   exists, otherwise "legacy" when building inside a dep (IS_DEP) or
+#   "fail";
+# - otherwise: the package-index entry pkg_$1_fetch, or "fail".
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# dep_target: expanded once per dependency via $(eval $(call ...)).
+# Creates the $(DEPS_DIR)/<name> target that: refuses deps that shadow an
+# application in $(APPS_DIR) (exit 17), fetches via the dep_fetch-resolved
+# method, runs autoreconf/./configure when autoconf inputs are present
+# (configure failures are tolerated via the leading "-"), and autopatches
+# the dep unless it is listed in NO_AUTOPATCH. The amqp_client/rabbit
+# branches are RabbitMQ-specific hooks that clone rabbitmq-codegen /
+# rabbitmq-server instead of autopatching.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch/patch target (dep_target) per build-time and
+# run-time dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+# Recurse clean/distclean into each application of a multi-app project;
+# IS_APP=1 stops the sub-make from recursing again.
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+# Dependencies are removed wholesale; they are re-fetched on next build.
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# $1 = makefile path below $(DEPS_DIR), $2 = the dep that provides it.
+# The empty rule makes the -include'd plugin file depend on its dep
+# having been fetched.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# A bare "dep" entry loads dep/plugins.mk; a "dep/file.mk" entry loads
+# that specific file.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH the beam name encodes the template's path below
+# DTL_PATH ("/" replaced by "_"); otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Compile each template in $1 with erlydtl into ebin/. The module name is
+# the lower-cased template name (path-mangled when DTL_FULL_PATH is set)
+# with DTL_SUFFIX appended.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# $? = only the templates newer than the last .app build.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate Erlang sources from the .proto files in $1 (via
+# protobuffs_compile), compile them into ebin/, then remove the generated
+# .erl intermediates.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Emits <dir>/ebin/*.erl and <dir>/include/*.hrl for each .proto in $1,
+# where <dir> is the grandparent directory of the .proto file.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# <tool>_verbose_0 prints a short " TOOL  target" progress line (V=0),
+# <tool>_verbose_2 enables shell tracing (V=2); with any other V the
+# *_verbose_$(V) lookup is empty so make echoes the full command.
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# ebin/test marks the previous build as a test build; force a clean
+# rebuild before building normally in that case.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# Template for the generated ebin/$(PROJECT).app. Library applications
+# (no src/$(PROJECT_MOD).erl) get an empty registered list and no
+# {mod, ...} entry. $1 = commit id (recorded only when built as a
+# dependency, IS_DEP), $2 = comma-separated module list.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# erlc +noobj writes the generated .erl/.hrl/.asn1db next to the .asn1
+# input; move them where the normal Erlang build will pick them up.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile changed MIBs ($?) to priv/mibs/*.bin, then generate the
+# corresponding include/*.hrl headers from the .bin files.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+# leex/yecc generate src/%.erl from the changed %.xrl/%.yrl inputs ($?).
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl: build the dependency file $(PROJECT).d ($1). Parses every
+# module's attributes (-behaviour/-behavior, -compile parse_transform,
+# -include, -include_lib, -import) to record file-level dependencies in an
+# ets bag and a digraph, emits one "file:: deps; @touch $@" rule per
+# source file, and appends to COMPILE_FIRST the modules that other modules
+# depend on, in reverse topological order.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+# Build every doc dependency; abort on the first failure instead of letting
+# the shell for-loop mask non-final errors (the loop's status is otherwise
+# only that of its last iteration).
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep || exit $$?; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+# Build every release dependency; abort on the first failure instead of
+# letting the shell for-loop mask non-final errors (the loop's status is
+# otherwise only that of its last iteration).
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep || exit $$?; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+# Build every test dependency (as a dependency build, IS_DEP=1 strips
+# -Werror); abort on the first failure instead of letting the shell
+# for-loop mask non-final errors.
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1 || exit $$?; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# install(1)'s -g option takes a *group* and -o an *owner*; the original
+# passed `id -u` (uid) to -g and `id -g` (gid) to -o, which fails or sets
+# wrong ownership whenever uid != gid. Pass them the right way round.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Generate a C NIF stub plus its Erlang wrapper module. Requires n=NAME;
+# without the guard below, an unset n would make the wildcard checks test
+# "$(C_SRC_DIR)/.c" and render templates into "src/.erl". The guard matches
+# the usage checks of the sibling new-app/new-lib/new targets.
+new-nif:
+ifndef n
+       $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
+endif
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbit_common/include/old_builtin_types.hrl b/deps/rabbit_common/include/old_builtin_types.hrl
new file mode 100644 (file)
index 0000000..db3666b
--- /dev/null
@@ -0,0 +1,30 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is Pivotal Software, Inc.
+%% Copyright (c) 2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+%% Old builtin types found in Erlang R16B03.
+-ifdef(use_old_builtin_types).
+-define(ARRAY_TYPE, array).
+-define(DICT_TYPE, dict).
+-define(GB_SET_TYPE, gb_set).
+-define(QUEUE_TYPE, queue).
+-define(SET_TYPE, set).
+-else.
+-define(ARRAY_TYPE, array:array).
+-define(DICT_TYPE, dict:dict).
+-define(GB_SET_TYPE, gb_sets:set).
+-define(QUEUE_TYPE, queue:queue).
+-define(SET_TYPE, sets:set).
+-endif.
similarity index 77%
rename from rabbitmq-server/include/rabbit.hrl
rename to deps/rabbit_common/include/rabbit.hrl
index 5b90956122363596f6f4245997d5dee49002570d..a514390bdda7456d5918af199b9557ba04ae2050 100644 (file)
@@ -14,6 +14,8 @@
 %% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
 %%
 
+-include("old_builtin_types.hrl").
+
 %% Passed around most places
 -record(user, {username,
                tags,
 -record(auth_user, {username,
                     tags,
                     impl}).
+%% Passed to authz backends.
+-record(authz_socket_info, {sockname, peername}).
 
 %% Implementation for the internal auth backend
--record(internal_user, {username, password_hash, tags}).
+-record(internal_user, {
+    username,
+    password_hash,
+    tags,
+    %% password hashing implementation module,
+    %% typically rabbit_password_hashing_* but can
+    %% come from a plugin
+    hashing_algorithm}).
 -record(permission, {configure, write, read}).
 -record(user_vhost, {username, virtual_host}).
 -record(user_permission, {user_vhost, permission}).
          payload_fragments_rev %% list of binaries, in reverse order (!)
          }).
 
--record(resource, {virtual_host, kind, name}).
+-record(resource, {
+    virtual_host,
+    %% exchange, queue, ...
+    kind,
+    %% name as a binary
+    name
+}).
 
 %% fields described as 'transient' here are cleared when writing to
 %% rabbit_durable_<thing>
@@ -61,7 +78,8 @@
           policy,                      %% durable, implicit update as above
           gm_pids,                     %% transient
           decorators,                  %% transient, recalculated as above
-          state}).                     %% durable (have we crashed?)
+          state,                       %% durable (have we crashed?)
+          policy_version}).
 
 -record(exchange_serial, {name, next}).
 
 
 -record(runtime_parameters, {key, value}).
 
--record(basic_message, {exchange_name, routing_keys = [], content, id,
-                        is_persistent}).
+-record(basic_message,
+        {exchange_name,     %% The exchange where the message was received
+         routing_keys = [], %% Routing keys used during publish
+         content,           %% The message content
+         id,                %% A `rabbit_guid:gen()` generated id
+         is_persistent}).   %% Whether the message was published as persistent
+
+-record(delivery,
+        {mandatory,  %% Whether the message was published as mandatory
+         confirm,    %% Whether the message needs confirming
+         sender,     %% The pid of the process that created the delivery
+         message,    %% The #basic_message record
+         msg_seq_no, %% Msg Sequence Number from the channel publish_seqno field
+         flow}).     %% Should flow control be used for this delivery
 
--record(ssl_socket, {tcp, ssl}).
--record(delivery, {mandatory, confirm, sender, message, msg_seq_no, flow}).
 -record(amqp_error, {name, explanation = "", method = none}).
 
 -record(event, {type, props, reference = undefined, timestamp}).
 
 %%----------------------------------------------------------------------------
 
--define(COPYRIGHT_MESSAGE, "Copyright (C) 2007-2015 Pivotal Software, Inc.").
+-define(COPYRIGHT_MESSAGE, "Copyright (C) 2007-2016 Pivotal Software, Inc.").
 -define(INFORMATION_MESSAGE, "Licensed under the MPL.  See http://www.rabbitmq.com/").
--define(ERTS_MINIMUM, "5.6.3").
+-define(OTP_MINIMUM, "R16B03").
+-define(ERTS_MINIMUM, "5.10.4").
 
 %% EMPTY_FRAME_SIZE, 8 = 1 + 2 + 4 + 1
 %%  - 1 byte of frame type
 -define(EMPTY_FRAME_SIZE, 8).
 
 -define(MAX_WAIT, 16#ffffffff).
+-define(SUPERVISOR_WAIT, infinity).
+-define(WORKER_WAIT, 30000).
 
 -define(HIBERNATE_AFTER_MIN,        1000).
 -define(DESIRED_HIBERNATE,         10000).
 -define(ROUTING_HEADERS, [<<"CC">>, <<"BCC">>]).
 -define(DELETED_HEADER, <<"BCC">>).
 
+-define(EXCHANGE_DELETE_IN_PROGRESS_COMPONENT, <<"exchange-delete-in-progress">>).
+
+-define(CHANNEL_OPERATION_TIMEOUT, rabbit_misc:get_channel_operation_timeout()).
+
 %% Trying to send a term across a cluster larger than 2^31 bytes will
 %% cause the VM to exit with "Absurdly large distribution output data
 %% buffer". So we limit the max message size to 2^31 - 10^6 bytes (1MB
diff --git a/deps/rabbit_common/include/rabbit_misc.hrl b/deps/rabbit_common/include/rabbit_misc.hrl
new file mode 100644 (file)
index 0000000..26dce4b
--- /dev/null
@@ -0,0 +1,17 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-define(RPC_TIMEOUT, infinity).
similarity index 96%
rename from rabbitmq-server/include/rabbit_msg_store.hrl
rename to deps/rabbit_common/include/rabbit_msg_store.hrl
index 8bcf2ce6298b6ea5bc5056e061b0edfbf75ba761..8cf830cf803a4f843e6406638ea5451e3363e460 100644 (file)
 
 -include("rabbit.hrl").
 
--ifdef(use_specs).
-
 -type(msg() :: any()).
 
--endif.
-
 -record(msg_location, {msg_id, ref_count, file, offset, total_size}).
diff --git a/deps/rabbit_common/mk/rabbitmq-build.mk b/deps/rabbit_common/mk/rabbitmq-build.mk
new file mode 100644 (file)
index 0000000..52b3ed3
--- /dev/null
@@ -0,0 +1,70 @@
+# --------------------------------------------------------------------
+# Compiler flags.
+# --------------------------------------------------------------------
+
+# FIXME: We copy Erlang.mk default flags here: rabbitmq-build.mk is
+# loaded as a plugin, so before those variables are defined. And because
+# Erlang.mk uses '?=', the flags we set here override the default set.
+#
+# See: https://github.com/ninenines/erlang.mk/issues/502
+
+WARNING_OPTS += +debug_info \
+               +warn_export_vars \
+               +warn_shadow_vars \
+               +warn_obsolete_guard
+ERLC_OPTS += -Werror $(WARNING_OPTS)
+TEST_ERLC_OPTS += $(WARNING_OPTS)
+
+define compare_version
+$(shell awk 'BEGIN {
+       split("$(1)", v1, ".");
+       version1 = v1[1] * 1000000 + v1[2] * 10000 + v1[3] * 100 + v1[4];
+
+       split("$(2)", v2, ".");
+       version2 = v2[1] * 1000000 + v2[2] * 10000 + v2[3] * 100 + v2[4];
+
+       if (version1 $(3) version2) {
+               print "true";
+       } else {
+               print "false";
+       }
+}')
+endef
+
+# Erlang R16B03 has no support for new types in Erlang 17.0, leading to
+# a build-time error.
+ERTS_VER := $(shell erl -version 2>&1 | sed -E 's/.* version //')
+old_builtin_types_MAX_ERTS_VER = 6.0
+ifeq ($(call compare_version,$(ERTS_VER),$(old_builtin_types_MAX_ERTS_VER),<),true)
+RMQ_ERLC_OPTS += -Duse_old_builtin_types
+endif
+
+# Push our compilation options to both the normal and test ERLC_OPTS.
+ERLC_OPTS += $(RMQ_ERLC_OPTS)
+TEST_ERLC_OPTS += $(RMQ_ERLC_OPTS)
+
+# --------------------------------------------------------------------
+# Common test flags.
+# --------------------------------------------------------------------
+
+# Disable most messages on Travis and Concourse.
+#
+# Concourse doesn't set any environment variables to help us automate
+# things. In rabbitmq-ci, we run tests under the `concourse` user so,
+# look at that...
+CT_QUIET_FLAGS = -verbosity 50 \
+                -erl_args \
+                -kernel error_logger silent
+ifdef TRAVIS
+CT_OPTS += $(CT_QUIET_FLAGS)
+endif
+ifdef CONCOURSE
+CT_OPTS += $(CT_QUIET_FLAGS)
+endif
+
+# Enable JUnit-like report on Jenkins. Jenkins parses those reports so
+# the results can be browsed from its UI. Furthermore, it displays a
+# graph showing evolution of the results over time.
+ifdef JENKINS_HOME
+CT_OPTS += -ct_hooks cth_surefire
+endif
diff --git a/deps/rabbit_common/mk/rabbitmq-components.mk b/deps/rabbit_common/mk/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
diff --git a/deps/rabbit_common/mk/rabbitmq-dist.mk b/deps/rabbit_common/mk/rabbitmq-dist.mk
new file mode 100644 (file)
index 0000000..94fef98
--- /dev/null
@@ -0,0 +1,147 @@
+.PHONY: dist test-dist do-dist clean-dist
+
+DIST_DIR = plugins
+
+dist_verbose_0 = @echo " DIST  " $@;
+dist_verbose_2 = set -x;
+dist_verbose = $(dist_verbose_$(V))
+
+# We take the version of an Erlang application from the .app file. This
+# macro is called like this:
+#
+#   $(call get_app_version,/path/to/name.app.src)
+
+define get_app_version
+$(shell awk '
+/{ *vsn *, *"/ {
+       vsn=$$0;
+       sub(/.*{ *vsn, *"/, "", vsn);
+       sub(/".*/, "", vsn);
+       print vsn;
+       exit;
+}' $(1))
+endef
+
+# Define the target to create an .ez plugin archive. This macro is
+# called like this:
+#
+#   $(call do_ez_target,app_name,app_version,app_dir)
+
+define do_ez_target
+dist_$(1)_ez_dir = $$(if $(2),$(DIST_DIR)/$(1)-$(2),$$(if $$(VERSION),$(DIST_DIR)/$(1)-$$(VERSION),$(DIST_DIR)/$(1)))
+dist_$(1)_ez = $$(dist_$(1)_ez_dir).ez
+
+
+$$(dist_$(1)_ez): APP     = $(1)
+$$(dist_$(1)_ez): VSN     = $(2)
+$$(dist_$(1)_ez): SRC_DIR = $(3)
+$$(dist_$(1)_ez): EZ_DIR  = $$(abspath $$(dist_$(1)_ez_dir))
+$$(dist_$(1)_ez): EZ      = $$(dist_$(1)_ez)
+$$(dist_$(1)_ez): $$(if $$(wildcard $(3)/ebin $(3)/include $(3)/priv),\
+       $$(call core_find,$$(wildcard $(3)/ebin $(3)/include $(3)/priv),*),)
+
+# If the application's Makefile defines a `list-dist-deps` target, we
+# use it to populate the dependencies list. This is useful when the
+# application has also a `prepare-dist` target to modify the created
+# tree before we make an archive out of it.
+
+ifeq ($$(shell test -f $(3)/rabbitmq-components.mk \
+       && grep -q '^list-dist-deps::' $(3)/Makefile && echo yes),yes)
+$$(dist_$(1)_ez): $$(patsubst %,$(3)/%, \
+       $$(shell $(MAKE) --no-print-directory -C $(3) list-dist-deps \
+       APP=$(1) VSN=$(2) EZ_DIR=$$(abspath $$(dist_$(1)_ez_dir))))
+endif
+
+DIST_EZS += $$(dist_$(1)_ez)
+
+endef
+
+# Real entry point: it tests the existence of an .app file to determine
+# if it is an Erlang application (and therefore if it should be provided
+# as an .ez plugin archive). Then, it calls do_ez_target. It should be
+# called as:
+#
+#   $(call ez_target,app_name)
+
+define ez_target
+dist_$(1)_appdir = $$(if $$(filter $(PROJECT),$(1)), \
+                       $(CURDIR), \
+                       $$(if $$(shell test -d $(APPS_DIR)/$(1) && echo OK), \
+                             $(APPS_DIR)/$(1), \
+                             $(DEPS_DIR)/$(1)))
+dist_$(1)_appfile = $$(dist_$(1)_appdir)/ebin/$(1).app
+
+$$(if $$(shell test -f $$(dist_$(1)_appfile) && echo OK), \
+  $$(eval $$(call do_ez_target,$(1),$$(call get_app_version,$$(dist_$(1)_appfile)),$$(dist_$(1)_appdir))))
+
+endef
+
+ifneq ($(filter do-dist,$(MAKECMDGOALS)),)
+# The following code is evaluated only when running "make do-dist",
+# otherwise it would trigger an infinite loop, as this code calls "make
+# list-dist-deps" (see do_ez_target).
+ifdef DIST_PLUGINS_LIST
+# Now, try to create an .ez target for the top-level project and all
+# dependencies.
+
+ifeq ($(wildcard $(DIST_PLUGINS_LIST)),)
+$(error DIST_PLUGINS_LIST ($(DIST_PLUGINS_LIST)) is missing)
+endif
+
+$(eval $(foreach app, \
+  $(filter-out rabbit,$(sort $(notdir $(shell cat $(DIST_PLUGINS_LIST)))) $(PROJECT)), \
+  $(call ez_target,$(app))))
+endif
+endif
+
+# The actual recipe to create the .ez plugin archive. Some variables are
+# defined in the do_ez_target macro above. All .ez archives are also
+# listed in this do_ez_target macro.
+
+RSYNC ?= rsync
+RSYNC_V_0 =
+RSYNC_V_1 = -v
+RSYNC_V = $(RSYNC_V_$(V))
+
+ZIP ?= zip
+ZIP_V_0 = -q
+ZIP_V_1 =
+ZIP_V = $(ZIP_V_$(V))
+
+$(DIST_DIR)/%.ez:
+       $(verbose) rm -rf $(EZ_DIR) $(EZ)
+       $(verbose) mkdir -p $(EZ_DIR)
+       $(dist_verbose) $(RSYNC) -a $(RSYNC_V) \
+               --include '/ebin/***' \
+               --include '/include/***' \
+               --include '/priv/***' \
+               --exclude '*' \
+               $(SRC_DIR)/ $(EZ_DIR)/
+       @# Give a chance to the application to make any modification it
+       @# wants to the tree before we make an archive.
+       $(verbose) ! (test -f $(SRC_DIR)/rabbitmq-components.mk \
+               && grep -q '^prepare-dist::' $(SRC_DIR)/Makefile) || \
+               $(MAKE) --no-print-directory -C $(SRC_DIR) prepare-dist \
+               APP=$(APP) VSN=$(VSN) EZ_DIR=$(EZ_DIR)
+       $(verbose) (cd $(DIST_DIR) && $(ZIP) $(ZIP_V) -r $*.ez $*)
+       $(verbose) rm -rf $(EZ_DIR)
+
+# We need to recurse because the top-level make instance is evaluated
+# before dependencies are downloaded.
+
+MAYBE_APPS_LIST = $(if $(shell test -f $(ERLANG_MK_TMP)/apps.log && echo OK),$(ERLANG_MK_TMP)/apps.log)
+
+dist:: $(ERLANG_MK_RECURSIVE_DEPS_LIST) all
+       $(gen_verbose) $(MAKE) do-dist DIST_PLUGINS_LIST="$(ERLANG_MK_RECURSIVE_DEPS_LIST) $(MAYBE_APPS_LIST)"
+
+test-dist:: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) test-build
+       $(gen_verbose) $(MAKE) do-dist DIST_PLUGINS_LIST="$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) $(MAYBE_APPS_LIST)"
+
+do-dist:: $(DIST_EZS)
+       $(verbose) unwanted='$(filter-out $(DIST_EZS),$(wildcard $(DIST_DIR)/*.ez))'; \
+       test -z "$$unwanted" || (echo " RM     $$unwanted" && rm -f $$unwanted)
+
+clean-dist::
+       $(gen_verbose) rm -rf $(DIST_DIR)
+
+clean:: clean-dist
diff --git a/deps/rabbit_common/mk/rabbitmq-plugin.mk b/deps/rabbit_common/mk/rabbitmq-plugin.mk
new file mode 100644 (file)
index 0000000..2e0db8e
--- /dev/null
@@ -0,0 +1,15 @@
+ifeq ($(filter rabbitmq-build.mk,$(notdir $(MAKEFILE_LIST))),)
+include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-build.mk
+endif
+
+ifeq ($(filter rabbitmq-dist.mk,$(notdir $(MAKEFILE_LIST))),)
+include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-dist.mk
+endif
+
+ifeq ($(filter rabbitmq-run.mk,$(notdir $(MAKEFILE_LIST))),)
+include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-run.mk
+endif
+
+ifeq ($(filter rabbitmq-tools.mk,$(notdir $(MAKEFILE_LIST))),)
+include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-tools.mk
+endif
diff --git a/deps/rabbit_common/mk/rabbitmq-run.mk b/deps/rabbit_common/mk/rabbitmq-run.mk
new file mode 100644 (file)
index 0000000..5bf78b3
--- /dev/null
@@ -0,0 +1,269 @@
+.PHONY: run-broker run-background-broker run-node run-background-node \
+       start-background-node start-rabbit-on-node \
+       stop-rabbit-on-node set-resource-alarm clear-resource-alarm \
+       stop-node
+
+ifeq ($(filter rabbitmq-dist.mk,$(notdir $(MAKEFILE_LIST))),)
+include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-dist.mk
+endif
+
+exec_verbose_0 = @echo " EXEC  " $@;
+exec_verbose_2 = set -x;
+exec_verbose = $(exec_verbose_$(V))
+
+ifeq ($(PLATFORM),msys2)
+TEST_TMPDIR ?= $(TEMP)/rabbitmq-test-instances
+else
+TMPDIR ?= /tmp
+TEST_TMPDIR ?= $(TMPDIR)/rabbitmq-test-instances
+endif
+
+# Location of the scripts controlling the broker.
+ifeq ($(PROJECT),rabbit)
+RABBITMQ_BROKER_DIR ?= $(CURDIR)
+else
+RABBITMQ_BROKER_DIR ?= $(DEPS_DIR)/rabbit
+endif
+RABBITMQ_SCRIPTS_DIR ?= $(RABBITMQ_BROKER_DIR)/scripts
+
+ifeq ($(PLATFORM),msys2)
+RABBITMQ_PLUGINS ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-plugins.bat
+RABBITMQ_SERVER ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-server.bat
+RABBITMQCTL ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmqctl.bat
+else
+RABBITMQ_PLUGINS ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-plugins
+RABBITMQ_SERVER ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-server
+RABBITMQCTL ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmqctl
+endif
+
+export RABBITMQ_SCRIPTS_DIR RABBITMQCTL RABBITMQ_PLUGINS RABBITMQ_SERVER
+
+# We export MAKE to be sure scripts and tests use the proper command.
+export MAKE
+
+# We need to pass the location of codegen to the Java client ant
+# process.
+CODEGEN_DIR = $(DEPS_DIR)/rabbitmq_codegen
+PYTHONPATH = $(CODEGEN_DIR)
+export PYTHONPATH
+
+ANT ?= ant
+ANT_FLAGS += -Dmake.bin=$(MAKE) \
+            -DUMBRELLA_AVAILABLE=true \
+            -Drabbitmqctl.bin=$(RABBITMQCTL) \
+            -Dsibling.codegen.dir=$(CODEGEN_DIR)
+ifeq ($(PROJECT),rabbitmq_test)
+ANT_FLAGS += -Dsibling.rabbitmq_test.dir=$(CURDIR)
+else
+ANT_FLAGS += -Dsibling.rabbitmq_test.dir=$(DEPS_DIR)/rabbitmq_test
+endif
+export ANT ANT_FLAGS
+
+node_tmpdir = $(TEST_TMPDIR)/$(1)
+node_pid_file = $(call node_tmpdir,$(1))/$(1).pid
+node_log_base = $(call node_tmpdir,$(1))/log
+node_mnesia_base = $(call node_tmpdir,$(1))/mnesia
+node_mnesia_dir = $(call node_mnesia_base,$(1))/$(1)
+node_plugins_expand_dir = $(call node_tmpdir,$(1))/plugins
+node_enabled_plugins_file = $(call node_tmpdir,$(1))/enabled_plugins
+
+# Broker startup variables for the test environment.
+RABBITMQ_NODENAME ?= rabbit
+RABBITMQ_NODENAME_FOR_PATHS ?= $(RABBITMQ_NODENAME)
+NODE_TMPDIR ?= $(call node_tmpdir,$(RABBITMQ_NODENAME_FOR_PATHS))
+
+RABBITMQ_PID_FILE ?= $(call node_pid_file,$(RABBITMQ_NODENAME_FOR_PATHS))
+RABBITMQ_LOG_BASE ?= $(call node_log_base,$(RABBITMQ_NODENAME_FOR_PATHS))
+RABBITMQ_MNESIA_BASE ?= $(call node_mnesia_base,$(RABBITMQ_NODENAME_FOR_PATHS))
+RABBITMQ_MNESIA_DIR ?= $(call node_mnesia_dir,$(RABBITMQ_NODENAME_FOR_PATHS))
+RABBITMQ_PLUGINS_EXPAND_DIR ?= $(call node_plugins_expand_dir,$(RABBITMQ_NODENAME_FOR_PATHS))
+RABBITMQ_ENABLED_PLUGINS_FILE ?= $(call node_enabled_plugins_file,$(RABBITMQ_NODENAME_FOR_PATHS))
+
+# erlang.mk adds dependencies' ebin directory to ERL_LIBS. This is
+# a sane default, but we prefer to rely on the .ez archives in the
+# `plugins` directory so the plugin code is executed. The `plugins`
+# directory is added to ERL_LIBS by rabbitmq-env.
+DIST_ERL_LIBS = $(shell echo "$(filter-out $(DEPS_DIR),$(subst :, ,$(ERL_LIBS)))" | tr ' ' :)
+
+define basic_script_env_settings
+MAKE="$(MAKE)" \
+ERL_LIBS="$(DIST_ERL_LIBS)" \
+RABBITMQ_NODENAME="$(1)" \
+RABBITMQ_NODE_IP_ADDRESS="$(RABBITMQ_NODE_IP_ADDRESS)" \
+RABBITMQ_NODE_PORT="$(3)" \
+RABBITMQ_PID_FILE="$(call node_pid_file,$(2))" \
+RABBITMQ_LOG_BASE="$(call node_log_base,$(2))" \
+RABBITMQ_MNESIA_BASE="$(call node_mnesia_base,$(2))" \
+RABBITMQ_MNESIA_DIR="$(call node_mnesia_dir,$(2))" \
+RABBITMQ_PLUGINS_DIR="$(CURDIR)/$(DIST_DIR)" \
+RABBITMQ_PLUGINS_EXPAND_DIR="$(call node_plugins_expand_dir,$(2))" \
+RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)"
+endef
+
+BASIC_SCRIPT_ENV_SETTINGS = \
+       $(call basic_script_env_settings,$(RABBITMQ_NODENAME),$(RABBITMQ_NODENAME_FOR_PATHS),$(RABBITMQ_NODE_PORT)) \
+       RABBITMQ_ENABLED_PLUGINS_FILE="$(RABBITMQ_ENABLED_PLUGINS_FILE)"
+
+ERL_CALL := $(shell $(ERL) -eval 'io:format("~s~n", [filename:join(code:lib_dir(erl_interface, bin), case os:type() of {win32, _} -> "erl_call.exe"; _ -> "erl_call" end)]), halt().')
+ERL_CALL_OPTS ?= -sname $(RABBITMQ_NODENAME) -e
+
+test-tmpdir:
+       $(verbose) mkdir -p $(TEST_TMPDIR)
+
+virgin-test-tmpdir:
+       $(gen_verbose) rm -rf $(TEST_TMPDIR)
+       $(verbose) mkdir -p $(TEST_TMPDIR)
+
+node-tmpdir:
+       $(verbose) mkdir -p $(RABBITMQ_LOG_BASE) \
+               $(RABBITMQ_MNESIA_BASE) \
+               $(RABBITMQ_PLUGINS_EXPAND_DIR)
+
+virgin-node-tmpdir:
+       $(gen_verbose) rm -rf $(NODE_TMPDIR)
+       $(verbose) mkdir -p $(RABBITMQ_LOG_BASE) \
+               $(RABBITMQ_MNESIA_BASE) \
+               $(RABBITMQ_PLUGINS_EXPAND_DIR)
+
+.PHONY: test-tmpdir virgin-test-tmpdir node-tmpdir virgin-node-tmpdir
+
+ifeq ($(wildcard ebin/test),)
+$(RABBITMQ_ENABLED_PLUGINS_FILE): dist
+endif
+
+$(RABBITMQ_ENABLED_PLUGINS_FILE): node-tmpdir
+       $(verbose) rm -f $@
+       $(gen_verbose) $(BASIC_SCRIPT_ENV_SETTINGS) \
+         $(RABBITMQ_PLUGINS) set --offline \
+         $$($(BASIC_SCRIPT_ENV_SETTINGS) $(RABBITMQ_PLUGINS) list -m | tr '\n' ' ')
+
+# --------------------------------------------------------------------
+# Run a full RabbitMQ.
+# --------------------------------------------------------------------
+
+define test_rabbitmq_config
+%% vim:ft=erlang:
+
+[
+  {rabbit, [
+      {loopback_users, []}
+    ]}
+].
+endef
+
+define test_rabbitmq_config_with_tls
+%% vim:ft=erlang:
+
+[
+  {rabbit, [
+      {loopback_users, []},
+      {ssl_listeners, [5671]},
+      {ssl_options, [
+          {cacertfile, "$(TEST_TLS_CERTS_DIR_in_config)/testca/cacert.pem"},
+          {certfile,   "$(TEST_TLS_CERTS_DIR_in_config)/server/cert.pem"},
+          {keyfile,    "$(TEST_TLS_CERTS_DIR_in_config)/server/key.pem"},
+          {verify, verify_peer},
+          {fail_if_no_peer_cert, false},
+          {honor_cipher_order, true}]}
+    ]}
+].
+endef
+
+TEST_CONFIG_FILE ?= $(TEST_TMPDIR)/test.config
+TEST_TLS_CERTS_DIR = $(TEST_TMPDIR)/tls-certs
+ifeq ($(PLATFORM),msys2)
+TEST_TLS_CERTS_DIR_in_config = $(shell echo $(TEST_TLS_CERTS_DIR) | sed -E "s,^/([^/]+),\1:,")
+else
+TEST_TLS_CERTS_DIR_in_config = $(TEST_TLS_CERTS_DIR)
+endif
+
+.PHONY: $(TEST_CONFIG_FILE)
+$(TEST_CONFIG_FILE): node-tmpdir
+       $(gen_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(config)))" > $@
+
+$(TEST_TLS_CERTS_DIR): node-tmpdir
+       $(gen_verbose) $(MAKE) -C $(DEPS_DIR)/rabbitmq_ct_helpers/tools/tls-certs \
+               DIR=$(TEST_TLS_CERTS_DIR) all
+
+show-test-tls-certs-dir: $(TEST_TLS_CERTS_DIR)
+       @echo $(TEST_TLS_CERTS_DIR)
+
+run-broker run-tls-broker: RABBITMQ_CONFIG_FILE = $(basename $(TEST_CONFIG_FILE))
+run-broker:     config := $(test_rabbitmq_config)
+run-tls-broker: config := $(test_rabbitmq_config_with_tls)
+run-tls-broker: $(TEST_TLS_CERTS_DIR)
+
+run-broker run-tls-broker: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE) \
+    $(TEST_CONFIG_FILE)
+       $(BASIC_SCRIPT_ENV_SETTINGS) \
+         RABBITMQ_ALLOW_INPUT=true \
+         RABBITMQ_CONFIG_FILE=$(RABBITMQ_CONFIG_FILE) \
+         $(RABBITMQ_SERVER)
+
+run-background-broker: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE)
+       $(BASIC_SCRIPT_ENV_SETTINGS) \
+         $(RABBITMQ_SERVER) -detached
+
+# --------------------------------------------------------------------
+# Run a bare Erlang node.
+# --------------------------------------------------------------------
+
+run-node: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE)
+       $(BASIC_SCRIPT_ENV_SETTINGS) \
+         RABBITMQ_NODE_ONLY=true \
+         RABBITMQ_ALLOW_INPUT=true \
+         $(RABBITMQ_SERVER)
+
+run-background-node: virgin-node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE)
+       $(BASIC_SCRIPT_ENV_SETTINGS) \
+         RABBITMQ_NODE_ONLY=true \
+         $(RABBITMQ_SERVER) -detached
+
+# --------------------------------------------------------------------
+# Used by testsuites.
+# --------------------------------------------------------------------
+
+ifneq ($(LOG_TO_STDIO),yes)
+REDIRECT_STDIO = > $(RABBITMQ_LOG_BASE)/startup_log \
+                2> $(RABBITMQ_LOG_BASE)/startup_err
+endif
+
+start-background-node: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE)
+       $(BASIC_SCRIPT_ENV_SETTINGS) \
+         RABBITMQ_NODE_ONLY=true \
+         $(RABBITMQ_SERVER) \
+         $(REDIRECT_STDIO) &
+       ERL_LIBS="$(DIST_ERL_LIBS)" \
+         $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_PID_FILE) kernel
+
+start-background-broker: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE)
+       $(BASIC_SCRIPT_ENV_SETTINGS) \
+         $(RABBITMQ_SERVER) \
+         $(REDIRECT_STDIO) &
+       ERL_LIBS="$(DIST_ERL_LIBS)" \
+         $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_PID_FILE) && \
+       ERL_LIBS="$(DIST_ERL_LIBS)" \
+         $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) status >/dev/null
+
+start-rabbit-on-node:
+       $(exec_verbose) echo 'rabbit:start().' | $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, ok\}$$/d'
+       $(verbose) ERL_LIBS="$(DIST_ERL_LIBS)" \
+         $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_PID_FILE)
+
+stop-rabbit-on-node:
+       $(exec_verbose) echo 'rabbit:stop().' | $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, ok\}$$/d'
+
+set-resource-alarm:
+       $(exec_verbose) echo 'rabbit_alarm:set_alarm({{resource_limit, $(SOURCE), node()}, []}).' | \
+       $(ERL_CALL) $(ERL_CALL_OPTS)
+
+clear-resource-alarm:
+	$(exec_verbose) echo 'rabbit_alarm:clear_alarm({resource_limit, $(SOURCE), node()}).' | \
+	$(ERL_CALL) $(ERL_CALL_OPTS)
+
+stop-node:
+       $(exec_verbose) ( \
+       pid=$$(test -f $(RABBITMQ_PID_FILE) && cat $(RABBITMQ_PID_FILE)) && \
+       $(ERL_CALL) $(ERL_CALL_OPTS) -q && \
+       while ps -p "$$pid" >/dev/null 2>&1; do sleep 1; done \
+       ) || :
diff --git a/deps/rabbit_common/mk/rabbitmq-tools.mk b/deps/rabbit_common/mk/rabbitmq-tools.mk
new file mode 100644 (file)
index 0000000..769519b
--- /dev/null
@@ -0,0 +1,75 @@
+READY_DEPS = $(foreach DEP,\
+              $(filter $(RABBITMQ_COMPONENTS),$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)), \
+              $(if $(wildcard $(DEPS_DIR)/$(DEP)),$(DEP),))
+
+.PHONY: update-erlang-mk update-rabbitmq-components.mk
+
+update-erlang-mk: erlang-mk
+       $(verbose) if test "$(DO_COMMIT)" = 'yes'; then \
+               git diff --quiet -- erlang.mk \
+               || git commit -m 'Update erlang.mk' -- erlang.mk; \
+       fi
+       $(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \
+               ! test -f $$repo/erlang.mk \
+               || $(MAKE) -C $$repo erlang-mk; \
+               if test "$(DO_COMMIT)" = 'yes'; then \
+                       (cd $$repo; \
+                        git diff --quiet -- erlang.mk \
+                        || git commit -m 'Update erlang.mk' -- erlang.mk); \
+               fi; \
+       done
+
+update-rabbitmq-components-mk: rabbitmq-components-mk
+       $(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \
+               ! test -f $$repo/rabbitmq-components.mk \
+               || $(MAKE) -C $$repo rabbitmq-components-mk; \
+       done
+
+update-contributor-code-of-conduct:
+       $(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \
+               cp $(DEPS_DIR)/rabbit_common/CODE_OF_CONDUCT.md $$repo/CODE_OF_CONDUCT.md; \
+               cp $(DEPS_DIR)/rabbit_common/CONTRIBUTING.md $$repo/CONTRIBUTING.md; \
+       done
+
+ifneq ($(wildcard .git),)
+
+.PHONY: sync-gitremote sync-gituser
+
+sync-gitremote: $(READY_DEPS:%=$(DEPS_DIR)/%+sync-gitremote)
+       @:
+
+%+sync-gitremote:
+       $(exec_verbose) cd $* && \
+               git remote set-url origin \
+               '$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(notdir $*))'
+       $(verbose) cd $* && \
+               git remote set-url --push origin \
+               '$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(notdir $*))'
+
+RMQ_GIT_GLOBAL_USER_NAME := $(shell git config --global user.name)
+RMQ_GIT_GLOBAL_USER_EMAIL := $(shell git config --global user.email)
+RMQ_GIT_USER_NAME := $(shell git config user.name)
+RMQ_GIT_USER_EMAIL := $(shell git config user.email)
+
+sync-gituser: $(READY_DEPS:%=$(DEPS_DIR)/%+sync-gituser)
+       @:
+
+%+sync-gituser:
+ifeq ($(RMQ_GIT_USER_NAME),$(RMQ_GIT_GLOBAL_USER_NAME))
+       $(exec_verbose) cd $* && git config --unset user.name || :
+else
+       $(exec_verbose) cd $* && git config user.name "$(RMQ_GIT_USER_NAME)"
+endif
+ifeq ($(RMQ_GIT_USER_EMAIL),$(RMQ_GIT_GLOBAL_USER_EMAIL))
+       $(verbose) cd $* && git config --unset user.email || :
+else
+       $(verbose) cd $* && git config user.email "$(RMQ_GIT_USER_EMAIL)"
+endif
+
+show-branch: $(READY_DEPS:%=$(DEPS_DIR)/%+show-branch)
+       $(verbose) printf '%-34s %s\n' $(PROJECT): "$$(git symbolic-ref -q --short HEAD || git describe --tags --exact-match)"
+
+%+show-branch:
+       $(verbose) printf '%-34s %s\n' $(notdir $*): "$$(cd $* && (git symbolic-ref -q --short HEAD || git describe --tags --exact-match))"
+
+endif # ($(wildcard .git),)
similarity index 98%
rename from rabbitmq-server/src/app_utils.erl
rename to deps/rabbit_common/src/app_utils.erl
index bab327eab6598fe87d6834fe70f1908f71a9ab4b..6504c3f543ac9bc569cd023c303770254f2fb5d7 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 -module(app_utils).
 
@@ -19,8 +19,6 @@
          stop_applications/1, stop_applications/2, app_dependency_order/2,
          app_dependencies/1]).
 
--ifdef(use_specs).
-
 -type error_handler() :: fun((atom(), any()) -> 'ok').
 
 -spec load_applications([atom()])                   -> 'ok'.
@@ -31,8 +29,6 @@
 -spec app_dependency_order([atom()], boolean())     -> [digraph:vertex()].
 -spec app_dependencies(atom())                      -> [atom()].
 
--endif.
-
 %%---------------------------------------------------------------------------
 %% Public API
 
diff --git a/deps/rabbit_common/src/code_version.erl b/deps/rabbit_common/src/code_version.erl
new file mode 100644 (file)
index 0000000..c6657d8
--- /dev/null
@@ -0,0 +1,258 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ Federation.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+-module(code_version).
+
+-export([update/1]).
+
+%%----------------------------------------------------------------------------
+%% API
+%%----------------------------------------------------------------------------
+
+%%----------------------------------------------------------------------------
+%% @doc Reads the abstract code of the given `Module`, modifies it to adapt to
+%% the current Erlang version, compiles and loads the result.
+%% This function finds the current Erlang version and then selects the function
+%% call for that version, removing all other versions declared in the original
+%% beam file. `code_version:update/1` is triggered by the module itself the
+%% first time an affected function is called.
+%%
+%% The purpose of this functionality is to support the new time API introduced
+%% in ERTS 7.0, while providing compatibility with previous versions.
+%%
+%% `Module` must contain an attribute `erlang_version_support` containing a list of
+%% tuples:
+%%
+%% {ErlangVersion, [{OriginalFunction, Arity, PreErlangVersionFunction,
+%%                   PostErlangVersionFunction}]}
+%%
+%% All these new functions may be exported, and implemented as follows:
+%%
+%% OriginalFunction() ->
+%%    code_version:update(?MODULE),
+%%    ?MODULE:OriginalFunction().
+%%
+%% PostErlangVersionFunction() ->
+%%    %% implementation using new time API
+%%    ..
+%%
+%% PreErlangVersionFunction() ->
+%%    %% implementation using fallback solution
+%%    ..
+%%
+%% See `time_compat.erl` for an example.
+%%
+%% end
+%%----------------------------------------------------------------------------
+-spec update(atom()) -> ok | no_return().
+update(Module) ->
+    AbsCode = get_abs_code(Module),
+    Forms = replace_forms(Module, get_otp_version(), AbsCode),
+    Code = compile_forms(Forms),
+    load_code(Module, Code).
+
+%%----------------------------------------------------------------------------
+%% Internal functions
+%%----------------------------------------------------------------------------
+load_code(Module, Code) ->
+    LockId = {{?MODULE, Module}, self()},
+    FakeFilename = "Loaded by rabbit_common",
+    global:set_lock(LockId, [node()]),
+    case code:which(Module) of
+        FakeFilename ->
+            ok;
+        _ ->
+            unload(Module),
+            case code:load_binary(Module, FakeFilename, Code) of
+                {module, _}     -> ok;
+                {error, Reason} -> throw({cannot_load, Module, Reason})
+            end
+    end,
+    global:del_lock(LockId, [node()]),
+    ok.
+
+unload(Module) ->
+    code:soft_purge(Module),
+    code:delete(Module).
+
+compile_forms(Forms) ->
+    case compile:forms(Forms, [debug_info]) of
+        {ok, _ModName, Code} ->
+            Code;
+        {ok, _ModName, Code, _Warnings} ->
+            Code;
+        Error ->
+            throw({cannot_compile_forms, Error})
+    end.
+
+get_abs_code(Module) ->
+    get_forms(get_object_code(Module)).
+
+get_object_code(Module) ->
+    case code:get_object_code(Module) of
+        {_Mod, Code, _File} ->
+            Code;
+        error ->
+            throw({not_found, Module})
+    end.
+
+get_forms(Code) ->
+    case beam_lib:chunks(Code, [abstract_code]) of
+        {ok, {_, [{abstract_code, {raw_abstract_v1, Forms}}]}} ->
+            Forms;
+        {ok, {Module, [{abstract_code, no_abstract_code}]}} ->
+            throw({no_abstract_code, Module});
+        {error, beam_lib, Reason} ->
+            throw({no_abstract_code, Reason})
+    end.
+
+get_otp_version() ->
+    Version = erlang:system_info(otp_release),
+    case re:run(Version, "^[0-9][0-9]", [{capture, first, list}]) of
+        {match, [V]} ->
+            list_to_integer(V);
+        _ ->
+            %% Could be anything below R17, we are not interested
+            0
+    end.
+
+get_original_pairs(VersionSupport) ->
+    [{Orig, Arity} || {Orig, Arity, _Pre, _Post} <- VersionSupport].
+
+get_delete_pairs(true, VersionSupport) ->
+    [{Pre, Arity} || {_Orig, Arity, Pre, _Post} <- VersionSupport];
+get_delete_pairs(false, VersionSupport) ->
+    [{Post, Arity} || {_Orig, Arity, _Pre, Post} <- VersionSupport].
+
+get_rename_pairs(true, VersionSupport) ->
+    [{Post, Arity} || {_Orig, Arity, _Pre, Post} <- VersionSupport];
+get_rename_pairs(false, VersionSupport) ->
+    [{Pre, Arity} || {_Orig, Arity, Pre, _Post} <- VersionSupport].
+
+%% Pairs of {Renamed, OriginalName} functions
+get_name_pairs(true, VersionSupport) ->
+    [{{Post, Arity}, Orig} || {Orig, Arity, _Pre, Post} <- VersionSupport];
+get_name_pairs(false, VersionSupport) ->
+    [{{Pre, Arity}, Orig} || {Orig, Arity, Pre, _Post} <- VersionSupport].
+
+delete_abstract_functions(ToDelete) ->
+    fun(Tree, Function) ->
+            case lists:member(Function, ToDelete) of
+                true ->
+                    erl_syntax:comment(["Deleted unused function"]);
+                false ->
+                    Tree
+            end
+    end.
+
+rename_abstract_functions(ToRename, ToName) ->
+    fun(Tree, Function) ->
+            case lists:member(Function, ToRename) of
+                true ->
+                    FunctionName = proplists:get_value(Function, ToName),
+                    erl_syntax:function(
+                      erl_syntax:atom(FunctionName),
+                      erl_syntax:function_clauses(Tree));
+                false ->
+                    Tree
+            end
+    end.
+
+replace_forms(Module, ErlangVersion, AbsCode) ->
+    %% Obtain attribute containing the list of functions that must be updated
+    Attr = Module:module_info(attributes),
+    VersionSupport = proplists:get_value(erlang_version_support, Attr),
+    {Pre, Post} = lists:splitwith(fun({Version, _Pairs}) ->
+                                          Version > ErlangVersion
+                                  end, VersionSupport),
+    %% Replace functions in two passes: replace for Erlang versions > current
+    %% first, Erlang versions =< current afterwards.
+    replace_version_forms(
+      true, replace_version_forms(false, AbsCode, get_version_functions(Pre)),
+      get_version_functions(Post)).
+
+get_version_functions(List) ->
+    lists:append([Pairs || {_Version, Pairs} <- List]).
+
+replace_version_forms(IsPost, AbsCode, VersionSupport) ->
+    %% Get pairs of {Function, Arity} for the triggering functions, which
+    %% are also the final function names.
+    Original = get_original_pairs(VersionSupport),
+    %% Get pairs of {Function, Arity} for the unused version
+    ToDelete = get_delete_pairs(IsPost, VersionSupport),
+    %% Delete original functions (those that trigger the code update) and
+    %% the unused version ones
+    DeleteFun = delete_abstract_functions(ToDelete ++ Original),
+    AbsCode0 = replace_function_forms(AbsCode, DeleteFun),
+    %% Get pairs of {Function, Arity} for the current version which must be
+    %% renamed
+    ToRename = get_rename_pairs(IsPost, VersionSupport),
+    %% Get pairs of {Renamed, OriginalName} functions
+    ToName = get_name_pairs(IsPost, VersionSupport),
+    %% Rename versioned functions with their final name
+    RenameFun = rename_abstract_functions(ToRename, ToName),
+    %% Remove exports of all versioned functions
+    remove_exports(replace_function_forms(AbsCode0, RenameFun),
+                   ToDelete ++ ToRename).
+
+replace_function_forms(AbsCode, Fun) ->
+    ReplaceFunction =
+        fun(Tree) ->
+                Function = erl_syntax_lib:analyze_function(Tree),
+                Fun(Tree, Function)
+        end,
+    Filter = fun(Tree) ->
+                     case erl_syntax:type(Tree) of
+                         function -> ReplaceFunction(Tree);
+                         _Other -> Tree
+                     end
+             end,
+    fold_syntax_tree(Filter, AbsCode).
+
+filter_export_pairs(Info, ToDelete) ->
+    lists:filter(fun(Pair) ->
+                         not lists:member(Pair, ToDelete)
+                 end, Info).
+
+remove_exports(AbsCode, ToDelete) ->
+    RemoveExports =
+        fun(Tree) ->
+                case erl_syntax_lib:analyze_attribute(Tree) of
+                    {export, Info} ->
+                        Remaining = filter_export_pairs(Info, ToDelete),
+                        rebuild_export(Remaining);
+                    _Other -> Tree
+                end
+        end,
+    Filter = fun(Tree) ->
+                     case erl_syntax:type(Tree) of
+                         attribute -> RemoveExports(Tree);
+                         _Other -> Tree
+                     end
+             end,
+    fold_syntax_tree(Filter, AbsCode).
+
+rebuild_export(Args) ->
+    erl_syntax:attribute(
+      erl_syntax:atom(export),
+      [erl_syntax:list(
+         [erl_syntax:arity_qualifier(erl_syntax:atom(N),
+                                     erl_syntax:integer(A))
+          || {N, A} <- Args])]).
+
+fold_syntax_tree(Filter, Forms) ->
+    Tree = erl_syntax:form_list(Forms),
+    NewTree = erl_syntax_lib:map(Filter, Tree),
+    erl_syntax:revert_forms(NewTree).
similarity index 77%
rename from rabbitmq-server/src/credit_flow.erl
rename to deps/rabbit_common/src/credit_flow.erl
index b9547cff1cf9d3d396f9f1e21270572174e1c3fc..e9b16f4954bfab4a6fc47294fc5ad977e1cfea1e 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(credit_flow).
@@ -47,7 +47,7 @@
 %% client publishes.
 
 -define(DEFAULT_INITIAL_CREDIT, 200).
--define(DEFAULT_MORE_CREDIT_AFTER, 50).
+-define(DEFAULT_MORE_CREDIT_AFTER, 100).
 
 -define(DEFAULT_CREDIT,
         case get(credit_flow_default_credit) of
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([bump_msg/0]).
 
 -opaque(bump_msg() :: {pid(), non_neg_integer()}).
 -type(credit_spec() :: {non_neg_integer(), non_neg_integer()}).
 
--spec(send/1 :: (pid()) -> 'ok').
--spec(send/2 :: (pid(), credit_spec()) -> 'ok').
--spec(ack/1 :: (pid()) -> 'ok').
--spec(ack/2 :: (pid(), credit_spec()) -> 'ok').
--spec(handle_bump_msg/1 :: (bump_msg()) -> 'ok').
--spec(blocked/0 :: () -> boolean()).
--spec(peer_down/1 :: (pid()) -> 'ok').
-
--endif.
+-spec send(pid()) -> 'ok'.
+-spec send(pid(), credit_spec()) -> 'ok'.
+
+-spec ack(pid()) -> 'ok'.
+-spec ack(pid(), credit_spec()) -> 'ok'.
+-spec handle_bump_msg(bump_msg()) -> 'ok'.
+-spec blocked() -> boolean().
+-spec peer_down(pid()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
 %% flow".
 -define(STATE_CHANGE_INTERVAL, 1000000).
 
+-ifdef(CREDIT_FLOW_TRACING).
+-define(TRACE_BLOCKED(SELF, FROM), rabbit_event:notify(credit_flow_blocked,
+                                     [{process, SELF},
+                                      {process_info, erlang:process_info(SELF)},
+                                      {from, FROM},
+                                      {from_info, erlang:process_info(FROM)},
+                                      {timestamp,
+                                       time_compat:os_system_time(
+                                         milliseconds)}])).
+-define(TRACE_UNBLOCKED(SELF, FROM), rabbit_event:notify(credit_flow_unblocked,
+                                       [{process, SELF},
+                                        {from, FROM},
+                                        {timestamp,
+                                         time_compat:os_system_time(
+                                           milliseconds)}])).
+-else.
+-define(TRACE_BLOCKED(SELF, FROM), ok).
+-define(TRACE_UNBLOCKED(SELF, FROM), ok).
+-endif.
+
 %%----------------------------------------------------------------------------
 
 %% There are two "flows" here; of messages and of credit, going in
@@ -151,7 +168,10 @@ state() -> case blocked() of
                true  -> flow;
                false -> case get(credit_blocked_at) of
                             undefined -> running;
-                            B         -> Diff = timer:now_diff(erlang:now(), B),
+                            B         -> Now = time_compat:monotonic_time(),
+                                         Diff = time_compat:convert_time_unit(Now - B,
+                                                                              native,
+                                                                              micro_seconds),
                                          case Diff < ?STATE_CHANGE_INTERVAL of
                                              true  -> flow;
                                              false -> running
@@ -178,18 +198,21 @@ grant(To, Quantity) ->
     end.
 
 block(From) ->
+    ?TRACE_BLOCKED(self(), From),
     case blocked() of
-        false -> put(credit_blocked_at, erlang:now());
+        false -> put(credit_blocked_at, time_compat:monotonic_time());
         true  -> ok
     end,
     ?UPDATE(credit_blocked, [], Blocks, [From | Blocks]).
 
 unblock(From) ->
+    ?TRACE_UNBLOCKED(self(), From),
     ?UPDATE(credit_blocked, [], Blocks, Blocks -- [From]),
     case blocked() of
         false -> case erase(credit_deferred) of
                      undefined -> ok;
-                     Credits   -> [To ! Msg || {To, Msg} <- Credits]
+                     Credits   -> _ = [To ! Msg || {To, Msg} <- Credits],
+                                  ok
                  end;
         true  -> ok
     end.
diff --git a/deps/rabbit_common/src/ec_semver.erl b/deps/rabbit_common/src/ec_semver.erl
new file mode 100644 (file)
index 0000000..6ae5597
--- /dev/null
@@ -0,0 +1,730 @@
+%%% vi:ts=4 sw=4 et
+
+%%% Imported from https://github.com/erlware/erlware_commons.git
+%%% Commit 603441a0363d5433de2139759991c640846c3a62
+%%% We export normalize/1 here
+
+%%%-------------------------------------------------------------------
+%%% @copyright (C) 2011, Erlware LLC
+%%% @doc
+%%%  Helper functions for working with semver versioning strings.
+%%%  See http://semver.org/ for the spec.
+%%% @end
+%%%-------------------------------------------------------------------
+-module(ec_semver).
+
+-export([parse/1,
+         format/1,
+         eql/2,
+         gt/2,
+         gte/2,
+         lt/2,
+         lte/2,
+         pes/2,
+         normalize/1,
+         between/3]).
+
+%% For internal use by the ec_semver_parser peg
+-export([internal_parse_version/1]).
+
+-export_type([semver/0,
+              version_string/0,
+              any_version/0]).
+
+%%%===================================================================
+%%% Public Types
+%%%===================================================================
+
+-type version_element() :: non_neg_integer() | binary().
+
+-type major_minor_patch_minpatch() ::
+        version_element()
+      | {version_element(), version_element()}
+      | {version_element(), version_element(), version_element()}
+      | {version_element(), version_element(),
+         version_element(), version_element()}.
+
+-type alpha_part() :: integer() | binary() | string().
+-type alpha_info() :: {PreRelease::[alpha_part()],
+                       BuildVersion::[alpha_part()]}.
+
+-type semver() :: {major_minor_patch_minpatch(), alpha_info()}.
+
+-type version_string() :: string() | binary().
+
+-type any_version() :: version_string() | semver().
+
+%%%===================================================================
+%%% API
+%%%===================================================================
+
+%% @doc parse a string or binary into a valid semver representation
+-spec parse(any_version()) -> semver().
+parse(Version) when erlang:is_list(Version) ->
+    case ec_semver_parser:parse(Version) of
+        {fail, _} ->
+            {erlang:iolist_to_binary(Version), {[],[]}};
+        Good ->
+            Good
+    end;
+parse(Version) when erlang:is_binary(Version) ->
+    case ec_semver_parser:parse(Version) of
+        {fail, _} ->
+            {Version, {[],[]}};
+        Good ->
+            Good
+    end;
+parse(Version) ->
+    Version.
+
+-spec format(semver()) -> iolist().
+format({Maj, {AlphaPart, BuildPart}})
+  when erlang:is_integer(Maj);
+       erlang:is_binary(Maj) ->
+    [format_version_part(Maj),
+     format_vsn_rest(<<"-">>, AlphaPart),
+     format_vsn_rest(<<"+">>, BuildPart)];
+format({{Maj, Min}, {AlphaPart, BuildPart}}) ->
+    [format_version_part(Maj), ".",
+     format_version_part(Min),
+     format_vsn_rest(<<"-">>, AlphaPart),
+     format_vsn_rest(<<"+">>, BuildPart)];
+format({{Maj, Min, Patch}, {AlphaPart, BuildPart}}) ->
+    [format_version_part(Maj), ".",
+     format_version_part(Min), ".",
+     format_version_part(Patch),
+     format_vsn_rest(<<"-">>, AlphaPart),
+     format_vsn_rest(<<"+">>, BuildPart)];
+format({{Maj, Min, Patch, MinPatch}, {AlphaPart, BuildPart}}) ->
+    [format_version_part(Maj), ".",
+     format_version_part(Min), ".",
+     format_version_part(Patch), ".",
+     format_version_part(MinPatch),
+     format_vsn_rest(<<"-">>, AlphaPart),
+     format_vsn_rest(<<"+">>, BuildPart)].
+
+-spec format_version_part(integer() | binary()) -> iolist().
+format_version_part(Vsn)
+  when erlang:is_integer(Vsn) ->
+    erlang:integer_to_list(Vsn);
+format_version_part(Vsn)
+  when erlang:is_binary(Vsn) ->
+    Vsn.
+
+
+
+%% @doc test for equality between semver versions
+-spec eql(any_version(), any_version()) -> boolean().
+eql(VsnA, VsnB) ->
+    NVsnA = normalize(parse(VsnA)),
+    NVsnB = normalize(parse(VsnB)),
+    NVsnA =:= NVsnB.
+
+%% @doc Test that VsnA is greater than VsnB
+-spec gt(any_version(), any_version()) -> boolean().
+gt(VsnA, VsnB) ->
+    {MMPA, {AlphaA, PatchA}} = normalize(parse(VsnA)),
+    {MMPB, {AlphaB, PatchB}} = normalize(parse(VsnB)),
+    ((MMPA > MMPB)
+     orelse
+       ((MMPA =:= MMPB)
+        andalso
+          ((AlphaA =:= [] andalso AlphaB =/= [])
+           orelse
+             ((not (AlphaB =:= [] andalso AlphaA =/= []))
+              andalso
+                (AlphaA > AlphaB))))
+     orelse
+       ((MMPA =:= MMPB)
+        andalso
+          (AlphaA =:= AlphaB)
+        andalso
+          ((PatchB =:= [] andalso PatchA =/= [])
+           orelse
+           PatchA > PatchB))).
+
+%% @doc Test that VsnA is greater than or equal to VsnB
+-spec gte(any_version(), any_version()) -> boolean().
+gte(VsnA, VsnB) ->
+    NVsnA = normalize(parse(VsnA)),
+    NVsnB = normalize(parse(VsnB)),
+    gt(NVsnA, NVsnB) orelse eql(NVsnA, NVsnB).
+
+%% @doc Test that VsnA is less than VsnB
+-spec lt(any_version(), any_version()) -> boolean().
+lt(VsnA, VsnB) ->
+    {MMPA, {AlphaA, PatchA}} = normalize(parse(VsnA)),
+    {MMPB, {AlphaB, PatchB}} = normalize(parse(VsnB)),
+    ((MMPA < MMPB)
+     orelse
+       ((MMPA =:= MMPB)
+        andalso
+          ((AlphaB =:= [] andalso AlphaA =/= [])
+           orelse
+             ((not (AlphaA =:= [] andalso AlphaB =/= []))
+              andalso
+                (AlphaA < AlphaB))))
+     orelse
+       ((MMPA =:= MMPB)
+        andalso
+          (AlphaA =:= AlphaB)
+        andalso
+          ((PatchA =:= [] andalso PatchB =/= [])
+           orelse
+           PatchA < PatchB))).
+
+%% @doc Test that VsnA is less than or equal to VsnB
+-spec lte(any_version(), any_version()) -> boolean().
+lte(VsnA, VsnB) ->
+    NVsnA = normalize(parse(VsnA)),
+    NVsnB = normalize(parse(VsnB)),
+    lt(NVsnA, NVsnB) orelse eql(NVsnA, NVsnB).
+
+%% @doc Test that VsnMatch is greater than or equal to Vsn1 and
+%% less than or equal to Vsn2
+-spec between(any_version(), any_version(), any_version()) -> boolean().
+between(Vsn1, Vsn2, VsnMatch) ->
+    NVsnA = normalize(parse(Vsn1)),
+    NVsnB = normalize(parse(Vsn2)),
+    NVsnMatch = normalize(parse(VsnMatch)),
+    gte(NVsnMatch, NVsnA) andalso
+        lte(NVsnMatch, NVsnB).
+
+%% @doc check that VsnA is Approximately greater than VsnB
+%%
+%% Specifying ">= 2.6.5" is an optimistic version constraint. All
+%% versions greater than the one specified, including major releases
+%% (e.g. 3.0.0) are allowed.
+%%
+%% Conversely, specifying "~> 2.6" is pessimistic about future major
+%% revisions and "~> 2.6.5" is pessimistic about future minor
+%% revisions.
+%%
+%%  "~> 2.6" matches cookbooks >= 2.6.0 AND &lt; 3.0.0
+%% "~> 2.6.5" matches cookbooks >= 2.6.5 AND &lt; 2.7.0
+pes(VsnA, VsnB) ->
+    internal_pes(parse(VsnA), parse(VsnB)).
+
+%%%===================================================================
+%%% Friend Functions
+%%%===================================================================
+%% @doc helper function for the peg grammar to parse the iolist into a semver
+-spec internal_parse_version(iolist()) -> semver().
+internal_parse_version([MMP, AlphaPart, BuildPart, _]) ->
+    {parse_major_minor_patch_minpatch(MMP), {parse_alpha_part(AlphaPart),
+                                             parse_alpha_part(BuildPart)}}.
+
+%% @doc helper function for the peg grammar to parse the iolist into a major_minor_patch
+-spec parse_major_minor_patch_minpatch(iolist()) -> major_minor_patch_minpatch().
+parse_major_minor_patch_minpatch([MajVsn, [], [], []]) ->
+    strip_maj_version(MajVsn);
+parse_major_minor_patch_minpatch([MajVsn, [<<".">>, MinVsn], [], []]) ->
+    {strip_maj_version(MajVsn), MinVsn};
+parse_major_minor_patch_minpatch([MajVsn,
+                                  [<<".">>, MinVsn],
+                                  [<<".">>, PatchVsn], []]) ->
+    {strip_maj_version(MajVsn), MinVsn, PatchVsn};
+parse_major_minor_patch_minpatch([MajVsn,
+                                  [<<".">>, MinVsn],
+                                  [<<".">>, PatchVsn],
+                                  [<<".">>, MinPatch]]) ->
+    {strip_maj_version(MajVsn), MinVsn, PatchVsn, MinPatch}.
+
+%% @doc helper function for the peg grammar to parse the iolist into an alpha part
+-spec parse_alpha_part(iolist()) -> [alpha_part()].
+parse_alpha_part([]) ->
+    [];
+parse_alpha_part([_, AV1, Rest]) ->
+    [erlang:iolist_to_binary(AV1) |
+     [format_alpha_part(Part) || Part <- Rest]].
+
+%% @doc according to semver alpha parts that can be treated like
+%% numbers must be. We implement that here by taking the alpha part
+%% and trying to convert it to a number, if it succeeds we use
+%% it. Otherwise we do not.
+-spec format_alpha_part(iolist()) -> integer() | binary().
+format_alpha_part([<<".">>, AlphaPart]) ->
+    Bin = erlang:iolist_to_binary(AlphaPart),
+    try
+        erlang:list_to_integer(erlang:binary_to_list(Bin))
+    catch
+        error:badarg ->
+            Bin
+    end.
+
+%%%===================================================================
+%%% Internal Functions
+%%%===================================================================
+-spec strip_maj_version(iolist()) -> version_element().
+strip_maj_version([<<"v">>, MajVsn]) ->
+    MajVsn;
+strip_maj_version([[], MajVsn]) ->
+    MajVsn;
+strip_maj_version(MajVsn) ->
+    MajVsn.
+
+-spec to_list(integer() | binary() | string()) -> string() | binary().
+to_list(Detail) when erlang:is_integer(Detail) ->
+    erlang:integer_to_list(Detail);
+to_list(Detail) when erlang:is_list(Detail); erlang:is_binary(Detail) ->
+    Detail.
+
+-spec format_vsn_rest(binary() | string(), [integer() | binary()]) -> iolist().
+format_vsn_rest(_TypeMark, []) ->
+    [];
+format_vsn_rest(TypeMark, [Head | Rest]) ->
+    [TypeMark, Head |
+     [[".", to_list(Detail)] || Detail <- Rest]].
+
+%% @doc normalize the semver so they can be compared
+-spec normalize(semver()) -> semver().
+normalize({Vsn, Rest})
+  when erlang:is_binary(Vsn);
+       erlang:is_integer(Vsn) ->
+    {{Vsn, 0, 0, 0}, Rest};
+normalize({{Maj, Min}, Rest}) ->
+    {{Maj, Min, 0, 0}, Rest};
+normalize({{Maj, Min, Patch}, Rest}) ->
+    {{Maj, Min, Patch, 0}, Rest};
+normalize(Other = {{_, _, _, _}, {_,_}}) ->
+    Other.
+
+%% @doc to do the pessimistic compare we need a parsed semver. This is
+%% the internal implementation of the of the pessimistic run. The
+%% external just ensures that versions are parsed.
+-spec internal_pes(semver(), semver()) -> boolean().
+internal_pes(VsnA, {{LM, LMI}, _})
+  when erlang:is_integer(LM),
+       erlang:is_integer(LMI) ->
+    gte(VsnA, {{LM, LMI, 0}, {[], []}}) andalso
+        lt(VsnA, {{LM + 1, 0, 0, 0}, {[], []}});
+internal_pes(VsnA, {{LM, LMI, LP}, _})
+    when erlang:is_integer(LM),
+         erlang:is_integer(LMI),
+         erlang:is_integer(LP) ->
+    gte(VsnA, {{LM, LMI, LP}, {[], []}})
+        andalso
+        lt(VsnA, {{LM, LMI + 1, 0, 0}, {[], []}});
+internal_pes(VsnA, {{LM, LMI, LP, LMP}, _})
+    when erlang:is_integer(LM),
+         erlang:is_integer(LMI),
+         erlang:is_integer(LP),
+         erlang:is_integer(LMP) ->
+    gte(VsnA, {{LM, LMI, LP, LMP}, {[], []}})
+        andalso
+        lt(VsnA, {{LM, LMI, LP + 1, 0}, {[], []}});
+internal_pes(Vsn, LVsn) ->
+    gte(Vsn, LVsn).
+
+%%%===================================================================
+%%% Test Functions
+%%%===================================================================
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+eql_test() ->
+    ?assertMatch(true, eql("1.0.0-alpha",
+                           "1.0.0-alpha")),
+    ?assertMatch(true, eql(<<"1.0.0-alpha">>,
+                           "1.0.0-alpha")),
+    ?assertMatch(true, eql("1.0.0-alpha",
+                           <<"1.0.0-alpha">>)),
+    ?assertMatch(true, eql(<<"1.0.0-alpha">>,
+                           <<"1.0.0-alpha">>)),
+    ?assertMatch(true, eql("v1.0.0-alpha",
+                           "1.0.0-alpha")),
+    ?assertMatch(true, eql("1",
+                           "1.0.0")),
+    ?assertMatch(true, eql("v1",
+                           "v1.0.0")),
+    ?assertMatch(true, eql("1.0",
+                           "1.0.0")),
+    ?assertMatch(true, eql("1.0.0",
+                           "1")),
+    ?assertMatch(true, eql("1.0.0.0",
+                           "1")),
+    ?assertMatch(true, eql("1.0+alpha.1",
+                           "1.0.0+alpha.1")),
+    ?assertMatch(true, eql("1.0-alpha.1+build.1",
+                           "1.0.0-alpha.1+build.1")),
+    ?assertMatch(true, eql("1.0-alpha.1+build.1",
+                           "1.0.0.0-alpha.1+build.1")),
+    ?assertMatch(true, eql("1.0-alpha.1+build.1",
+                           "v1.0.0.0-alpha.1+build.1")),
+    ?assertMatch(true, eql("aa", "aa")),
+    ?assertMatch(true, eql("AA.BB", "AA.BB")),
+    ?assertMatch(true, eql("BBB-super", "BBB-super")),
+    ?assertMatch(true, not eql("1.0.0",
+                               "1.0.1")),
+    ?assertMatch(true, not eql(<<"1.0.0">>,
+                               "1.0.1")),
+    ?assertMatch(true, not eql("1.0.0",
+                               <<"1.0.1">>)),
+    ?assertMatch(true, not eql(<<"1.0.0">>,
+                               <<"1.0.1">>)),
+    ?assertMatch(true, not eql("1.0.0-alpha",
+                               "1.0.1+alpha")),
+    ?assertMatch(true, not eql("1.0.0+build.1",
+                               "1.0.1+build.2")),
+    ?assertMatch(true, not eql("1.0.0.0+build.1",
+                               "1.0.0.1+build.2")),
+    ?assertMatch(true, not eql("FFF", "BBB")),
+    ?assertMatch(true, not eql("1", "1BBBB")).
+
+
+gt_test() ->
+    ?assertMatch(true, gt("1.0.0-alpha.1",
+                          "1.0.0-alpha")),
+    ?assertMatch(true, gt("1.0.0.1-alpha.1",
+                          "1.0.0.1-alpha")),
+    ?assertMatch(true, gt("1.0.0.4-alpha.1",
+                          "1.0.0.2-alpha")),
+    ?assertMatch(true, gt("1.0.0.0-alpha.1",
+                          "1.0.0-alpha")),
+    ?assertMatch(true, gt("1.0.0-beta.2",
+                          "1.0.0-alpha.1")),
+    ?assertMatch(true, gt("1.0.0-beta.11",
+                          "1.0.0-beta.2")),
+    ?assertMatch(true, gt("1.0.0-beta.11",
+                          "1.0.0.0-beta.2")),
+    ?assertMatch(true, gt("1.0.0-rc.1", "1.0.0-beta.11")),
+    ?assertMatch(true, gt("1.0.0-rc.1+build.1", "1.0.0-rc.1")),
+    ?assertMatch(true, gt("1.0.0", "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, gt("1.0.0+0.3.7", "1.0.0")),
+    ?assertMatch(true, gt("1.3.7+build", "1.0.0+0.3.7")),
+    ?assertMatch(true, gt("1.3.7+build.2.b8f12d7",
+                          "1.3.7+build")),
+    ?assertMatch(true, gt("1.3.7+build.2.b8f12d7",
+                          "1.3.7.0+build")),
+    ?assertMatch(true, gt("1.3.7+build.11.e0f985a",
+                          "1.3.7+build.2.b8f12d7")),
+    ?assertMatch(true, gt("aa.cc",
+                          "aa.bb")),
+    ?assertMatch(true, not gt("1.0.0-alpha",
+                              "1.0.0-alpha.1")),
+    ?assertMatch(true, not gt("1.0.0-alpha",
+                              "1.0.0.0-alpha.1")),
+    ?assertMatch(true, not gt("1.0.0-alpha.1",
+                              "1.0.0-beta.2")),
+    ?assertMatch(true, not gt("1.0.0-beta.2",
+                              "1.0.0-beta.11")),
+    ?assertMatch(true, not gt("1.0.0-beta.11",
+                              "1.0.0-rc.1")),
+    ?assertMatch(true, not gt("1.0.0-rc.1",
+                              "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, not gt("1.0.0-rc.1+build.1",
+                              "1.0.0")),
+    ?assertMatch(true, not gt("1.0.0",
+                              "1.0.0+0.3.7")),
+    ?assertMatch(true, not gt("1.0.0+0.3.7",
+                              "1.3.7+build")),
+    ?assertMatch(true, not gt("1.3.7+build",
+                              "1.3.7+build.2.b8f12d7")),
+    ?assertMatch(true, not gt("1.3.7+build.2.b8f12d7",
+                              "1.3.7+build.11.e0f985a")),
+    ?assertMatch(true, not gt("1.0.0-alpha",
+                              "1.0.0-alpha")),
+    ?assertMatch(true, not gt("1",
+                              "1.0.0")),
+    ?assertMatch(true, not gt("aa.bb",
+                              "aa.bb")),
+    ?assertMatch(true, not gt("aa.cc",
+                              "aa.dd")),
+    ?assertMatch(true, not gt("1.0",
+                              "1.0.0")),
+    ?assertMatch(true, not gt("1.0.0",
+                              "1")),
+    ?assertMatch(true, not gt("1.0+alpha.1",
+                              "1.0.0+alpha.1")),
+    ?assertMatch(true, not gt("1.0-alpha.1+build.1",
+                              "1.0.0-alpha.1+build.1")).
+
+lt_test() ->
+    ?assertMatch(true, lt("1.0.0-alpha",
+                          "1.0.0-alpha.1")),
+    ?assertMatch(true, lt("1.0.0-alpha",
+                          "1.0.0.0-alpha.1")),
+    ?assertMatch(true, lt("1.0.0-alpha.1",
+                          "1.0.0-beta.2")),
+    ?assertMatch(true, lt("1.0.0-beta.2",
+                          "1.0.0-beta.11")),
+    ?assertMatch(true, lt("1.0.0-beta.11",
+                          "1.0.0-rc.1")),
+    ?assertMatch(true, lt("1.0.0.1-beta.11",
+                          "1.0.0.1-rc.1")),
+    ?assertMatch(true, lt("1.0.0-rc.1",
+                          "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, lt("1.0.0-rc.1+build.1",
+                          "1.0.0")),
+    ?assertMatch(true, lt("1.0.0",
+                          "1.0.0+0.3.7")),
+    ?assertMatch(true, lt("1.0.0+0.3.7",
+                          "1.3.7+build")),
+    ?assertMatch(true, lt("1.3.7+build",
+                          "1.3.7+build.2.b8f12d7")),
+    ?assertMatch(true, lt("1.3.7+build.2.b8f12d7",
+                          "1.3.7+build.11.e0f985a")),
+    ?assertMatch(true, not lt("1.0.0-alpha",
+                              "1.0.0-alpha")),
+    ?assertMatch(true, not lt("1",
+                              "1.0.0")),
+    ?assertMatch(true, lt("1",
+                          "1.0.0.1")),
+    ?assertMatch(true, lt("AA.DD",
+                          "AA.EE")),
+    ?assertMatch(true, not lt("1.0",
+                              "1.0.0")),
+    ?assertMatch(true, not lt("1.0.0.0",
+                              "1")),
+    ?assertMatch(true, not lt("1.0+alpha.1",
+                              "1.0.0+alpha.1")),
+    ?assertMatch(true, not lt("AA.DD", "AA.CC")),
+    ?assertMatch(true, not lt("1.0-alpha.1+build.1",
+                              "1.0.0-alpha.1+build.1")),
+    ?assertMatch(true, not lt("1.0.0-alpha.1",
+                              "1.0.0-alpha")),
+    ?assertMatch(true, not lt("1.0.0-beta.2",
+                              "1.0.0-alpha.1")),
+    ?assertMatch(true, not lt("1.0.0-beta.11",
+                              "1.0.0-beta.2")),
+    ?assertMatch(true, not lt("1.0.0-rc.1", "1.0.0-beta.11")),
+    ?assertMatch(true, not lt("1.0.0-rc.1+build.1", "1.0.0-rc.1")),
+    ?assertMatch(true, not lt("1.0.0", "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, not lt("1.0.0+0.3.7", "1.0.0")),
+    ?assertMatch(true, not lt("1.3.7+build", "1.0.0+0.3.7")),
+    ?assertMatch(true, not lt("1.3.7+build.2.b8f12d7",
+                              "1.3.7+build")),
+    ?assertMatch(true, not lt("1.3.7+build.11.e0f985a",
+                              "1.3.7+build.2.b8f12d7")).
+
+gte_test() ->
+    ?assertMatch(true, gte("1.0.0-alpha",
+                           "1.0.0-alpha")),
+
+    ?assertMatch(true, gte("1",
+                           "1.0.0")),
+
+    ?assertMatch(true, gte("1.0",
+                           "1.0.0")),
+
+    ?assertMatch(true, gte("1.0.0",
+                           "1")),
+
+    ?assertMatch(true, gte("1.0.0.0",
+                           "1")),
+
+    ?assertMatch(true, gte("1.0+alpha.1",
+                           "1.0.0+alpha.1")),
+
+    ?assertMatch(true, gte("1.0-alpha.1+build.1",
+                           "1.0.0-alpha.1+build.1")),
+
+    ?assertMatch(true, gte("1.0.0-alpha.1+build.1",
+                           "1.0.0.0-alpha.1+build.1")),
+    ?assertMatch(true, gte("1.0.0-alpha.1",
+                           "1.0.0-alpha")),
+    ?assertMatch(true, gte("1.0.0-beta.2",
+                           "1.0.0-alpha.1")),
+    ?assertMatch(true, gte("1.0.0-beta.11",
+                           "1.0.0-beta.2")),
+    ?assertMatch(true, gte("aa.bb", "aa.bb")),
+    ?assertMatch(true, gte("dd", "aa")),
+    ?assertMatch(true, gte("1.0.0-rc.1", "1.0.0-beta.11")),
+    ?assertMatch(true, gte("1.0.0-rc.1+build.1", "1.0.0-rc.1")),
+    ?assertMatch(true, gte("1.0.0", "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, gte("1.0.0+0.3.7", "1.0.0")),
+    ?assertMatch(true, gte("1.3.7+build", "1.0.0+0.3.7")),
+    ?assertMatch(true, gte("1.3.7+build.2.b8f12d7",
+                           "1.3.7+build")),
+    ?assertMatch(true, gte("1.3.7+build.11.e0f985a",
+                           "1.3.7+build.2.b8f12d7")),
+    ?assertMatch(true, not gte("1.0.0-alpha",
+                               "1.0.0-alpha.1")),
+    ?assertMatch(true, not gte("CC", "DD")),
+    ?assertMatch(true, not gte("1.0.0-alpha.1",
+                               "1.0.0-beta.2")),
+    ?assertMatch(true, not gte("1.0.0-beta.2",
+                               "1.0.0-beta.11")),
+    ?assertMatch(true, not gte("1.0.0-beta.11",
+                               "1.0.0-rc.1")),
+    ?assertMatch(true, not gte("1.0.0-rc.1",
+                               "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, not gte("1.0.0-rc.1+build.1",
+                               "1.0.0")),
+    ?assertMatch(true, not gte("1.0.0",
+                               "1.0.0+0.3.7")),
+    ?assertMatch(true, not gte("1.0.0+0.3.7",
+                               "1.3.7+build")),
+    ?assertMatch(true, not gte("1.0.0",
+                               "1.0.0+build.1")),
+    ?assertMatch(true, not gte("1.3.7+build",
+                               "1.3.7+build.2.b8f12d7")),
+    ?assertMatch(true, not gte("1.3.7+build.2.b8f12d7",
+                               "1.3.7+build.11.e0f985a")).
+lte_test() ->
+    ?assertMatch(true, lte("1.0.0-alpha",
+                           "1.0.0-alpha.1")),
+    ?assertMatch(true, lte("1.0.0-alpha.1",
+                           "1.0.0-beta.2")),
+    ?assertMatch(true, lte("1.0.0-beta.2",
+                           "1.0.0-beta.11")),
+    ?assertMatch(true, lte("1.0.0-beta.11",
+                           "1.0.0-rc.1")),
+    ?assertMatch(true, lte("1.0.0-rc.1",
+                           "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, lte("1.0.0-rc.1+build.1",
+                           "1.0.0")),
+    ?assertMatch(true, lte("1.0.0",
+                           "1.0.0+0.3.7")),
+    ?assertMatch(true, lte("1.0.0+0.3.7",
+                           "1.3.7+build")),
+    ?assertMatch(true, lte("1.3.7+build",
+                           "1.3.7+build.2.b8f12d7")),
+    ?assertMatch(true, lte("1.3.7+build.2.b8f12d7",
+                           "1.3.7+build.11.e0f985a")),
+    ?assertMatch(true, lte("1.0.0-alpha",
+                           "1.0.0-alpha")),
+    ?assertMatch(true, lte("1",
+                           "1.0.0")),
+    ?assertMatch(true, lte("1.0",
+                           "1.0.0")),
+    ?assertMatch(true, lte("1.0.0",
+                           "1")),
+    ?assertMatch(true, lte("1.0+alpha.1",
+                           "1.0.0+alpha.1")),
+    ?assertMatch(true, lte("1.0.0.0+alpha.1",
+                           "1.0.0+alpha.1")),
+    ?assertMatch(true, lte("1.0-alpha.1+build.1",
+                           "1.0.0-alpha.1+build.1")),
+    ?assertMatch(true, lte("aa","cc")),
+    ?assertMatch(true, lte("cc","cc")),
+    ?assertMatch(true, not lte("1.0.0-alpha.1",
+                              "1.0.0-alpha")),
+    ?assertMatch(true, not lte("cc", "aa")),
+    ?assertMatch(true, not lte("1.0.0-beta.2",
+                              "1.0.0-alpha.1")),
+    ?assertMatch(true, not lte("1.0.0-beta.11",
+                              "1.0.0-beta.2")),
+    ?assertMatch(true, not lte("1.0.0-rc.1", "1.0.0-beta.11")),
+    ?assertMatch(true, not lte("1.0.0-rc.1+build.1", "1.0.0-rc.1")),
+    ?assertMatch(true, not lte("1.0.0", "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, not lte("1.0.0+0.3.7", "1.0.0")),
+    ?assertMatch(true, not lte("1.3.7+build", "1.0.0+0.3.7")),
+    ?assertMatch(true, not lte("1.3.7+build.2.b8f12d7",
+                              "1.3.7+build")),
+    ?assertMatch(true, not lte("1.3.7+build.11.e0f985a",
+                              "1.3.7+build.2.b8f12d7")).
+
+between_test() ->
+    ?assertMatch(true, between("1.0.0-alpha",
+                               "1.0.0-alpha.3",
+                               "1.0.0-alpha.2")),
+    ?assertMatch(true, between("1.0.0-alpha.1",
+                               "1.0.0-beta.2",
+                               "1.0.0-alpha.25")),
+    ?assertMatch(true, between("1.0.0-beta.2",
+                               "1.0.0-beta.11",
+                               "1.0.0-beta.7")),
+    ?assertMatch(true, between("1.0.0-beta.11",
+                               "1.0.0-rc.3",
+                               "1.0.0-rc.1")),
+    ?assertMatch(true, between("1.0.0-rc.1",
+                               "1.0.0-rc.1+build.3",
+                               "1.0.0-rc.1+build.1")),
+
+    ?assertMatch(true, between("1.0.0.0-rc.1",
+                               "1.0.0-rc.1+build.3",
+                               "1.0.0-rc.1+build.1")),
+    ?assertMatch(true, between("1.0.0-rc.1+build.1",
+                               "1.0.0",
+                               "1.0.0-rc.33")),
+    ?assertMatch(true, between("1.0.0",
+                               "1.0.0+0.3.7",
+                               "1.0.0+0.2")),
+    ?assertMatch(true, between("1.0.0+0.3.7",
+                               "1.3.7+build",
+                               "1.2")),
+    ?assertMatch(true, between("1.3.7+build",
+                               "1.3.7+build.2.b8f12d7",
+                               "1.3.7+build.1")),
+    ?assertMatch(true, between("1.3.7+build.2.b8f12d7",
+                               "1.3.7+build.11.e0f985a",
+                               "1.3.7+build.10.a36faa")),
+    ?assertMatch(true, between("1.0.0-alpha",
+                               "1.0.0-alpha",
+                               "1.0.0-alpha")),
+    ?assertMatch(true, between("1",
+                               "1.0.0",
+                               "1.0.0")),
+    ?assertMatch(true, between("1.0",
+                               "1.0.0",
+                               "1.0.0")),
+
+    ?assertMatch(true, between("1.0",
+                               "1.0.0.0",
+                               "1.0.0.0")),
+    ?assertMatch(true, between("1.0.0",
+                               "1",
+                               "1")),
+    ?assertMatch(true, between("1.0+alpha.1",
+                               "1.0.0+alpha.1",
+                               "1.0.0+alpha.1")),
+    ?assertMatch(true, between("1.0-alpha.1+build.1",
+                               "1.0.0-alpha.1+build.1",
+                               "1.0.0-alpha.1+build.1")),
+    ?assertMatch(true, between("aaa",
+                               "ddd",
+                               "cc")),
+    ?assertMatch(true, not between("1.0.0-alpha.1",
+                                   "1.0.0-alpha.22",
+                                   "1.0.0")),
+    ?assertMatch(true, not between("1.0.0",
+                                   "1.0.0-alpha.1",
+                                   "2.0")),
+    ?assertMatch(true, not between("1.0.0-beta.1",
+                                   "1.0.0-beta.11",
+                                   "1.0.0-alpha")),
+    ?assertMatch(true, not between("1.0.0-beta.11", "1.0.0-rc.1",
+                                   "1.0.0-rc.22")),
+    ?assertMatch(true, not between("aaa", "ddd", "zzz")).
+
+pes_test() ->
+    ?assertMatch(true, pes("2.6.0", "2.6")),
+    ?assertMatch(true, pes("2.7", "2.6")),
+    ?assertMatch(true, pes("2.8", "2.6")),
+    ?assertMatch(true, pes("2.9", "2.6")),
+    ?assertMatch(true, pes("A.B", "A.A")),
+    ?assertMatch(true, not pes("3.0.0", "2.6")),
+    ?assertMatch(true, not pes("2.5", "2.6")),
+    ?assertMatch(true, pes("2.6.5", "2.6.5")),
+    ?assertMatch(true, pes("2.6.6", "2.6.5")),
+    ?assertMatch(true, pes("2.6.7", "2.6.5")),
+    ?assertMatch(true, pes("2.6.8", "2.6.5")),
+    ?assertMatch(true, pes("2.6.9", "2.6.5")),
+    ?assertMatch(true, pes("2.6.0.9", "2.6.0.5")),
+    ?assertMatch(true, not pes("2.7", "2.6.5")),
+    ?assertMatch(true, not pes("2.1.7", "2.1.6.5")),
+    ?assertMatch(true, not pes("A.A", "A.B")),
+    ?assertMatch(true, not pes("2.5", "2.6.5")).
+
+version_format_test() ->
+    ?assertEqual(["1", [], []], format({1, {[],[]}})),
+    ?assertEqual(["1", ".", "2", ".", "34", [], []], format({{1,2,34},{[],[]}})),
+    ?assertEqual(<<"a">>, erlang:iolist_to_binary(format({<<"a">>, {[],[]}}))),
+    ?assertEqual(<<"a.b">>, erlang:iolist_to_binary(format({{<<"a">>,<<"b">>}, {[],[]}}))),
+    ?assertEqual(<<"1">>, erlang:iolist_to_binary(format({1, {[],[]}}))),
+    ?assertEqual(<<"1.2">>, erlang:iolist_to_binary(format({{1,2}, {[],[]}}))),
+    ?assertEqual(<<"1.2.2">>, erlang:iolist_to_binary(format({{1,2,2}, {[],[]}}))),
+    ?assertEqual(<<"1.99.2">>, erlang:iolist_to_binary(format({{1,99,2}, {[],[]}}))),
+    ?assertEqual(<<"1.99.2-alpha">>, erlang:iolist_to_binary(format({{1,99,2}, {[<<"alpha">>],[]}}))),
+    ?assertEqual(<<"1.99.2-alpha.1">>, erlang:iolist_to_binary(format({{1,99,2}, {[<<"alpha">>,1], []}}))),
+    ?assertEqual(<<"1.99.2+build.1.a36">>,
+                 erlang:iolist_to_binary(format({{1,99,2}, {[], [<<"build">>, 1, <<"a36">>]}}))),
+    ?assertEqual(<<"1.99.2.44+build.1.a36">>,
+                 erlang:iolist_to_binary(format({{1,99,2,44}, {[], [<<"build">>, 1, <<"a36">>]}}))),
+    ?assertEqual(<<"1.99.2-alpha.1+build.1.a36">>,
+                 erlang:iolist_to_binary(format({{1,99,2}, {[<<"alpha">>, 1], [<<"build">>, 1, <<"a36">>]}}))),
+    ?assertEqual(<<"1">>, erlang:iolist_to_binary(format({1, {[],[]}}))).
+
+-endif.
diff --git a/deps/rabbit_common/src/ec_semver_parser.erl b/deps/rabbit_common/src/ec_semver_parser.erl
new file mode 100644 (file)
index 0000000..ad95fe5
--- /dev/null
@@ -0,0 +1,306 @@
+%%% Imported from https://github.com/erlware/erlware_commons.git
+%%% Commit 603441a0363d5433de2139759991c640846c3a62
+
+-module(ec_semver_parser).
+-export([parse/1,file/1]).
+-define(p_anything,true).
+-define(p_charclass,true).
+-define(p_choose,true).
+-define(p_not,true).
+-define(p_one_or_more,true).
+-define(p_optional,true).
+-define(p_scan,true).
+-define(p_seq,true).
+-define(p_string,true).
+-define(p_zero_or_more,true).
+
+
+-compile(export_all).
+-spec file(file:name()) -> any().
+file(Filename) -> case file:read_file(Filename) of {ok,Bin} -> parse(Bin); Err -> Err end.
+
+-spec parse(binary() | list()) -> any().
+parse(List) when is_list(List) -> parse(unicode:characters_to_binary(List));
+parse(Input) when is_binary(Input) ->
+  _ = setup_memo(),
+  Result = case 'semver'(Input,{{line,1},{column,1}}) of
+             {AST, <<>>, _Index} -> AST;
+             Any -> Any
+           end,
+  release_memo(), Result.
+
+-spec 'semver'(input(), index()) -> parse_result().
+'semver'(Input, Index) ->
+  p(Input, Index, 'semver', fun(I,D) -> (p_seq([fun 'major_minor_patch_min_patch'/2, p_optional(p_seq([p_string(<<"-">>), fun 'alpha_part'/2, p_zero_or_more(p_seq([p_string(<<".">>), fun 'alpha_part'/2]))])), p_optional(p_seq([p_string(<<"+">>), fun 'alpha_part'/2, p_zero_or_more(p_seq([p_string(<<".">>), fun 'alpha_part'/2]))])), p_not(p_anything())]))(I,D) end, fun(Node, _Idx) -> ec_semver:internal_parse_version(Node)  end).
+
+-spec 'major_minor_patch_min_patch'(input(), index()) -> parse_result().
+'major_minor_patch_min_patch'(Input, Index) ->
+  p(Input, Index, 'major_minor_patch_min_patch', fun(I,D) -> (p_seq([p_choose([p_seq([p_optional(p_string(<<"v">>)), fun 'numeric_part'/2]), fun 'alpha_part'/2]), p_optional(p_seq([p_string(<<".">>), fun 'version_part'/2])), p_optional(p_seq([p_string(<<".">>), fun 'version_part'/2])), p_optional(p_seq([p_string(<<".">>), fun 'version_part'/2]))]))(I,D) end, fun(Node, Idx) ->transform('major_minor_patch_min_patch', Node, Idx) end).
+
+-spec 'version_part'(input(), index()) -> parse_result().
+'version_part'(Input, Index) ->
+  p(Input, Index, 'version_part', fun(I,D) -> (p_choose([fun 'numeric_part'/2, fun 'alpha_part'/2]))(I,D) end, fun(Node, Idx) ->transform('version_part', Node, Idx) end).
+
+-spec 'numeric_part'(input(), index()) -> parse_result().
+'numeric_part'(Input, Index) ->
+  p(Input, Index, 'numeric_part', fun(I,D) -> (p_one_or_more(p_charclass(<<"[0-9]">>)))(I,D) end, fun(Node, _Idx) ->erlang:list_to_integer(erlang:binary_to_list(erlang:iolist_to_binary(Node))) end).
+
+-spec 'alpha_part'(input(), index()) -> parse_result().
+'alpha_part'(Input, Index) ->
+  p(Input, Index, 'alpha_part', fun(I,D) -> (p_one_or_more(p_charclass(<<"[A-Za-z0-9]">>)))(I,D) end, fun(Node, _Idx) ->erlang:iolist_to_binary(Node) end).
+
+
+transform(_,Node,_Index) -> Node.
+-file("peg_includes.hrl", 1).
+-type index() :: {{line, pos_integer()}, {column, pos_integer()}}.
+-type input() :: binary().
+-type parse_failure() :: {fail, term()}.
+-type parse_success() :: {term(), input(), index()}.
+-type parse_result() :: parse_failure() | parse_success().
+-type parse_fun() :: fun((input(), index()) -> parse_result()).
+-type xform_fun() :: fun((input(), index()) -> term()).
+
+-spec p(input(), index(), atom(), parse_fun(), xform_fun()) -> parse_result().
+p(Inp, StartIndex, Name, ParseFun, TransformFun) ->
+  case get_memo(StartIndex, Name) of      % See if the current reduction is memoized
+    {ok, Memo} -> %Memo;                     % If it is, return the stored result
+      Memo;
+    _ ->                                        % If not, attempt to parse
+      Result = case ParseFun(Inp, StartIndex) of
+        {fail,_} = Failure ->                       % If it fails, memoize the failure
+          Failure;
+        {Match, InpRem, NewIndex} ->               % If it passes, transform and memoize the result.
+          Transformed = TransformFun(Match, StartIndex),
+          {Transformed, InpRem, NewIndex}
+      end,
+      memoize(StartIndex, Name, Result),
+      Result
+  end.
+
+-spec setup_memo() -> ets:tid().
+setup_memo() ->
+  put({parse_memo_table, ?MODULE}, ets:new(?MODULE, [set])).
+
+-spec release_memo() -> true.
+release_memo() ->
+  ets:delete(memo_table_name()).
+
+-spec memoize(index(), atom(), parse_result()) -> true.
+memoize(Index, Name, Result) ->
+  Memo = case ets:lookup(memo_table_name(), Index) of
+              [] -> [];
+              [{Index, Plist}] -> Plist
+         end,
+  ets:insert(memo_table_name(), {Index, [{Name, Result}|Memo]}).
+
+-spec get_memo(index(), atom()) -> {ok, term()} | {error, not_found}.
+get_memo(Index, Name) ->
+  case ets:lookup(memo_table_name(), Index) of
+    [] -> {error, not_found};
+    [{Index, Plist}] ->
+      case proplists:lookup(Name, Plist) of
+        {Name, Result}  -> {ok, Result};
+        _  -> {error, not_found}
+      end
+    end.
+
+-spec memo_table_name() -> ets:tid().
+memo_table_name() ->
+    get({parse_memo_table, ?MODULE}).
+
+-ifdef(p_eof).
+-spec p_eof() -> parse_fun().
+p_eof() ->
+  fun(<<>>, Index) -> {eof, [], Index};
+     (_, Index) -> {fail, {expected, eof, Index}} end.
+-endif.
+
+-ifdef(p_optional).
+-spec p_optional(parse_fun()) -> parse_fun().
+p_optional(P) ->
+  fun(Input, Index) ->
+      case P(Input, Index) of
+        {fail,_} -> {[], Input, Index};
+        {_, _, _} = Success -> Success
+      end
+  end.
+-endif.
+
+-ifdef(p_not).
+-spec p_not(parse_fun()) -> parse_fun().
+p_not(P) ->
+  fun(Input, Index)->
+      case P(Input,Index) of
+        {fail,_} ->
+          {[], Input, Index};
+        {Result, _, _} -> {fail, {expected, {no_match, Result},Index}}
+      end
+  end.
+-endif.
+
+-ifdef(p_assert).
+-spec p_assert(parse_fun()) -> parse_fun().
+p_assert(P) ->
+  fun(Input,Index) ->
+      case P(Input,Index) of
+        {fail,_} = Failure-> Failure;
+        _ -> {[], Input, Index}
+      end
+  end.
+-endif.
+
+-ifdef(p_seq).
+-spec p_seq([parse_fun()]) -> parse_fun().
+p_seq(P) ->
+  fun(Input, Index) ->
+      p_all(P, Input, Index, [])
+  end.
+
+-spec p_all([parse_fun()], input(), index(), [term()]) -> parse_result().
+p_all([], Inp, Index, Accum ) -> {lists:reverse( Accum ), Inp, Index};
+p_all([P|Parsers], Inp, Index, Accum) ->
+  case P(Inp, Index) of
+    {fail, _} = Failure -> Failure;
+    {Result, InpRem, NewIndex} -> p_all(Parsers, InpRem, NewIndex, [Result|Accum])
+  end.
+-endif.
+
+-ifdef(p_choose).
+-spec p_choose([parse_fun()]) -> parse_fun().
+p_choose(Parsers) ->
+  fun(Input, Index) ->
+      p_attempt(Parsers, Input, Index, none)
+  end.
+
+-spec p_attempt([parse_fun()], input(), index(), none | parse_failure()) -> parse_result().
+p_attempt([], _Input, _Index, Failure) -> Failure;
+p_attempt([P|Parsers], Input, Index, FirstFailure)->
+  case P(Input, Index) of
+    {fail, _} = Failure ->
+      case FirstFailure of
+        none -> p_attempt(Parsers, Input, Index, Failure);
+        _ -> p_attempt(Parsers, Input, Index, FirstFailure)
+      end;
+    Result -> Result
+  end.
+-endif.
+
+-ifdef(p_zero_or_more).
+-spec p_zero_or_more(parse_fun()) -> parse_fun().
+p_zero_or_more(P) ->
+  fun(Input, Index) ->
+      p_scan(P, Input, Index, [])
+  end.
+-endif.
+
+-ifdef(p_one_or_more).
+-spec p_one_or_more(parse_fun()) -> parse_fun().
+p_one_or_more(P) ->
+  fun(Input, Index)->
+      Result = p_scan(P, Input, Index, []),
+      case Result of
+        {[_|_], _, _} ->
+          Result;
+        _ ->
+          {fail, {expected, Failure, _}} = P(Input,Index),
+          {fail, {expected, {at_least_one, Failure}, Index}}
+      end
+  end.
+-endif.
+
+-ifdef(p_label).
+-spec p_label(atom(), parse_fun()) -> parse_fun().
+p_label(Tag, P) ->
+  fun(Input, Index) ->
+      case P(Input, Index) of
+        {fail,_} = Failure ->
+           Failure;
+        {Result, InpRem, NewIndex} ->
+          {{Tag, Result}, InpRem, NewIndex}
+      end
+  end.
+-endif.
+
+-ifdef(p_scan).
+-spec p_scan(parse_fun(), input(), index(), [term()]) -> {[term()], input(), index()}.
+p_scan(_, <<>>, Index, Accum) -> {lists:reverse(Accum), <<>>, Index};
+p_scan(P, Inp, Index, Accum) ->
+  case P(Inp, Index) of
+    {fail,_} -> {lists:reverse(Accum), Inp, Index};
+    {Result, InpRem, NewIndex} -> p_scan(P, InpRem, NewIndex, [Result | Accum])
+  end.
+-endif.
+
+-ifdef(p_string).
+-spec p_string(binary()) -> parse_fun().
+p_string(S) ->
+    Length = erlang:byte_size(S),
+    fun(Input, Index) ->
+      try
+          <<S:Length/binary, Rest/binary>> = Input,
+          {S, Rest, p_advance_index(S, Index)}
+      catch
+          error:{badmatch,_} -> {fail, {expected, {string, S}, Index}}
+      end
+    end.
+-endif.
+
+-ifdef(p_anything).
+-spec p_anything() -> parse_fun().
+p_anything() ->
+  fun(<<>>, Index) -> {fail, {expected, any_character, Index}};
+     (Input, Index) when is_binary(Input) ->
+          <<C/utf8, Rest/binary>> = Input,
+          {<<C/utf8>>, Rest, p_advance_index(<<C/utf8>>, Index)}
+  end.
+-endif.
+
+-ifdef(p_charclass).
+-spec p_charclass(string() | binary()) -> parse_fun().
+p_charclass(Class) ->
+    {ok, RE} = re:compile(Class, [unicode, dotall]),
+    fun(Inp, Index) ->
+            case re:run(Inp, RE, [anchored]) of
+                {match, [{0, Length}|_]} ->
+                    {Head, Tail} = erlang:split_binary(Inp, Length),
+                    {Head, Tail, p_advance_index(Head, Index)};
+                _ -> {fail, {expected, {character_class, binary_to_list(Class)}, Index}}
+            end
+    end.
+-endif.
+
+-ifdef(p_regexp).
+-spec p_regexp(binary()) -> parse_fun().
+p_regexp(Regexp) ->
+    {ok, RE} = re:compile(Regexp, [unicode, dotall, anchored]),
+    fun(Inp, Index) ->
+        case re:run(Inp, RE) of
+            {match, [{0, Length}|_]} ->
+                {Head, Tail} = erlang:split_binary(Inp, Length),
+                {Head, Tail, p_advance_index(Head, Index)};
+            _ -> {fail, {expected, {regexp, binary_to_list(Regexp)}, Index}}
+        end
+    end.
+-endif.
+
+-ifdef(line).
+-spec line(index() | term()) -> pos_integer() | undefined.
+line({{line,L},_}) -> L;
+line(_) -> undefined.
+-endif.
+
+-ifdef(column).
+-spec column(index() | term()) -> pos_integer() | undefined.
+column({_,{column,C}}) -> C;
+column(_) -> undefined.
+-endif.
+
+-spec p_advance_index(input() | unicode:charlist() | pos_integer(), index()) -> index().
+p_advance_index(MatchedInput, Index) when is_list(MatchedInput) orelse is_binary(MatchedInput)-> % strings
+  lists:foldl(fun p_advance_index/2, Index, unicode:characters_to_list(MatchedInput));
+p_advance_index(MatchedInput, Index) when is_integer(MatchedInput) -> % single characters
+  {{line, Line}, {column, Col}} = Index,
+  case MatchedInput of
+    $\n -> {{line, Line+1}, {column, 1}};
+    _ -> {{line, Line}, {column, Col+1}}
+  end.
similarity index 98%
rename from rabbitmq-server/src/gen_server2.erl
rename to deps/rabbit_common/src/gen_server2.erl
index ffc075da7f0c7d31eb60c24578418ffbea8f6bca..e2265f7d04a5e7f87e511564290625096730d078 100644 (file)
 -record(gs2_state, {parent, name, state, mod, time,
                     timeout_state, queue, debug, prioritisers}).
 
--ifdef(use_specs).
-
 %%%=========================================================================
 %%%  Specs. These exist only to shut up dialyzer's warnings
 %%%=========================================================================
 
--type(gs2_state() :: #gs2_state{}).
+-type gs2_state() :: #gs2_state{}.
 
--spec(handle_common_termination/3 ::
-        (any(), atom(), gs2_state()) -> no_return()).
--spec(hibernate/1 :: (gs2_state()) -> no_return()).
--spec(pre_hibernate/1 :: (gs2_state()) -> no_return()).
--spec(system_terminate/4 :: (_, _, _, gs2_state()) -> no_return()).
+-spec handle_common_termination(any(), atom(), gs2_state()) -> no_return().
+-spec hibernate(gs2_state()) -> no_return().
+-spec pre_hibernate(gs2_state()) -> no_return().
+-spec system_terminate(_, _, _, gs2_state()) -> no_return().
 
--type(millis() :: non_neg_integer()).
+-type millis() :: non_neg_integer().
 
 %%%=========================================================================
 %%%  API
 %% for handle_pre_hibernate/1 and handle_post_hibernate/1 will result
 %% in warnings (the same applied for the behaviour_info before).
 
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{init,1},{handle_call,3},{handle_cast,2},{handle_info,2},
-     {terminate,2},{code_change,3}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
-
 %%%  -----------------------------------------------------------------
 %%% Starts a generic server.
 %%% start(Mod, Args, Options)
@@ -624,7 +609,8 @@ unregister_name(_Name) -> ok.
 extend_backoff(undefined) ->
     undefined;
 extend_backoff({backoff, InitialTimeout, MinimumTimeout, DesiredHibPeriod}) ->
-    {backoff, InitialTimeout, MinimumTimeout, DesiredHibPeriod, now()}.
+    {backoff, InitialTimeout, MinimumTimeout, DesiredHibPeriod,
+      rand_compat:seed(exsplus)}.
 
 %%%========================================================================
 %%% Internal functions
@@ -695,7 +681,9 @@ wake_hib(GS2State = #gs2_state { timeout_state = TS }) ->
                         undefined ->
                             undefined;
                         {SleptAt, TimeoutState} ->
-                            adjust_timeout_state(SleptAt, now(), TimeoutState)
+                            adjust_timeout_state(SleptAt,
+                                                 time_compat:monotonic_time(),
+                                                 TimeoutState)
                     end,
     post_hibernate(
       drain(GS2State #gs2_state { timeout_state = TimeoutState1 })).
@@ -703,7 +691,8 @@ wake_hib(GS2State = #gs2_state { timeout_state = TS }) ->
 hibernate(GS2State = #gs2_state { timeout_state = TimeoutState }) ->
     TS = case TimeoutState of
              undefined             -> undefined;
-             {backoff, _, _, _, _} -> {now(), TimeoutState}
+             {backoff, _, _, _, _} -> {time_compat:monotonic_time(),
+                                       TimeoutState}
          end,
     proc_lib:hibernate(?MODULE, wake_hib,
                        [GS2State #gs2_state { timeout_state = TS }]).
@@ -748,7 +737,8 @@ post_hibernate(GS2State = #gs2_state { state = State,
 
 adjust_timeout_state(SleptAt, AwokeAt, {backoff, CurrentTO, MinimumTO,
                                         DesiredHibPeriod, RandomState}) ->
-    NapLengthMicros = timer:now_diff(AwokeAt, SleptAt),
+    NapLengthMicros = time_compat:convert_time_unit(AwokeAt - SleptAt,
+                                                    native, micro_seconds),
     CurrentMicros = CurrentTO * 1000,
     MinimumMicros = MinimumTO * 1000,
     DesiredHibMicros = DesiredHibPeriod * 1000,
@@ -760,7 +750,7 @@ adjust_timeout_state(SleptAt, AwokeAt, {backoff, CurrentTO, MinimumTO,
             true -> lists:max([MinimumTO, CurrentTO div 2]);
             false -> CurrentTO
         end,
-    {Extra, RandomState1} = random:uniform_s(Base, RandomState),
+    {Extra, RandomState1} = rand_compat:uniform_s(Base, RandomState),
     CurrentTO1 = Base + Extra,
     {backoff, CurrentTO1, MinimumTO, DesiredHibPeriod, RandomState1}.
 
similarity index 92%
rename from rabbitmq-server/src/mirrored_supervisor.erl
rename to deps/rabbit_common/src/mirrored_supervisor.erl
index 96c1418791592f5383f1d2742a9afa50b54fcf2d..ec21a8e32b63f45f370d8d13aec13582ba43e4b4 100644 (file)
 
 -define(SUPERVISOR, supervisor2).
 -define(GEN_SERVER, gen_server2).
--define(PG2,        pg2_fixed).
 -define(SUP_MODULE, mirrored_supervisor_sups).
 
 -define(TABLE, mirrored_sup_childspec).
                 delegate,
                 group,
                 tx_fun,
-                initial_childspecs}).
-
-%%----------------------------------------------------------------------------
-
--ifdef(use_specs).
+                initial_childspecs,
+                child_order}).
 
 %%--------------------------------------------------------------------------
 %% Callback behaviour
 -spec create_tables() -> Result when
       Result :: 'ok'.
 
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) -> [{init,1}];
-behaviour_info(_Other)    -> undefined.
-
--endif.
-
 %%----------------------------------------------------------------------------
 
 start_link(Group, TxFun, Mod, Args) ->
@@ -266,7 +253,7 @@ fold(FunAtom, Sup, AggFun) ->
     Group = call(Sup, group),
     lists:foldl(AggFun, [],
                 [apply(?SUPERVISOR, FunAtom, [D]) ||
-                    M <- ?PG2:get_members(Group),
+                    M <- pg2:get_members(Group),
                     D <- [delegate(M)]]).
 
 child(Sup, Id) ->
@@ -288,7 +275,8 @@ start_internal(Group, TxFun, ChildSpecs) ->
 init({Group, TxFun, ChildSpecs}) ->
     {ok, #state{group              = Group,
                 tx_fun             = TxFun,
-                initial_childspecs = ChildSpecs}}.
+                initial_childspecs = ChildSpecs,
+                child_order = child_order_from(ChildSpecs)}}.
 
 handle_call({init, Overall}, _From,
             State = #state{overall            = undefined,
@@ -297,9 +285,9 @@ handle_call({init, Overall}, _From,
                            tx_fun             = TxFun,
                            initial_childspecs = ChildSpecs}) ->
     process_flag(trap_exit, true),
-    ?PG2:create(Group),
-    ok = ?PG2:join(Group, Overall),
-    Rest = ?PG2:get_members(Group) -- [Overall],
+    pg2:create(Group),
+    ok = pg2:join(Group, Overall),
+    Rest = pg2:get_members(Group) -- [Overall],
     case Rest of
         [] -> TxFun(fun() -> delete_all(Group) end);
         _  -> ok
@@ -347,7 +335,7 @@ handle_cast({ensure_monitoring, Pid}, State) ->
     {noreply, State};
 
 handle_cast({die, Reason}, State = #state{group = Group}) ->
-    tell_all_peers_to_die(Group, Reason),
+    _ = tell_all_peers_to_die(Group, Reason),
     {stop, Reason, State};
 
 handle_cast(Msg, State) ->
@@ -364,20 +352,23 @@ handle_info({'DOWN', _Ref, process, Pid, Reason},
     %%
     %% Therefore if we get here we know we need to cause the entire
     %% mirrored sup to shut down, not just fail over.
-    tell_all_peers_to_die(Group, Reason),
+    _ = tell_all_peers_to_die(Group, Reason),
     {stop, Reason, State};
 
 handle_info({'DOWN', _Ref, process, Pid, _Reason},
             State = #state{delegate = Delegate,
                            group    = Group,
                            tx_fun   = TxFun,
-                           overall  = O}) ->
+                           overall  = O,
+                           child_order = ChildOrder}) ->
     %% TODO load balance this
     %% No guarantee pg2 will have received the DOWN before us.
-    R = case lists:sort(?PG2:get_members(Group)) -- [Pid] of
+    R = case lists:sort(pg2:get_members(Group)) -- [Pid] of
             [O | _] -> ChildSpecs =
                            TxFun(fun() -> update_all(O, Pid) end),
-                       [start(Delegate, ChildSpec) || ChildSpec <- ChildSpecs];
+                       [start(Delegate, ChildSpec)
+                        || ChildSpec <- restore_child_order(ChildSpecs,
+                           ChildOrder)];
             _       -> []
         end,
     case errors(R) of
@@ -397,7 +388,7 @@ code_change(_OldVsn, State, _Extra) ->
 %%----------------------------------------------------------------------------
 
 tell_all_peers_to_die(Group, Reason) ->
-    [cast(P, {die, Reason}) || P <- ?PG2:get_members(Group) -- [self()]].
+    [cast(P, {die, Reason}) || P <- pg2:get_members(Group) -- [self()]].
 
 maybe_start(Group, TxFun, Overall, Delegate, ChildSpec) ->
     try TxFun(fun() -> check_start(Group, Overall, Delegate, ChildSpec) end) of
@@ -411,14 +402,14 @@ maybe_start(Group, TxFun, Overall, Delegate, ChildSpec) ->
 
 check_start(Group, Overall, Delegate, ChildSpec) ->
     case mnesia:wread({?TABLE, {Group, id(ChildSpec)}}) of
-        []  -> write(Group, Overall, ChildSpec),
+        []  -> _ = write(Group, Overall, ChildSpec),
                start;
         [S] -> #mirrored_sup_childspec{key           = {Group, Id},
                                        mirroring_pid = Pid} = S,
                case Overall of
                    Pid -> child(Delegate, Id);
                    _   -> case supervisor(Pid) of
-                              dead      -> write(Group, Overall, ChildSpec),
+                              dead      -> _ = write(Group, Overall, ChildSpec),
                                            start;
                               Delegate0 -> child(Delegate0, Id)
                           end
@@ -515,3 +506,14 @@ add_proplists([{K1, _} = KV | P1], [{K2, _} | _] = P2, Acc) when K1 < K2 ->
     add_proplists(P1, P2, [KV | Acc]);
 add_proplists(P1, [KV | P2], Acc) ->
     add_proplists(P1, P2, [KV | Acc]).
+
+child_order_from(ChildSpecs) ->
+    lists:zipwith(fun(C, N) ->
+                          {id(C), N}
+                  end, ChildSpecs, lists:seq(1, length(ChildSpecs))).
+
+restore_child_order(ChildSpecs, ChildOrder) ->
+    lists:sort(fun(A, B) ->
+                       proplists:get_value(id(A), ChildOrder)
+                           < proplists:get_value(id(B), ChildOrder)
+               end, ChildSpecs).
similarity index 84%
rename from rabbitmq-server/src/pmon.erl
rename to deps/rabbit_common/src/pmon.erl
index f42530022a0e6a2a3f881ceb638fd1b07b9cae87..5263333f56bc5ea5ee8c23a92c9d5bed934eb92c 100644 (file)
 
 -record(state, {dict, module}).
 
--ifdef(use_specs).
-
 %%----------------------------------------------------------------------------
 
 -export_type([?MODULE/0]).
 
--opaque(?MODULE() :: #state{dict   :: dict:dict(),
+-include("include/old_builtin_types.hrl").
+
+-opaque(?MODULE() :: #state{dict   :: ?DICT_TYPE(),
                             module :: atom()}).
 
 -type(item()         :: pid() | {atom(), node()}).
 
--spec(new/0          :: () -> ?MODULE()).
--spec(new/1          :: ('erlang' | 'delegate') -> ?MODULE()).
--spec(monitor/2      :: (item(), ?MODULE()) -> ?MODULE()).
--spec(monitor_all/2  :: ([item()], ?MODULE()) -> ?MODULE()).
--spec(demonitor/2    :: (item(), ?MODULE()) -> ?MODULE()).
--spec(is_monitored/2 :: (item(), ?MODULE()) -> boolean()).
--spec(erase/2        :: (item(), ?MODULE()) -> ?MODULE()).
--spec(monitored/1    :: (?MODULE()) -> [item()]).
--spec(is_empty/1     :: (?MODULE()) -> boolean()).
-
--endif.
+-spec new() -> ?MODULE().
+-spec new('erlang' | 'delegate') -> ?MODULE().
+-spec monitor(item(), ?MODULE()) -> ?MODULE().
+-spec monitor_all([item()], ?MODULE()) -> ?MODULE().
+-spec demonitor(item(), ?MODULE()) -> ?MODULE().
+-spec is_monitored(item(), ?MODULE()) -> boolean().
+-spec erase(item(), ?MODULE()) -> ?MODULE().
+-spec monitored(?MODULE()) -> [item()].
+-spec is_empty(?MODULE()) -> boolean().
 
 new() -> new(erlang).
 
@@ -84,7 +82,7 @@ demonitor(Item, S = #state{dict = M, module = Module}) ->
     case dict:find(Item, M) of
         {ok, MRef} -> Module:demonitor(MRef),
                       S#state{dict = dict:erase(Item, M)};
-        error      -> M
+        error      -> S
     end.
 
 is_monitored(Item, #state{dict = M}) -> dict:is_key(Item, M).
similarity index 89%
rename from rabbitmq-server/src/priority_queue.erl
rename to deps/rabbit_common/src/priority_queue.erl
index 88c69513d7d0962980eb3dd33beac11e2e098a46..81969477c34661cea2a0b7caf9f9a06edc34cc85 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% Priority queues have essentially the same interface as ordinary
@@ -45,8 +45,6 @@
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([q/0]).
 
 -type(q() :: pqueue()).
 -type(squeue() :: {queue, [any()], [any()], non_neg_integer()}).
 -type(pqueue() ::  squeue() | {pqueue, [{priority(), squeue()}]}).
 
--spec(new/0 :: () -> pqueue()).
--spec(is_queue/1 :: (any()) -> boolean()).
--spec(is_empty/1 :: (pqueue()) -> boolean()).
--spec(len/1 :: (pqueue()) -> non_neg_integer()).
--spec(to_list/1 :: (pqueue()) -> [{priority(), any()}]).
--spec(from_list/1 :: ([{priority(), any()}]) -> pqueue()).
--spec(in/2 :: (any(), pqueue()) -> pqueue()).
--spec(in/3 :: (any(), priority(), pqueue()) -> pqueue()).
--spec(out/1 :: (pqueue()) -> {empty | {value, any()}, pqueue()}).
--spec(out_p/1 :: (pqueue()) -> {empty | {value, any(), priority()}, pqueue()}).
--spec(join/2 :: (pqueue(), pqueue()) -> pqueue()).
--spec(filter/2 :: (fun ((any()) -> boolean()), pqueue()) -> pqueue()).
--spec(fold/3 ::
-        (fun ((any(), priority(), A) -> A), A, pqueue()) -> A).
--spec(highest/1 :: (pqueue()) -> priority() | 'empty').
-
--endif.
+-spec new() -> pqueue().
+-spec is_queue(any()) -> boolean().
+-spec is_empty(pqueue()) -> boolean().
+-spec len(pqueue()) -> non_neg_integer().
+-spec to_list(pqueue()) -> [{priority(), any()}].
+-spec from_list([{priority(), any()}]) -> pqueue().
+-spec in(any(), pqueue()) -> pqueue().
+-spec in(any(), priority(), pqueue()) -> pqueue().
+-spec out(pqueue()) -> {empty | {value, any()}, pqueue()}.
+-spec out_p(pqueue()) -> {empty | {value, any(), priority()}, pqueue()}.
+-spec join(pqueue(), pqueue()) -> pqueue().
+-spec filter(fun ((any()) -> boolean()), pqueue()) -> pqueue().
+-spec fold
+        (fun ((any(), priority(), A) -> A), A, pqueue()) -> A.
+-spec highest(pqueue()) -> priority() | 'empty'.
 
 %%----------------------------------------------------------------------------
 
similarity index 69%
rename from rabbitmq-server/src/rabbit_amqqueue.erl
rename to deps/rabbit_common/src/rabbit_amqqueue.erl
index f6cc0fbddab374b03a20080e82b8dede5b468c22..64dc33881a6b2dc3f65fa84525b7ed3348559fa4 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqqueue).
          assert_equivalence/5,
          check_exclusive_access/2, with_exclusive_access_or_die/3,
          stat/1, deliver/2, requeue/3, ack/3, reject/4]).
--export([list/0, list/1, info_keys/0, info/1, info/2, info_all/1, info_all/2]).
+-export([list/0, list/1, info_keys/0, info/1, info/2, info_all/1, info_all/2,
+         info_all/5, info_local/1]).
 -export([list_down/1]).
 -export([force_event_refresh/1, notify_policy_changed/1]).
--export([consumers/1, consumers_all/1, consumer_info_keys/0]).
+-export([consumers/1, consumers_all/1,  consumers_all/3, consumer_info_keys/0]).
 -export([basic_get/4, basic_consume/10, basic_cancel/4, notify_decorators/1]).
 -export([notify_sent/2, notify_sent_queue_down/1, resume/2]).
--export([notify_down_all/2, activate_limit_all/2, credit/5]).
+-export([notify_down_all/2, notify_down_all/3, activate_limit_all/2, credit/5]).
 -export([on_node_up/1, on_node_down/1]).
 -export([update/2, store_queue/1, update_decorators/1, policy_changed/2]).
--export([start_mirroring/1, stop_mirroring/1, sync_mirrors/1,
-         cancel_sync_mirrors/1]).
+-export([update_mirroring/1, sync_mirrors/1, cancel_sync_mirrors/1, is_mirrored/1]).
+
+-export([pid_of/1, pid_of/2]).
 
 %% internal
 -export([internal_declare/2, internal_delete/1, run_backing_queue/3,
 -include("rabbit.hrl").
 -include_lib("stdlib/include/qlc.hrl").
 
--define(INTEGER_ARG_TYPES, [byte, short, signedint, long]).
+-define(INTEGER_ARG_TYPES, [byte, short, signedint, long,
+                            unsignedbyte, unsignedshort, unsignedint]).
 
 -define(MORE_CONSUMER_CREDIT_AFTER, 50).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([name/0, qmsg/0, absent_reason/0]).
 
--type(name() :: rabbit_types:r('queue')).
--type(qpids() :: [pid()]).
--type(qlen() :: rabbit_types:ok(non_neg_integer())).
--type(qfun(A) :: fun ((rabbit_types:amqqueue()) -> A | no_return())).
--type(qmsg() :: {name(), pid(), msg_id(), boolean(), rabbit_types:message()}).
--type(msg_id() :: non_neg_integer()).
--type(ok_or_errors() ::
-        'ok' | {'error', [{'error' | 'exit' | 'throw', any()}]}).
--type(absent_reason() :: 'nodedown' | 'crashed').
--type(queue_or_absent() :: rabbit_types:amqqueue() |
-                           {'absent', rabbit_types:amqqueue(),absent_reason()}).
--type(not_found_or_absent() ::
-        'not_found' | {'absent', rabbit_types:amqqueue(), absent_reason()}).
--spec(recover/0 :: () -> [rabbit_types:amqqueue()]).
--spec(stop/0 :: () -> 'ok').
--spec(start/1 :: ([rabbit_types:amqqueue()]) -> 'ok').
--spec(declare/5 ::
-        (name(), boolean(), boolean(),
-         rabbit_framing:amqp_table(), rabbit_types:maybe(pid()))
-        -> {'new' | 'existing' | 'absent' | 'owner_died',
-            rabbit_types:amqqueue()} | rabbit_types:channel_exit()).
--spec(declare/6 ::
-        (name(), boolean(), boolean(),
-         rabbit_framing:amqp_table(), rabbit_types:maybe(pid()), node())
-        -> {'new' | 'existing' | 'owner_died', rabbit_types:amqqueue()} |
-           {'absent', rabbit_types:amqqueue(), absent_reason()} |
-           rabbit_types:channel_exit()).
--spec(internal_declare/2 ::
-        (rabbit_types:amqqueue(), boolean())
-        -> queue_or_absent() | rabbit_misc:thunk(queue_or_absent())).
--spec(update/2 ::
-        (name(),
-         fun((rabbit_types:amqqueue()) -> rabbit_types:amqqueue()))
-         -> 'not_found' | rabbit_types:amqqueue()).
--spec(lookup/1 ::
-        (name()) -> rabbit_types:ok(rabbit_types:amqqueue()) |
-                    rabbit_types:error('not_found');
-        ([name()]) -> [rabbit_types:amqqueue()]).
--spec(not_found_or_absent/1 :: (name()) -> not_found_or_absent()).
--spec(with/2 :: (name(), qfun(A)) ->
-                     A | rabbit_types:error(not_found_or_absent())).
--spec(with/3 :: (name(), qfun(A), fun((not_found_or_absent()) -> B)) -> A | B).
--spec(with_or_die/2 ::
-        (name(), qfun(A)) -> A | rabbit_types:channel_exit()).
--spec(assert_equivalence/5 ::
+-type name() :: rabbit_types:r('queue').
+-type qpids() :: [pid()].
+-type qlen() :: rabbit_types:ok(non_neg_integer()).
+-type qfun(A) :: fun ((rabbit_types:amqqueue()) -> A | no_return()).
+-type qmsg() :: {name(), pid(), msg_id(), boolean(), rabbit_types:message()}.
+-type msg_id() :: non_neg_integer().
+-type ok_or_errors() ::
+        'ok' | {'error', [{'error' | 'exit' | 'throw', any()}]}.
+-type absent_reason() :: 'nodedown' | 'crashed'.
+-type queue_or_absent() :: rabbit_types:amqqueue() |
+                           {'absent', rabbit_types:amqqueue(),absent_reason()}.
+-type not_found_or_absent() ::
+        'not_found' | {'absent', rabbit_types:amqqueue(), absent_reason()}.
+-spec recover() -> [rabbit_types:amqqueue()].
+-spec stop() -> 'ok'.
+-spec start([rabbit_types:amqqueue()]) -> 'ok'.
+-spec declare
+        (name(), boolean(), boolean(), rabbit_framing:amqp_table(),
+         rabbit_types:maybe(pid())) ->
+            {'new' | 'existing' | 'absent' | 'owner_died',
+             rabbit_types:amqqueue()} |
+            rabbit_types:channel_exit().
+-spec declare
+        (name(), boolean(), boolean(), rabbit_framing:amqp_table(),
+         rabbit_types:maybe(pid()), node()) ->
+            {'new' | 'existing' | 'owner_died', rabbit_types:amqqueue()} |
+            {'absent', rabbit_types:amqqueue(), absent_reason()} |
+            rabbit_types:channel_exit().
+-spec internal_declare(rabbit_types:amqqueue(), boolean()) ->
+          queue_or_absent() | rabbit_misc:thunk(queue_or_absent()).
+-spec update
+        (name(), fun((rabbit_types:amqqueue()) -> rabbit_types:amqqueue())) ->
+            'not_found' | rabbit_types:amqqueue().
+-spec lookup
+        (name()) ->
+            rabbit_types:ok(rabbit_types:amqqueue()) |
+            rabbit_types:error('not_found');
+        ([name()]) ->
+            [rabbit_types:amqqueue()].
+-spec not_found_or_absent(name()) -> not_found_or_absent().
+-spec with(name(), qfun(A)) ->
+          A | rabbit_types:error(not_found_or_absent()).
+-spec with(name(), qfun(A), fun((not_found_or_absent()) -> B)) -> A | B.
+-spec with_or_die(name(), qfun(A)) -> A | rabbit_types:channel_exit().
+-spec assert_equivalence
         (rabbit_types:amqqueue(), boolean(), boolean(),
-         rabbit_framing:amqp_table(), rabbit_types:maybe(pid()))
-        -> 'ok' | rabbit_types:channel_exit() |
-           rabbit_types:connection_exit()).
--spec(check_exclusive_access/2 ::
-        (rabbit_types:amqqueue(), pid())
-        -> 'ok' | rabbit_types:channel_exit()).
--spec(with_exclusive_access_or_die/3 ::
-        (name(), pid(), qfun(A)) -> A | rabbit_types:channel_exit()).
--spec(list/0 :: () -> [rabbit_types:amqqueue()]).
--spec(list/1 :: (rabbit_types:vhost()) -> [rabbit_types:amqqueue()]).
--spec(list_down/1 :: (rabbit_types:vhost()) -> [rabbit_types:amqqueue()]).
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(info/1 :: (rabbit_types:amqqueue()) -> rabbit_types:infos()).
--spec(info/2 ::
-        (rabbit_types:amqqueue(), rabbit_types:info_keys())
-        -> rabbit_types:infos()).
--spec(info_all/1 :: (rabbit_types:vhost()) -> [rabbit_types:infos()]).
--spec(info_all/2 :: (rabbit_types:vhost(), rabbit_types:info_keys())
-                    -> [rabbit_types:infos()]).
--spec(force_event_refresh/1 :: (reference()) -> 'ok').
--spec(notify_policy_changed/1 :: (rabbit_types:amqqueue()) -> 'ok').
--spec(consumers/1 :: (rabbit_types:amqqueue())
-                     -> [{pid(), rabbit_types:ctag(), boolean(),
-                          non_neg_integer(), rabbit_framing:amqp_table()}]).
--spec(consumer_info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(consumers_all/1 ::
-        (rabbit_types:vhost())
-        -> [{name(), pid(), rabbit_types:ctag(), boolean(),
-             non_neg_integer(), rabbit_framing:amqp_table()}]).
--spec(stat/1 ::
-        (rabbit_types:amqqueue())
-        -> {'ok', non_neg_integer(), non_neg_integer()}).
--spec(delete_immediately/1 :: (qpids()) -> 'ok').
--spec(delete/3 ::
-        (rabbit_types:amqqueue(), 'false', 'false')
-        -> qlen();
-        (rabbit_types:amqqueue(), 'true' , 'false')
-        -> qlen() | rabbit_types:error('in_use');
-        (rabbit_types:amqqueue(), 'false', 'true' )
-        -> qlen() | rabbit_types:error('not_empty');
-        (rabbit_types:amqqueue(), 'true' , 'true' )
-        -> qlen() |
-           rabbit_types:error('in_use') |
-           rabbit_types:error('not_empty')).
--spec(delete_crashed/1 :: (rabbit_types:amqqueue()) -> 'ok').
--spec(delete_crashed_internal/1 :: (rabbit_types:amqqueue()) -> 'ok').
--spec(purge/1 :: (rabbit_types:amqqueue()) -> qlen()).
--spec(forget_all_durable/1 :: (node()) -> 'ok').
--spec(deliver/2 :: ([rabbit_types:amqqueue()], rabbit_types:delivery()) ->
-                        qpids()).
--spec(requeue/3 :: (pid(), [msg_id()],  pid()) -> 'ok').
--spec(ack/3 :: (pid(), [msg_id()], pid()) -> 'ok').
--spec(reject/4 :: (pid(), [msg_id()], boolean(), pid()) -> 'ok').
--spec(notify_down_all/2 :: (qpids(), pid()) -> ok_or_errors()).
--spec(activate_limit_all/2 :: (qpids(), pid()) -> ok_or_errors()).
--spec(basic_get/4 :: (rabbit_types:amqqueue(), pid(), boolean(), pid()) ->
-                          {'ok', non_neg_integer(), qmsg()} | 'empty').
--spec(credit/5 :: (rabbit_types:amqqueue(), pid(), rabbit_types:ctag(),
-                   non_neg_integer(), boolean()) -> 'ok').
--spec(basic_consume/10 ::
+         rabbit_framing:amqp_table(), rabbit_types:maybe(pid())) ->
+            'ok' | rabbit_types:channel_exit() | rabbit_types:connection_exit().
+-spec check_exclusive_access(rabbit_types:amqqueue(), pid()) ->
+          'ok' | rabbit_types:channel_exit().
+-spec with_exclusive_access_or_die(name(), pid(), qfun(A)) ->
+          A | rabbit_types:channel_exit().
+-spec list() -> [rabbit_types:amqqueue()].
+-spec list(rabbit_types:vhost()) -> [rabbit_types:amqqueue()].
+-spec list_down(rabbit_types:vhost()) -> [rabbit_types:amqqueue()].
+-spec info_keys() -> rabbit_types:info_keys().
+-spec info(rabbit_types:amqqueue()) -> rabbit_types:infos().
+-spec info(rabbit_types:amqqueue(), rabbit_types:info_keys()) ->
+          rabbit_types:infos().
+-spec info_all(rabbit_types:vhost()) -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:vhost(), rabbit_types:info_keys()) ->
+          [rabbit_types:infos()].
+-type info_all_filter() :: 'all' | 'online' | 'offline' | 'local'.
+-spec info_all
+        (rabbit_types:vhost(), rabbit_types:info_keys(), info_all_filter(),
+         reference(), pid()) ->
+            'ok'.
+-spec force_event_refresh(reference()) -> 'ok'.
+-spec notify_policy_changed(rabbit_types:amqqueue()) -> 'ok'.
+-spec consumers(rabbit_types:amqqueue()) ->
+          [{pid(), rabbit_types:ctag(), boolean(), non_neg_integer(),
+            rabbit_framing:amqp_table()}].
+-spec consumer_info_keys() -> rabbit_types:info_keys().
+-spec consumers_all(rabbit_types:vhost()) ->
+          [{name(), pid(), rabbit_types:ctag(), boolean(),
+            non_neg_integer(), rabbit_framing:amqp_table()}].
+-spec consumers_all(rabbit_types:vhost(), reference(), pid()) -> 'ok'.
+-spec stat(rabbit_types:amqqueue()) ->
+          {'ok', non_neg_integer(), non_neg_integer()}.
+-spec delete_immediately(qpids()) -> 'ok'.
+-spec delete
+        (rabbit_types:amqqueue(), 'false', 'false') ->
+            qlen();
+        (rabbit_types:amqqueue(), 'true' , 'false') ->
+            qlen() | rabbit_types:error('in_use');
+        (rabbit_types:amqqueue(), 'false', 'true' ) ->
+            qlen() | rabbit_types:error('not_empty');
+        (rabbit_types:amqqueue(), 'true' , 'true' ) ->
+            qlen() |
+            rabbit_types:error('in_use') |
+            rabbit_types:error('not_empty').
+-spec delete_crashed(rabbit_types:amqqueue()) -> 'ok'.
+-spec delete_crashed_internal(rabbit_types:amqqueue()) -> 'ok'.
+-spec purge(rabbit_types:amqqueue()) -> qlen().
+-spec forget_all_durable(node()) -> 'ok'.
+-spec deliver([rabbit_types:amqqueue()], rabbit_types:delivery()) ->
+                        qpids().
+-spec requeue(pid(), [msg_id()],  pid()) -> 'ok'.
+-spec ack(pid(), [msg_id()], pid()) -> 'ok'.
+-spec reject(pid(), [msg_id()], boolean(), pid()) -> 'ok'.
+-spec notify_down_all(qpids(), pid()) -> ok_or_errors().
+-spec notify_down_all(qpids(), pid(), non_neg_integer()) ->
+          ok_or_errors().
+-spec activate_limit_all(qpids(), pid()) -> ok_or_errors().
+-spec basic_get(rabbit_types:amqqueue(), pid(), boolean(), pid()) ->
+          {'ok', non_neg_integer(), qmsg()} | 'empty'.
+-spec credit
+        (rabbit_types:amqqueue(), pid(), rabbit_types:ctag(), non_neg_integer(),
+         boolean()) ->
+            'ok'.
+-spec basic_consume
         (rabbit_types:amqqueue(), boolean(), pid(), pid(), boolean(),
          non_neg_integer(), rabbit_types:ctag(), boolean(),
-         rabbit_framing:amqp_table(), any())
-        -> rabbit_types:ok_or_error('exclusive_consume_unavailable')).
--spec(basic_cancel/4 ::
-        (rabbit_types:amqqueue(), pid(), rabbit_types:ctag(), any()) -> 'ok').
--spec(notify_decorators/1 :: (rabbit_types:amqqueue()) -> 'ok').
--spec(notify_sent/2 :: (pid(), pid()) -> 'ok').
--spec(notify_sent_queue_down/1 :: (pid()) -> 'ok').
--spec(resume/2 :: (pid(), pid()) -> 'ok').
--spec(internal_delete/1 ::
-        (name()) -> rabbit_types:ok_or_error('not_found') |
-                    rabbit_types:connection_exit() |
-                    fun (() -> rabbit_types:ok_or_error('not_found') |
-                               rabbit_types:connection_exit())).
--spec(run_backing_queue/3 ::
-        (pid(), atom(),
-         (fun ((atom(), A) -> {[rabbit_types:msg_id()], A}))) -> 'ok').
--spec(set_ram_duration_target/2 :: (pid(), number() | 'infinity') -> 'ok').
--spec(set_maximum_since_use/2 :: (pid(), non_neg_integer()) -> 'ok').
--spec(on_node_up/1 :: (node()) -> 'ok').
--spec(on_node_down/1 :: (node()) -> 'ok').
--spec(pseudo_queue/2 :: (name(), pid()) -> rabbit_types:amqqueue()).
--spec(immutable/1 :: (rabbit_types:amqqueue()) -> rabbit_types:amqqueue()).
--spec(store_queue/1 :: (rabbit_types:amqqueue()) -> 'ok').
--spec(update_decorators/1 :: (name()) -> 'ok').
--spec(policy_changed/2 ::
-        (rabbit_types:amqqueue(), rabbit_types:amqqueue()) -> 'ok').
--spec(start_mirroring/1 :: (pid()) -> 'ok').
--spec(stop_mirroring/1 :: (pid()) -> 'ok').
--spec(sync_mirrors/1 :: (pid()) -> 'ok' | rabbit_types:error('not_mirrored')).
--spec(cancel_sync_mirrors/1 :: (pid()) -> 'ok' | {'ok', 'not_syncing'}).
-
--endif.
+         rabbit_framing:amqp_table(), any()) ->
+            rabbit_types:ok_or_error('exclusive_consume_unavailable').
+-spec basic_cancel
+        (rabbit_types:amqqueue(), pid(), rabbit_types:ctag(), any()) -> 'ok'.
+-spec notify_decorators(rabbit_types:amqqueue()) -> 'ok'.
+-spec notify_sent(pid(), pid()) -> 'ok'.
+-spec notify_sent_queue_down(pid()) -> 'ok'.
+-spec resume(pid(), pid()) -> 'ok'.
+-spec internal_delete(name()) ->
+          rabbit_types:ok_or_error('not_found') |
+          rabbit_types:connection_exit() |
+          fun (() ->
+              rabbit_types:ok_or_error('not_found') |
+              rabbit_types:connection_exit()).
+-spec run_backing_queue
+        (pid(), atom(), (fun ((atom(), A) -> {[rabbit_types:msg_id()], A}))) ->
+            'ok'.
+-spec set_ram_duration_target(pid(), number() | 'infinity') -> 'ok'.
+-spec set_maximum_since_use(pid(), non_neg_integer()) -> 'ok'.
+-spec on_node_up(node()) -> 'ok'.
+-spec on_node_down(node()) -> 'ok'.
+-spec pseudo_queue(name(), pid()) -> rabbit_types:amqqueue().
+-spec immutable(rabbit_types:amqqueue()) -> rabbit_types:amqqueue().
+-spec store_queue(rabbit_types:amqqueue()) -> 'ok'.
+-spec update_decorators(name()) -> 'ok'.
+-spec policy_changed(rabbit_types:amqqueue(), rabbit_types:amqqueue()) ->
+          'ok'.
+-spec update_mirroring(pid()) -> 'ok'.
+-spec sync_mirrors(rabbit_types:amqqueue() | pid()) ->
+          'ok' | rabbit_types:error('not_mirrored').
+-spec cancel_sync_mirrors(rabbit_types:amqqueue() | pid()) ->
+          'ok' | {'ok', 'not_syncing'}.
+-spec is_mirrored(rabbit_types:amqqueue()) -> boolean().
+
+-spec pid_of(rabbit_types:amqqueue()) ->
+          {'ok', pid()} | rabbit_types:error('not_found').
+-spec pid_of(rabbit_types:vhost(), rabbit_misc:resource_name()) ->
+          {'ok', pid()} | rabbit_types:error('not_found').
 
 %%----------------------------------------------------------------------------
 
@@ -205,6 +217,19 @@ recover() ->
     %% faster than other nodes handled DOWN messages from us.
     on_node_down(node()),
     DurableQueues = find_durable_queues(),
+    L = length(DurableQueues),
+
+    %% if there are not enough file handles, the server might hang
+    %% when trying to recover queues, warn the user:
+    case file_handle_cache:get_limit() < L of
+        true ->
+            rabbit_log:warning(
+              "Recovering ~p queues, available file handles: ~p. Please increase max open file handles limit to at least ~p!~n",
+              [L, file_handle_cache:get_limit(), L]);
+        false ->
+            ok
+    end,
+
     {ok, BQ} = application:get_env(rabbit, backing_queue_module),
 
     %% We rely on BQ:start/1 returning the recovery terms in the same
@@ -240,8 +265,13 @@ find_durable_queues() ->
               qlc:e(qlc:q([Q || Q = #amqqueue{name = Name,
                                               pid  = Pid}
                                     <- mnesia:table(rabbit_durable_queue),
-                                node(Pid) == Node,
-                                mnesia:read(rabbit_queue, Name, read) =:= []]))
+                                node(Pid) == Node andalso
+                               %% Terminations on node down will not remove the rabbit_queue
+                               %% record if it is a mirrored queue (such info is now obtained from
+                               %% the policy). Thus, we must check if the local pid is alive
+                               %% - if the record is present - in order to restart.
+                                                   (mnesia:read(rabbit_queue, Name, read) =:= []
+                                                    orelse not erlang:is_process_alive(Pid))]))
       end).
 
 recover_durable_queues(QueuesAndRecoveryTerms) ->
@@ -273,10 +303,17 @@ declare(QueueName, Durable, AutoDelete, Args, Owner, Node) ->
                                       sync_slave_pids    = [],
                                       recoverable_slaves = [],
                                       gm_pids            = [],
-                                      state              = live})),
-    Node = rabbit_mirror_queue_misc:initial_queue_node(Q, Node),
+                                      state              = live,
+                                      policy_version     = 0 })),
+
+    Node1 = case rabbit_queue_master_location_misc:get_location(Q)  of
+              {ok, Node0}  -> Node0;
+              {error, _}   -> Node
+            end,
+
+    Node1 = rabbit_mirror_queue_misc:initial_queue_node(Q, Node1),
     gen_server2:call(
-      rabbit_amqqueue_sup_sup:start_queue_process(Node, Q, declare),
+      rabbit_amqqueue_sup_sup:start_queue_process(Node1, Q, declare),
       {init, new}, infinity).
 
 internal_declare(Q, true) ->
@@ -385,7 +422,14 @@ not_found_or_absent_dirty(Name) ->
     end.
 
 with(Name, F, E) ->
+    with(Name, F, E, 2000).
+
+with(Name, F, E, RetriesLeft) ->
     case lookup(Name) of
+        {ok, Q = #amqqueue{}} when RetriesLeft =:= 0 ->
+            %% Something bad happened to that queue, we are bailing out
+            %% on processing current request.
+            E({absent, Q, timeout});
         {ok, Q = #amqqueue{state = crashed}} ->
             E({absent, Q, crashed});
         {ok, Q = #amqqueue{pid = QPid}} ->
@@ -397,8 +441,8 @@ with(Name, F, E) ->
             %% the retry loop.
             rabbit_misc:with_exit_handler(
               fun () -> false = rabbit_mnesia:is_process_alive(QPid),
-                        timer:sleep(25),
-                        with(Name, F, E)
+                        timer:sleep(30),
+                        with(Name, F, E, RetriesLeft - 1)
               end, fun () -> F(Q) end);
         {error, not_found} ->
             E(not_found_or_absent_dirty(Name))
@@ -468,7 +512,8 @@ declare_args() ->
      {<<"x-dead-letter-routing-key">>, fun check_dlxrk_arg/2},
      {<<"x-max-length">>,              fun check_non_neg_int_arg/2},
      {<<"x-max-length-bytes">>,        fun check_non_neg_int_arg/2},
-     {<<"x-max-priority">>,            fun check_non_neg_int_arg/2}].
+     {<<"x-max-priority">>,            fun check_non_neg_int_arg/2},
+     {<<"x-queue-mode">>,              fun check_queue_mode/2}].
 
 consume_args() -> [{<<"x-priority">>,              fun check_int_arg/2},
                    {<<"x-cancel-on-ha-failover">>, fun check_bool_arg/2}].
@@ -515,6 +560,14 @@ check_dlxrk_arg({longstr, _}, Args) ->
 check_dlxrk_arg({Type,    _}, _Args) ->
     {error, {unacceptable_type, Type}}.
 
+check_queue_mode({longstr, Val}, _Args) ->
+    case lists:member(Val, [<<"default">>, <<"lazy">>]) of
+        true  -> ok;
+        false -> {error, invalid_queue_mode}
+    end;
+check_queue_mode({Type,    _}, _Args) ->
+    {error, {unacceptable_type, Type}}.
+
 list() -> mnesia:dirty_match_object(rabbit_queue, #amqqueue{_ = '_'}).
 
 list(VHostPath) -> list(VHostPath, rabbit_queue).
@@ -580,6 +633,38 @@ info_all(VHostPath, Items) ->
     map(list(VHostPath), fun (Q) -> info(Q, Items) end) ++
         map(list_down(VHostPath), fun (Q) -> info_down(Q, Items, down) end).
 
+info_all_partial_emit(VHostPath, Items, all, Ref, AggregatorPid) ->
+    info_all_partial_emit(VHostPath, Items, online, Ref, AggregatorPid),
+    info_all_partial_emit(VHostPath, Items, offline, Ref, AggregatorPid);
+info_all_partial_emit(VHostPath, Items, online, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map_with_exit_handler(
+      AggregatorPid, Ref, fun(Q) -> info(Q, Items) end,
+      list(VHostPath),
+      continue);
+info_all_partial_emit(VHostPath, Items, offline, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map_with_exit_handler(
+      AggregatorPid, Ref, fun(Q) -> info_down(Q, Items, down) end,
+      list_down(VHostPath),
+      continue);
+info_all_partial_emit(VHostPath, Items, local, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map_with_exit_handler(
+      AggregatorPid, Ref, fun(Q) -> info(Q, Items) end,
+      list_local(VHostPath),
+      continue).
+
+info_all(VHostPath, Items, Filter, Ref, AggregatorPid) ->
+    info_all_partial_emit(VHostPath, Items, Filter, Ref, AggregatorPid),
+    %% Previous map(s) are incomplete, finalize emission
+    rabbit_control_misc:emitting_map(AggregatorPid, Ref, fun(_) -> no_op end, []).
+
+info_local(VHostPath) ->
+    map(list_local(VHostPath), fun (Q) -> info(Q, [name]) end).
+
+list_local(VHostPath) ->
+    [ Q || #amqqueue{state = State, pid=QPid} = Q <- list(VHostPath),
+           State =/= crashed,
+           node() =:= node(QPid) ].
+
 force_event_refresh(Ref) ->
     [gen_server2:cast(Q#amqqueue.pid,
                       {force_event_refresh, Ref}) || Q <- list()],
@@ -593,18 +678,33 @@ consumers(#amqqueue{ pid = QPid }) -> delegate:call(QPid, consumers).
 consumer_info_keys() -> ?CONSUMER_INFO_KEYS.
 
 consumers_all(VHostPath) ->
-    ConsumerInfoKeys=consumer_info_keys(),
+    ConsumerInfoKeys = consumer_info_keys(),
     lists:append(
       map(list(VHostPath),
-          fun (Q) ->
-              [lists:zip(
-                 ConsumerInfoKeys,
-                 [Q#amqqueue.name, ChPid, CTag, AckRequired, Prefetch, Args]) ||
-                  {ChPid, CTag, AckRequired, Prefetch, Args} <- consumers(Q)]
-          end)).
+          fun(Q) -> get_queue_consumer_info(Q, ConsumerInfoKeys) end)).
+
+consumers_all(VHostPath, Ref, AggregatorPid) ->
+    ConsumerInfoKeys = consumer_info_keys(),
+    rabbit_control_misc:emitting_map(
+      AggregatorPid, Ref,
+      fun(Q) -> get_queue_consumer_info(Q, ConsumerInfoKeys) end,
+      list(VHostPath)).
+
+get_queue_consumer_info(Q, ConsumerInfoKeys) ->
+    [lists:zip(ConsumerInfoKeys,
+               [Q#amqqueue.name, ChPid, CTag,
+                AckRequired, Prefetch, Args]) ||
+        {ChPid, CTag, AckRequired, Prefetch, Args} <- consumers(Q)].
 
 stat(#amqqueue{pid = QPid}) -> delegate:call(QPid, stat).
 
+pid_of(#amqqueue{pid = Pid}) -> Pid.
+pid_of(VHost, QueueName) ->
+  case lookup(rabbit_misc:r(VHost, queue, QueueName)) of
+    {ok, Q}                -> pid_of(Q);
+    {error, not_found} = E -> E
+  end.
+
 delete_immediately(QPids) ->
     [gen_server2:cast(QPid, delete_immediately) || QPid <- QPids],
     ok.
@@ -630,13 +730,23 @@ reject(QPid, Requeue, MsgIds, ChPid) ->
     delegate:cast(QPid, {reject, Requeue, MsgIds, ChPid}).
 
 notify_down_all(QPids, ChPid) ->
-    {_, Bads} = delegate:call(QPids, {notify_down, ChPid}),
-    case lists:filter(
-           fun ({_Pid, {exit, {R, _}, _}}) -> rabbit_misc:is_abnormal_exit(R);
-               ({_Pid, _})                 -> false
-           end, Bads) of
-        []    -> ok;
-        Bads1 -> {error, Bads1}
+    notify_down_all(QPids, ChPid, ?CHANNEL_OPERATION_TIMEOUT).
+
+notify_down_all(QPids, ChPid, Timeout) ->
+    case rpc:call(node(), delegate, call,
+                  [QPids, {notify_down, ChPid}], Timeout) of
+        {badrpc, timeout} -> {error, {channel_operation_timeout, Timeout}};
+        {badrpc, Reason}  -> {error, Reason};
+        {_, Bads} ->
+            case lists:filter(
+                   fun ({_Pid, {exit, {R, _}, _}}) ->
+                           rabbit_misc:is_abnormal_exit(R);
+                       ({_Pid, _})                 -> false
+                   end, Bads) of
+                []    -> ok;
+                Bads1 -> {error, Bads1}
+            end;
+        Error         -> {error, Error}
     end.
 
 activate_limit_all(QPids, ChPid) ->
@@ -771,11 +881,15 @@ set_ram_duration_target(QPid, Duration) ->
 set_maximum_since_use(QPid, Age) ->
     gen_server2:cast(QPid, {set_maximum_since_use, Age}).
 
-start_mirroring(QPid) -> ok = delegate:cast(QPid, start_mirroring).
-stop_mirroring(QPid)  -> ok = delegate:cast(QPid, stop_mirroring).
+update_mirroring(QPid) -> ok = delegate:cast(QPid, update_mirroring).
+
+sync_mirrors(#amqqueue{pid = QPid}) -> delegate:call(QPid, sync_mirrors);
+sync_mirrors(QPid)                  -> delegate:call(QPid, sync_mirrors).
+cancel_sync_mirrors(#amqqueue{pid = QPid}) -> delegate:call(QPid, cancel_sync_mirrors);
+cancel_sync_mirrors(QPid)                  -> delegate:call(QPid, cancel_sync_mirrors).
 
-sync_mirrors(QPid)        -> delegate:call(QPid, sync_mirrors).
-cancel_sync_mirrors(QPid) -> delegate:call(QPid, cancel_sync_mirrors).
+is_mirrored(Q) ->
+    rabbit_mirror_queue_misc:is_mirrored(Q).
 
 on_node_up(Node) ->
     ok = rabbit_misc:execute_mnesia_transaction(
@@ -821,11 +935,11 @@ on_node_down(Node) ->
     rabbit_misc:execute_mnesia_tx_with_tail(
       fun () -> QsDels =
                     qlc:e(qlc:q([{QName, delete_queue(QName)} ||
-                                    #amqqueue{name = QName, pid = Pid,
-                                              slave_pids = []}
+                                    #amqqueue{name = QName, pid = Pid} = Q
                                         <- mnesia:table(rabbit_queue),
-                                    node(Pid) == Node andalso
-                                    not rabbit_mnesia:is_process_alive(Pid)])),
+                                   not rabbit_amqqueue:is_mirrored(Q) andalso
+                                       node(Pid) == Node andalso
+                                       not rabbit_mnesia:is_process_alive(Pid)])),
                 {Qs, Dels} = lists:unzip(QsDels),
                 T = rabbit_binding:process_deletions(
                       lists:foldl(fun rabbit_binding:combine_deletions/2,
similarity index 91%
rename from rabbitmq-server/src/rabbit_auth_backend_dummy.erl
rename to deps/rabbit_common/src/rabbit_auth_backend_dummy.erl
index ee6dabea1424d0c4e950bc0f25e33bf8220dcb79..8ac19299f9c09a041f8d85b6f0d93d03335a8d66 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_backend_dummy).
 -export([user_login_authentication/2, user_login_authorization/1,
          check_vhost_access/3, check_resource_access/3]).
 
--ifdef(use_specs).
-
--spec(user/0 :: () -> rabbit_types:user()).
-
--endif.
+-spec user() -> rabbit_types:user().
 
 %% A user to be used by the direct client when permission checks are
 %% not needed. This user can do anything AMQPish.
similarity index 61%
rename from rabbitmq-server/src/rabbit_auth_backend_internal.erl
rename to deps/rabbit_common/src/rabbit_auth_backend_internal.erl
index 2b2a0ba20e29eed07187ff3d9edaf8ae646a8081..fdd954a4e0c4eea2dafdd132a9f201a01b4e1de9 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_backend_internal).
 
 -export([add_user/2, delete_user/1, lookup_user/1,
          change_password/2, clear_password/1,
-         hash_password/1, change_password_hash/2,
+         hash_password/2, change_password_hash/2, change_password_hash/3,
          set_tags/2, set_permissions/5, clear_permissions/2]).
 -export([user_info_keys/0, perms_info_keys/0,
          user_perms_info_keys/0, vhost_perms_info_keys/0,
          user_vhost_perms_info_keys/0,
-         list_users/0, list_permissions/0,
-         list_user_permissions/1, list_vhost_permissions/1,
+         list_users/0, list_users/2, list_permissions/0,
+         list_user_permissions/1, list_user_permissions/3,
+         list_vhost_permissions/1, list_vhost_permissions/3,
          list_user_vhost_permissions/2]).
 
+%% for testing
+-export([hashing_module_for_user/1]).
+
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--type(regexp() :: binary()).
-
--spec(add_user/2 :: (rabbit_types:username(), rabbit_types:password()) -> 'ok').
--spec(delete_user/1 :: (rabbit_types:username()) -> 'ok').
--spec(lookup_user/1 :: (rabbit_types:username())
-                       -> rabbit_types:ok(rabbit_types:internal_user())
-                              | rabbit_types:error('not_found')).
--spec(change_password/2 :: (rabbit_types:username(), rabbit_types:password())
-                           -> 'ok').
--spec(clear_password/1 :: (rabbit_types:username()) -> 'ok').
--spec(hash_password/1 :: (rabbit_types:password())
-                         -> rabbit_types:password_hash()).
--spec(change_password_hash/2 :: (rabbit_types:username(),
-                                 rabbit_types:password_hash()) -> 'ok').
--spec(set_tags/2 :: (rabbit_types:username(), [atom()]) -> 'ok').
--spec(set_permissions/5 ::(rabbit_types:username(), rabbit_types:vhost(),
-                           regexp(), regexp(), regexp()) -> 'ok').
--spec(clear_permissions/2 :: (rabbit_types:username(), rabbit_types:vhost())
-                             -> 'ok').
--spec(user_info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(perms_info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(user_perms_info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(vhost_perms_info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(user_vhost_perms_info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(list_users/0 :: () -> [rabbit_types:infos()]).
--spec(list_permissions/0 :: () -> [rabbit_types:infos()]).
--spec(list_user_permissions/1 ::
-        (rabbit_types:username()) -> [rabbit_types:infos()]).
--spec(list_vhost_permissions/1 ::
-        (rabbit_types:vhost()) -> [rabbit_types:infos()]).
--spec(list_user_vhost_permissions/2 ::
-        (rabbit_types:username(), rabbit_types:vhost())
-        -> [rabbit_types:infos()]).
-
--endif.
+-type regexp() :: binary().
+
+-spec add_user(rabbit_types:username(), rabbit_types:password()) -> 'ok'.
+-spec delete_user(rabbit_types:username()) -> 'ok'.
+-spec lookup_user
+        (rabbit_types:username()) ->
+            rabbit_types:ok(rabbit_types:internal_user()) |
+            rabbit_types:error('not_found').
+-spec change_password
+        (rabbit_types:username(), rabbit_types:password()) -> 'ok'.
+-spec clear_password(rabbit_types:username()) -> 'ok'.
+-spec hash_password
+        (module(), rabbit_types:password()) -> rabbit_types:password_hash().
+-spec change_password_hash
+        (rabbit_types:username(), rabbit_types:password_hash()) -> 'ok'.
+-spec set_tags(rabbit_types:username(), [atom()]) -> 'ok'.
+-spec set_permissions
+        (rabbit_types:username(), rabbit_types:vhost(), regexp(), regexp(),
+         regexp()) ->
+            'ok'.
+-spec clear_permissions
+        (rabbit_types:username(), rabbit_types:vhost()) -> 'ok'.
+-spec user_info_keys() -> rabbit_types:info_keys().
+-spec perms_info_keys() -> rabbit_types:info_keys().
+-spec user_perms_info_keys() -> rabbit_types:info_keys().
+-spec vhost_perms_info_keys() -> rabbit_types:info_keys().
+-spec user_vhost_perms_info_keys() -> rabbit_types:info_keys().
+-spec list_users() -> [rabbit_types:infos()].
+-spec list_users(reference(), pid()) -> 'ok'.
+-spec list_permissions() -> [rabbit_types:infos()].
+-spec list_user_permissions
+        (rabbit_types:username()) -> [rabbit_types:infos()].
+-spec list_user_permissions
+        (rabbit_types:username(), reference(), pid()) -> 'ok'.
+-spec list_vhost_permissions
+        (rabbit_types:vhost()) -> [rabbit_types:infos()].
+-spec list_vhost_permissions
+        (rabbit_types:vhost(), reference(), pid()) -> 'ok'.
+-spec list_user_vhost_permissions
+        (rabbit_types:username(), rabbit_types:vhost()) -> [rabbit_types:infos()].
 
 %%----------------------------------------------------------------------------
 %% Implementation of rabbit_auth_backend
 
+%% Returns a password hashing module for the user record provided. If
+%% there is no information in the record, we consider it to be legacy
+%% (inserted by a version older than 3.6.0) and fall back to MD5, the
+%% now obsolete hashing function.
+hashing_module_for_user(#internal_user{
+    hashing_algorithm = ModOrUndefined}) ->
+        rabbit_password:hashing_mod(ModOrUndefined).
+
 user_login_authentication(Username, []) ->
     internal_check_user_login(Username, fun(_) -> true end);
-user_login_authentication(Username, [{password, Cleartext}]) ->
-    internal_check_user_login(
-      Username,
-      fun (#internal_user{password_hash = <<Salt:4/binary, Hash/binary>>}) ->
-              Hash =:= salted_md5(Salt, Cleartext);
-          (#internal_user{}) ->
-              false
-      end);
 user_login_authentication(Username, AuthProps) ->
-    exit({unknown_auth_props, Username, AuthProps}).
+    case lists:keyfind(password, 1, AuthProps) of
+        {password, Cleartext} ->
+            internal_check_user_login(
+              Username,
+              fun (#internal_user{
+                        password_hash = <<Salt:4/binary, Hash/binary>>
+                    } = U) ->
+                  Hash =:= rabbit_password:salted_hash(
+                      hashing_module_for_user(U), Salt, Cleartext);
+                  (#internal_user{}) ->
+                      false
+              end);
+        false -> exit({unknown_auth_props, Username, AuthProps})
+    end.
 
 user_login_authorization(Username) ->
     case user_login_authentication(Username, []) of
@@ -147,17 +167,19 @@ permission_index(read)      -> #permission.read.
 
 add_user(Username, Password) ->
     rabbit_log:info("Creating user '~s'~n", [Username]),
+    %% hash_password will pick the hashing function configured for us
+    %% but we also need to store a hint as part of the record, so we
+    %% retrieve it here one more time
+    HashingMod = rabbit_password:hashing_mod(),
+    User = #internal_user{username          = Username,
+                          password_hash     = hash_password(HashingMod, Password),
+                          tags              = [],
+                          hashing_algorithm = HashingMod},
     R = rabbit_misc:execute_mnesia_transaction(
           fun () ->
                   case mnesia:wread({rabbit_user, Username}) of
                       [] ->
-                          ok = mnesia:write(
-                                 rabbit_user,
-                                 #internal_user{username = Username,
-                                                password_hash =
-                                                    hash_password(Password),
-                                                tags = []},
-                                 write);
+                          ok = mnesia:write(rabbit_user, User, write);
                       _ ->
                           mnesia:abort({user_already_exists, Username})
                   end
@@ -191,7 +213,11 @@ lookup_user(Username) ->
 
 change_password(Username, Password) ->
     rabbit_log:info("Changing password for '~s'~n", [Username]),
-    R = change_password_hash(Username, hash_password(Password)),
+    HashingAlgorithm = rabbit_password:hashing_mod(),
+    R = change_password_hash(Username,
+                             hash_password(rabbit_password:hashing_mod(),
+                                           Password),
+                             HashingAlgorithm),
     rabbit_event:notify(user_password_changed, [{name, Username}]),
     R.
 
@@ -201,24 +227,20 @@ clear_password(Username) ->
     rabbit_event:notify(user_password_cleared, [{name, Username}]),
     R.
 
-hash_password(Cleartext) ->
-    {A1,A2,A3} = now(),
-    random:seed(A1, A2, A3),
-    Salt = random:uniform(16#ffffffff),
-    SaltBin = <<Salt:32>>,
-    Hash = salted_md5(SaltBin, Cleartext),
-    <<SaltBin/binary, Hash/binary>>.
+hash_password(HashingMod, Cleartext) ->
+    rabbit_password:hash(HashingMod, Cleartext).
 
 change_password_hash(Username, PasswordHash) ->
+    change_password_hash(Username, PasswordHash, rabbit_password:hashing_mod()).
+
+
+change_password_hash(Username, PasswordHash, HashingAlgorithm) ->
     update_user(Username, fun(User) ->
                                   User#internal_user{
-                                    password_hash = PasswordHash }
+                                    password_hash     = PasswordHash,
+                                    hashing_algorithm = HashingAlgorithm }
                           end).
 
-salted_md5(Salt, Cleartext) ->
-    Salted = <<Salt/binary, Cleartext/binary>>,
-    erlang:md5(Salted).
-
 set_tags(Username, Tags) ->
     rabbit_log:info("Setting user tags for user '~s' to ~p~n",
                     [Username, Tags]),
@@ -255,7 +277,7 @@ set_permissions(Username, VHostPath, ConfigurePerm, WritePerm, ReadPerm) ->
                                                 read      = ReadPerm}},
                              write)
             end)),
-    rabbit_event:notify(permission_created, [{user,      Username}, 
+    rabbit_event:notify(permission_created, [{user,      Username},
                                              {vhost,     VHostPath},
                                              {configure, ConfigurePerm},
                                              {write,     WritePerm},
@@ -299,26 +321,28 @@ user_perms_info_keys()       -> [vhost | ?PERMS_INFO_KEYS].
 user_vhost_perms_info_keys() -> ?PERMS_INFO_KEYS.
 
 list_users() ->
-    [[{user, Username}, {tags, Tags}] ||
-        #internal_user{username = Username, tags = Tags} <-
-            mnesia:dirty_match_object(rabbit_user, #internal_user{_ = '_'})].
+    [extract_internal_user_params(U) ||
+        U <- mnesia:dirty_match_object(rabbit_user, #internal_user{_ = '_'})].
+
+list_users(Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map(
+      AggregatorPid, Ref,
+      fun(U) -> extract_internal_user_params(U) end,
+      mnesia:dirty_match_object(rabbit_user, #internal_user{_ = '_'})).
 
 list_permissions() ->
     list_permissions(perms_info_keys(), match_user_vhost('_', '_')).
 
 list_permissions(Keys, QueryThunk) ->
-    [filter_props(Keys, [{user,      Username},
-                         {vhost,     VHostPath},
-                         {configure, ConfigurePerm},
-                         {write,     WritePerm},
-                         {read,      ReadPerm}]) ||
-        #user_permission{user_vhost = #user_vhost{username     = Username,
-                                                  virtual_host = VHostPath},
-                         permission = #permission{ configure = ConfigurePerm,
-                                                   write     = WritePerm,
-                                                   read      = ReadPerm}} <-
-            %% TODO: use dirty ops instead
-            rabbit_misc:execute_mnesia_transaction(QueryThunk)].
+    [extract_user_permission_params(Keys, U) ||
+        %% TODO: use dirty ops instead
+        U <- rabbit_misc:execute_mnesia_transaction(QueryThunk)].
+
+list_permissions(Keys, QueryThunk, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map(
+      AggregatorPid, Ref, fun(U) -> extract_user_permission_params(Keys, U) end,
+      %% TODO: use dirty ops instead
+      rabbit_misc:execute_mnesia_transaction(QueryThunk)).
 
 filter_props(Keys, Props) -> [T || T = {K, _} <- Props, lists:member(K, Keys)].
 
@@ -327,17 +351,46 @@ list_user_permissions(Username) ->
       user_perms_info_keys(),
       rabbit_misc:with_user(Username, match_user_vhost(Username, '_'))).
 
+list_user_permissions(Username, Ref, AggregatorPid) ->
+    list_permissions(
+      user_perms_info_keys(),
+      rabbit_misc:with_user(Username, match_user_vhost(Username, '_')),
+      Ref, AggregatorPid).
+
 list_vhost_permissions(VHostPath) ->
     list_permissions(
       vhost_perms_info_keys(),
       rabbit_vhost:with(VHostPath, match_user_vhost('_', VHostPath))).
 
+list_vhost_permissions(VHostPath, Ref, AggregatorPid) ->
+    list_permissions(
+      vhost_perms_info_keys(),
+      rabbit_vhost:with(VHostPath, match_user_vhost('_', VHostPath)),
+      Ref, AggregatorPid).
+
 list_user_vhost_permissions(Username, VHostPath) ->
     list_permissions(
       user_vhost_perms_info_keys(),
       rabbit_misc:with_user_and_vhost(
         Username, VHostPath, match_user_vhost(Username, VHostPath))).
 
+extract_user_permission_params(Keys, #user_permission{
+                                        user_vhost =
+                                            #user_vhost{username     = Username,
+                                                        virtual_host = VHostPath},
+                                        permission = #permission{
+                                                        configure = ConfigurePerm,
+                                                        write     = WritePerm,
+                                                        read      = ReadPerm}}) ->
+    filter_props(Keys, [{user,      Username},
+                        {vhost,     VHostPath},
+                        {configure, ConfigurePerm},
+                        {write,     WritePerm},
+                        {read,      ReadPerm}]).
+
+extract_internal_user_params(#internal_user{username = Username, tags = Tags}) ->
+    [{user, Username}, {tags, Tags}].
+
 match_user_vhost(Username, VHostPath) ->
     fun () -> mnesia:match_object(
                 rabbit_user_permission,
similarity index 85%
rename from rabbitmq-server/src/rabbit_auth_mechanism.erl
rename to deps/rabbit_common/src/rabbit_auth_mechanism.erl
index 78e3e7dd4b7befb40d45d8471da7ee15b51c0c1d..4c41502b065029de3132b33758dea9011476ee99 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_mechanism).
 
--ifdef(use_specs).
-
 %% A description.
 -callback description() -> [proplists:property()].
 
     {'challenge', binary(), any()} |
     {'protocol_error', string(), [any()]} |
     {'refused', rabbit_types:username() | none, string(), [any()]}.
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{description, 0}, {should_offer, 1}, {init, 1}, {handle_response, 2}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
similarity index 84%
rename from rabbitmq-server/src/rabbit_authn_backend.erl
rename to deps/rabbit_common/src/rabbit_authn_backend.erl
index b9cb0d36693a44b1cf57f493bddd5807fd0e2f08..45f3c46109b7add68930d9afb9abf46ef0bd974b 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_authn_backend).
 
 -include("rabbit.hrl").
 
--ifdef(use_specs).
-
 %% Check a user can log in, given a username and a proplist of
 %% authentication information (e.g. [{password, Password}]). If your
 %% backend is not to be used for authentication, this should always
     {'ok', rabbit_types:auth_user()} |
     {'refused', string(), [any()]} |
     {'error', any()}.
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{user_login_authentication, 2}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
similarity index 88%
rename from rabbitmq-server/src/rabbit_authz_backend.erl
rename to deps/rabbit_common/src/rabbit_authz_backend.erl
index 495a79695d86149f285c54a337c8c6b4f27748de..4315aaa9b169b12884a68b554d1f94b3a36e6bee 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_authz_backend).
 
 -include("rabbit.hrl").
 
--ifdef(use_specs).
-
 %% Check a user can log in, when this backend is being used for
 %% authorisation only. Authentication has already taken place
 %% successfully, but we need to check that the user exists in this
                                 rabbit_types:r(atom()),
                                 rabbit_access_control:permission_atom()) ->
     boolean() | {'error', any()}.
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{user_login_authorization, 1},
-     {check_vhost_access, 3}, {check_resource_access, 3}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
similarity index 86%
rename from rabbitmq-server/src/rabbit_backing_queue.erl
rename to deps/rabbit_common/src/rabbit_backing_queue.erl
index d6cd3ca43d64d05a6eea205777d55bcf8c0783cd..bb4d03acced4348975addbef38e7173010e59c0b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_backing_queue).
                     messages_unacknowledged_ram, messages_persistent,
                     message_bytes, message_bytes_ready,
                     message_bytes_unacknowledged, message_bytes_ram,
-                    message_bytes_persistent,
+                    message_bytes_persistent, head_message_timestamp,
                     disk_reads, disk_writes, backing_queue_status]).
 
--ifdef(use_specs).
-
 %% We can't specify a per-queue ack/state with callback signatures
--type(ack()   :: any()).
--type(state() :: any()).
-
--type(flow() :: 'flow' | 'noflow').
--type(msg_ids() :: [rabbit_types:msg_id()]).
--type(fetch_result(Ack) ::
-        ('empty' | {rabbit_types:basic_message(), boolean(), Ack})).
--type(drop_result(Ack) ::
-        ('empty' | {rabbit_types:msg_id(), Ack})).
--type(recovery_terms() :: [term()] | 'non_clean_shutdown').
--type(recovery_info() :: 'new' | recovery_terms()).
--type(purged_msg_count() :: non_neg_integer()).
--type(async_callback() ::
-        fun ((atom(), fun ((atom(), state()) -> state())) -> 'ok')).
--type(duration() :: ('undefined' | 'infinity' | number())).
-
--type(msg_fun(A) :: fun ((rabbit_types:basic_message(), ack(), A) -> A)).
--type(msg_pred() :: fun ((rabbit_types:message_properties()) -> boolean())).
-
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
+-type ack()   :: any().
+-type state() :: any().
+
+-type flow() :: 'flow' | 'noflow'.
+-type msg_ids() :: [rabbit_types:msg_id()].
+-type publish() :: {rabbit_types:basic_message(),
+                    rabbit_types:message_properties(), boolean()}.
+-type delivered_publish() :: {rabbit_types:basic_message(),
+                              rabbit_types:message_properties()}.
+-type fetch_result(Ack) ::
+        ('empty' | {rabbit_types:basic_message(), boolean(), Ack}).
+-type drop_result(Ack) ::
+        ('empty' | {rabbit_types:msg_id(), Ack}).
+-type recovery_terms() :: [term()] | 'non_clean_shutdown'.
+-type recovery_info() :: 'new' | recovery_terms().
+-type purged_msg_count() :: non_neg_integer().
+-type async_callback() ::
+        fun ((atom(), fun ((atom(), state()) -> state())) -> 'ok').
+-type duration() :: ('undefined' | 'infinity' | number()).
+
+-type msg_fun(A) :: fun ((rabbit_types:basic_message(), ack(), A) -> A).
+-type msg_pred() :: fun ((rabbit_types:message_properties()) -> boolean()).
+
+-type queue_mode() :: atom().
+
+-spec info_keys() -> rabbit_types:info_keys().
 
 %% Called on startup with a list of durable queue names. The queues
 %% aren't being started at this point, but this call allows the
                   rabbit_types:message_properties(), boolean(), pid(), flow(),
                   state()) -> state().
 
+%% Like publish/6 but for batches of publishes.
+-callback batch_publish([publish()], pid(), flow(), state()) -> state().
+
 %% Called for messages which have already been passed straight
 %% out to a client. The queue will be empty for these calls
 %% (i.e. saves the round trip through the backing queue).
                             state())
                            -> {ack(), state()}.
 
+%% Like publish_delivered/5 but for batches of publishes.
+-callback batch_publish_delivered([delivered_publish()], pid(), flow(),
+                                  state())
+                                 -> {[ack()], state()}.
+
 %% Called to inform the BQ about messages which have reached the
 %% queue, but are not going to be further passed to BQ.
 -callback discard(rabbit_types:msg_id(), pid(), flow(), state()) -> state().
 -callback is_duplicate(rabbit_types:basic_message(), state())
                       -> {boolean(), state()}.
 
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{start, 1}, {stop, 0}, {init, 3}, {terminate, 2},
-     {delete_and_terminate, 2}, {delete_crashed, 1}, {purge, 1},
-     {purge_acks, 1}, {publish, 6},
-     {publish_delivered, 5}, {discard, 4}, {drain_confirmed, 1},
-     {dropwhile, 2}, {fetchwhile, 4}, {fetch, 2},
-     {drop, 2}, {ack, 2}, {requeue, 2}, {ackfold, 4}, {fold, 3}, {len, 1},
-     {is_empty, 1}, {depth, 1}, {set_ram_duration_target, 2},
-     {ram_duration, 1}, {needs_timeout, 1}, {timeout, 1},
-     {handle_pre_hibernate, 1}, {resume, 1}, {msg_rates, 1},
-     {info, 2}, {invoke, 3}, {is_duplicate, 2}] ;
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
+-callback set_queue_mode(queue_mode(), state()) -> state().
+
+-callback zip_msgs_and_acks(delivered_publish(),
+                            [ack()], Acc, state())
+                           -> Acc.
 
 info_keys() -> ?INFO_KEYS.
similarity index 82%
rename from rabbitmq-server/src/rabbit_basic.erl
rename to deps/rabbit_common/src/rabbit_basic.erl
index efc5ce27452027c44c033a75831c1de030a12853..14f0a4e855dbc0dc33649ce43b845c5fa57d81cf 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_basic).
 
 -export([publish/4, publish/5, publish/1,
          message/3, message/4, properties/1, prepend_table_header/3,
-         extract_headers/1, map_headers/2, delivery/4, header_routes/1,
-         parse_expiration/1, header/2, header/3]).
+         extract_headers/1, extract_timestamp/1, map_headers/2, delivery/4,
+         header_routes/1, parse_expiration/1, header/2, header/3]).
 -export([build_content/2, from_content/1, msg_size/1, maybe_gc_large_msg/1]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-type properties_input() ::
+        rabbit_framing:amqp_property_record() | [{atom(), any()}].
+-type publish_result() ::
+        {ok, [pid()]} | rabbit_types:error('not_found').
+-type header() :: any().
+-type headers() :: rabbit_framing:amqp_table() | 'undefined'.
 
--type(properties_input() ::
-        (rabbit_framing:amqp_property_record() | [{atom(), any()}])).
--type(publish_result() ::
-        ({ok, [pid()]} | rabbit_types:error('not_found'))).
--type(header() :: any()).
--type(headers() :: rabbit_framing:amqp_table() | 'undefined').
+-type exchange_input() :: rabbit_types:exchange() | rabbit_exchange:name().
+-type body_input() :: binary() | [binary()].
 
--type(exchange_input() :: (rabbit_types:exchange() | rabbit_exchange:name())).
--type(body_input() :: (binary() | [binary()])).
-
--spec(publish/4 ::
+-spec publish
         (exchange_input(), rabbit_router:routing_key(), properties_input(),
-         body_input()) -> publish_result()).
--spec(publish/5 ::
+         body_input()) ->
+            publish_result().
+-spec publish
         (exchange_input(), rabbit_router:routing_key(), boolean(),
-         properties_input(), body_input()) -> publish_result()).
--spec(publish/1 ::
-        (rabbit_types:delivery()) -> publish_result()).
--spec(delivery/4 ::
+         properties_input(), body_input()) ->
+            publish_result().
+-spec publish(rabbit_types:delivery()) -> publish_result().
+-spec delivery
         (boolean(), boolean(), rabbit_types:message(), undefined | integer()) ->
-                         rabbit_types:delivery()).
--spec(message/4 ::
-        (rabbit_exchange:name(), rabbit_router:routing_key(),
-         properties_input(), binary()) -> rabbit_types:message()).
--spec(message/3 ::
+            rabbit_types:delivery().
+-spec message
+        (rabbit_exchange:name(), rabbit_router:routing_key(), properties_input(),
+         binary()) ->
+            rabbit_types:message().
+-spec message
         (rabbit_exchange:name(), rabbit_router:routing_key(),
          rabbit_types:decoded_content()) ->
-                        rabbit_types:ok_or_error2(rabbit_types:message(), any())).
--spec(properties/1 ::
-        (properties_input()) -> rabbit_framing:amqp_property_record()).
-
--spec(prepend_table_header/3 ::
-        (binary(), rabbit_framing:amqp_table(), headers()) -> headers()).
+            rabbit_types:ok_or_error2(rabbit_types:message(), any()).
+-spec properties
+        (properties_input()) -> rabbit_framing:amqp_property_record().
 
--spec(header/2 ::
-        (header(), headers()) -> 'undefined' | any()).
--spec(header/3 ::
-        (header(), headers(), any()) -> 'undefined' | any()).
+-spec prepend_table_header
+        (binary(), rabbit_framing:amqp_table(), headers()) -> headers().
 
--spec(extract_headers/1 :: (rabbit_types:content()) -> headers()).
+-spec header(header(), headers()) -> 'undefined' | any().
+-spec header(header(), headers(), any()) -> 'undefined' | any().
 
--spec(map_headers/2 :: (fun((headers()) -> headers()), rabbit_types:content())
-                       -> rabbit_types:content()).
+-spec extract_headers(rabbit_types:content()) -> headers().
 
--spec(header_routes/1 ::
-        (undefined | rabbit_framing:amqp_table()) -> [string()]).
--spec(build_content/2 :: (rabbit_framing:amqp_property_record(),
-                          binary() | [binary()]) -> rabbit_types:content()).
--spec(from_content/1 :: (rabbit_types:content()) ->
-                             {rabbit_framing:amqp_property_record(), binary()}).
--spec(parse_expiration/1 ::
-        (rabbit_framing:amqp_property_record())
-        -> rabbit_types:ok_or_error2('undefined' | non_neg_integer(), any())).
+-spec map_headers
+        (fun((headers()) -> headers()), rabbit_types:content()) ->
+            rabbit_types:content().
 
--spec(msg_size/1 :: (rabbit_types:content() | rabbit_types:message()) ->
-                         non_neg_integer()).
+-spec header_routes(undefined | rabbit_framing:amqp_table()) -> [string()].
+-spec build_content
+        (rabbit_framing:amqp_property_record(), binary() | [binary()]) ->
+            rabbit_types:content().
+-spec from_content
+        (rabbit_types:content()) ->
+            {rabbit_framing:amqp_property_record(), binary()}.
+-spec parse_expiration
+        (rabbit_framing:amqp_property_record()) ->
+            rabbit_types:ok_or_error2('undefined' | non_neg_integer(), any()).
 
--spec(maybe_gc_large_msg/1 ::
-        (rabbit_types:content() | rabbit_types:message()) -> non_neg_integer()).
+-spec msg_size
+        (rabbit_types:content() | rabbit_types:message()) -> non_neg_integer().
 
--endif.
+-spec maybe_gc_large_msg
+        (rabbit_types:content() | rabbit_types:message()) -> non_neg_integer().
 
 %%----------------------------------------------------------------------------
 
@@ -249,6 +247,11 @@ extract_headers(Content) ->
         rabbit_binary_parser:ensure_content_decoded(Content),
     Headers.
 
+extract_timestamp(Content) ->
+    #content{properties = #'P_basic'{timestamp = Timestamp}} =
+        rabbit_binary_parser:ensure_content_decoded(Content),
+    Timestamp.
+
 map_headers(F, Content) ->
     Content1 = rabbit_binary_parser:ensure_content_decoded(Content),
     #content{properties = #'P_basic'{headers = Headers} = Props} = Content1,
similarity index 82%
rename from rabbitmq-server/src/rabbit_binary_generator.erl
rename to deps/rabbit_common/src/rabbit_binary_generator.erl
index 34f2d601aa20d0995f61eaed24175387438e90fe..95d06ff5f52acbecfdecd84f0b9c1daa360b26ca 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_binary_generator).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
+-type frame() :: [binary()].
 
--type(frame() :: [binary()]).
-
--spec(build_simple_method_frame/3 ::
+-spec build_simple_method_frame
         (rabbit_channel:channel_number(), rabbit_framing:amqp_method_record(),
-         rabbit_types:protocol())
-        -> frame()).
--spec(build_simple_content_frames/4 ::
+         rabbit_types:protocol()) ->
+            frame().
+-spec build_simple_content_frames
         (rabbit_channel:channel_number(), rabbit_types:content(),
-         non_neg_integer(), rabbit_types:protocol())
-        -> [frame()]).
--spec(build_heartbeat_frame/0 :: () -> frame()).
--spec(generate_table/1 :: (rabbit_framing:amqp_table()) -> binary()).
--spec(check_empty_frame_size/0 :: () -> 'ok').
--spec(ensure_content_encoded/2 ::
+         non_neg_integer(), rabbit_types:protocol()) ->
+            [frame()].
+-spec build_heartbeat_frame() -> frame().
+-spec generate_table(rabbit_framing:amqp_table()) -> binary().
+-spec check_empty_frame_size() -> 'ok'.
+-spec ensure_content_encoded
         (rabbit_types:content(), rabbit_types:protocol()) ->
-                                       rabbit_types:encoded_content()).
--spec(clear_encoded_content/1 ::
-        (rabbit_types:content()) -> rabbit_types:unencoded_content()).
--spec(map_exception/3 :: (rabbit_channel:channel_number(),
-                          rabbit_types:amqp_error() | any(),
-                          rabbit_types:protocol()) ->
-                              {rabbit_channel:channel_number(),
-                               rabbit_framing:amqp_method_record()}).
-
--endif.
+            rabbit_types:encoded_content().
+-spec clear_encoded_content
+        (rabbit_types:content()) ->
+            rabbit_types:unencoded_content().
+-spec map_exception
+        (rabbit_channel:channel_number(), rabbit_types:amqp_error() | any(),
+         rabbit_types:protocol()) ->
+            {rabbit_channel:channel_number(),
+             rabbit_framing:amqp_method_record()}.
 
 %%----------------------------------------------------------------------------
 
@@ -119,20 +116,23 @@ create_frame(TypeInt, ChannelInt, Payload) ->
 table_field_to_binary({FName, T, V}) ->
     [short_string_to_binary(FName) | field_value_to_binary(T, V)].
 
-field_value_to_binary(longstr,   V) -> [$S | long_string_to_binary(V)];
-field_value_to_binary(signedint, V) -> [$I, <<V:32/signed>>];
-field_value_to_binary(decimal,   V) -> {Before, After} = V,
+field_value_to_binary(longstr,       V) -> [$S | long_string_to_binary(V)];
+field_value_to_binary(signedint,     V) -> [$I, <<V:32/signed>>];
+field_value_to_binary(decimal,       V) -> {Before, After} = V,
                                        [$D, Before, <<After:32>>];
-field_value_to_binary(timestamp, V) -> [$T, <<V:64>>];
-field_value_to_binary(table,     V) -> [$F | table_to_binary(V)];
-field_value_to_binary(array,     V) -> [$A | array_to_binary(V)];
-field_value_to_binary(byte,      V) -> [$b, <<V:8/signed>>];
-field_value_to_binary(double,    V) -> [$d, <<V:64/float>>];
-field_value_to_binary(float,     V) -> [$f, <<V:32/float>>];
-field_value_to_binary(long,      V) -> [$l, <<V:64/signed>>];
-field_value_to_binary(short,     V) -> [$s, <<V:16/signed>>];
-field_value_to_binary(bool,      V) -> [$t, if V -> 1; true -> 0 end];
-field_value_to_binary(binary,    V) -> [$x | long_string_to_binary(V)];
+field_value_to_binary(timestamp,     V) -> [$T, <<V:64>>];
+field_value_to_binary(table,         V) -> [$F | table_to_binary(V)];
+field_value_to_binary(array,         V) -> [$A | array_to_binary(V)];
+field_value_to_binary(byte,          V) -> [$b, <<V:8/signed>>];
+field_value_to_binary(double,        V) -> [$d, <<V:64/float>>];
+field_value_to_binary(float,         V) -> [$f, <<V:32/float>>];
+field_value_to_binary(long,          V) -> [$l, <<V:64/signed>>];
+field_value_to_binary(short,         V) -> [$s, <<V:16/signed>>];
+field_value_to_binary(bool,          V) -> [$t, if V -> 1; true -> 0 end];
+field_value_to_binary(binary,        V) -> [$x | long_string_to_binary(V)];
+field_value_to_binary(unsignedbyte,  V) -> [$B, <<V:8/unsigned>>];
+field_value_to_binary(unsignedshort, V) -> [$u, <<V:16/unsigned>>];
+field_value_to_binary(unsignedint,   V) -> [$i, <<V:32/unsigned>>];
 field_value_to_binary(void,     _V) -> [$V].
 
 table_to_binary(Table) when is_list(Table) ->
similarity index 87%
rename from rabbitmq-server/src/rabbit_binary_parser.erl
rename to deps/rabbit_common/src/rabbit_binary_parser.erl
index 8b3bf3e6f568841521dec054ed0437476ea2419f..b84e1203f9b1fa9ea76943be1596c63cf1551c94 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_binary_parser).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(parse_table/1 :: (binary()) -> rabbit_framing:amqp_table()).
--spec(ensure_content_decoded/1 ::
-        (rabbit_types:content()) -> rabbit_types:decoded_content()).
--spec(clear_decoded_content/1 ::
-        (rabbit_types:content()) -> rabbit_types:undecoded_content()).
--spec(validate_utf8/1 :: (binary()) -> 'ok' | 'error').
--spec(assert_utf8/1 :: (binary()) -> 'ok').
-
--endif.
+-spec parse_table(binary()) -> rabbit_framing:amqp_table().
+-spec ensure_content_decoded
+        (rabbit_types:content()) ->
+            rabbit_types:decoded_content().
+-spec clear_decoded_content
+        (rabbit_types:content()) ->
+            rabbit_types:undecoded_content().
+-spec validate_utf8(binary()) -> 'ok' | 'error'.
+-spec assert_utf8(binary()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -53,17 +51,25 @@ parse_table(<<NLen:8/unsigned, NameString:NLen/binary,
               $S, VLen:32/unsigned, Value:VLen/binary, Rest/binary>>) ->
     [{NameString, longstr, Value} | parse_table(Rest)];
 
-?SIMPLE_PARSE_TABLE($I, Value:32/signed,   signedint);
 ?SIMPLE_PARSE_TABLE($T, Value:64/unsigned, timestamp);
 
 parse_table(<<>>) ->
     [];
 
 ?SIMPLE_PARSE_TABLE($b, Value:8/signed,  byte);
+?SIMPLE_PARSE_TABLE($B, Value:8/unsigned, unsignedbyte);
+
+?SIMPLE_PARSE_TABLE($s, Value:16/signed, short);
+?SIMPLE_PARSE_TABLE($u, Value:16/unsigned, unsignedshort);
+
+?SIMPLE_PARSE_TABLE($I, Value:32/signed,   signedint);
+?SIMPLE_PARSE_TABLE($i, Value:32/unsigned, unsignedint);
+
 ?SIMPLE_PARSE_TABLE($d, Value:64/float, double);
 ?SIMPLE_PARSE_TABLE($f, Value:32/float, float);
+
 ?SIMPLE_PARSE_TABLE($l, Value:64/signed, long);
-?SIMPLE_PARSE_TABLE($s, Value:16/signed, short);
+
 
 parse_table(<<NLen:8/unsigned, NameString:NLen/binary,
               $t, Value:8/unsigned, Rest/binary>>) ->
@@ -96,17 +102,26 @@ parse_table(<<NLen:8/unsigned, NameString:NLen/binary,
 parse_array(<<$S, VLen:32/unsigned, Value:VLen/binary, Rest/binary>>) ->
     [{longstr, Value} | parse_array(Rest)];
 
-?SIMPLE_PARSE_ARRAY($I, Value:32/signed,   signedint);
 ?SIMPLE_PARSE_ARRAY($T, Value:64/unsigned, timestamp);
 
 parse_array(<<>>) ->
     [];
 
 ?SIMPLE_PARSE_ARRAY($b, Value:8/signed,  byte);
+?SIMPLE_PARSE_ARRAY($B, Value:8/unsigned, unsignedbyte);
+
+?SIMPLE_PARSE_ARRAY($s, Value:16/signed, short);
+?SIMPLE_PARSE_ARRAY($u, Value:16/unsigned, unsignedshort);
+
+?SIMPLE_PARSE_ARRAY($I, Value:32/signed,   signedint);
+?SIMPLE_PARSE_ARRAY($i, Value:32/unsigned, unsignedint);
+
 ?SIMPLE_PARSE_ARRAY($d, Value:64/float, double);
 ?SIMPLE_PARSE_ARRAY($f, Value:32/float, float);
+
 ?SIMPLE_PARSE_ARRAY($l, Value:64/signed, long);
-?SIMPLE_PARSE_ARRAY($s, Value:16/signed, short);
+
+
 
 parse_array(<<$t, Value:8/unsigned, Rest/binary>>) ->
     [{bool, (Value /= 0)} | parse_array(Rest)];
similarity index 90%
rename from rabbitmq-server/src/rabbit_channel.erl
rename to deps/rabbit_common/src/rabbit_channel.erl
index b23a8410c52f95b11783df185e60f6ef8e51fe0c..ef523a5b263e11cdf8977044f73a43ef6f76deae 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_channel).
+
+%% rabbit_channel processes represent an AMQP 0-9-1 channels.
+%%
+%% Connections parse protocol frames coming from clients and
+%% dispatch them to channel processes.
+%% Channels are responsible for implementing the logic behind
+%% the various protocol methods, involving other processes as
+%% needed:
+%%
+%%  * Routing messages (using functions in various exchange type
+%%    modules) to queue processes.
+%%  * Managing queues, exchanges, and bindings.
+%%  * Keeping track of consumers
+%%  * Keeping track of unacknowledged deliveries to consumers
+%%  * Keeping track of publisher confirms
+%%  * Keeping track of mandatory message routing confirmations
+%%    and returns
+%%  * Transaction management
+%%  * Authorisation (enforcing permissions)
+%%  * Publishing trace events if tracing is enabled
+%%
+%% Every channel has a number of dependent processes:
+%%
+%%  * A writer which is responsible for sending frames to clients.
+%%  * A limiter which controls how many messages can be delivered
+%%    to consumers according to active QoS prefetch and internal
+%%    flow control logic.
+%%
+%% Channels are also aware of their connection's queue collector.
+%% When a queue is declared as exclusive on a channel, the channel
+%% will notify queue collector of that queue.
+
 -include("rabbit_framing.hrl").
 -include("rabbit.hrl").
 
@@ -23,7 +55,8 @@
 -export([start_link/11, do/2, do/3, do_flow/3, flush/1, shutdown/1]).
 -export([send_command/2, deliver/4, deliver_reply/2,
          send_credit_reply/2, send_drained/2]).
--export([list/0, info_keys/0, info/1, info/2, info_all/0, info_all/1]).
+-export([list/0, info_keys/0, info/1, info/2, info_all/0, info_all/1,
+         info_all/3, info_local/1]).
 -export([refresh_config_local/0, ready_for_close/1]).
 -export([force_event_refresh/1]).
 
          prioritise_cast/3, prioritise_info/3, format_message_queue/2]).
 %% Internal
 -export([list_local/0, deliver_reply_local/3]).
+-export([get_vhost/1, get_user/1]).
+
+-record(ch, {
+  %% starting | running | flow | closing
+  state,
+  %% same as reader's protocol. Used when instantiating
+  %% (protocol) exceptions.
+  protocol,
+  %% channel number
+  channel,
+  %% reader process
+  reader_pid,
+  %% writer process
+  writer_pid,
+  %%
+  conn_pid,
+  %% same as reader's name, see #v1.name
+  %% in rabbit_reader
+  conn_name,
+  %% limiter pid, see rabbit_limiter
+  limiter,
+  %% none | {Msgs, Acks} | committing | failed |
+  tx,
+  %% (consumer) delivery tag sequence
+  next_tag,
+  %% messages pending consumer acknowledgement
+  unacked_message_q,
+  %% same as #v1.user in the reader, used in
+  %% authorisation checks
+  user,
+  %% same as #v1.user in the reader
+  virtual_host,
+  %% when queue.bind's queue field is empty,
+  %% this name will be used instead
+  most_recently_declared_queue,
+  %% a dictionary of queue pid to queue name
+  queue_names,
+  %% queue processes are monitored to update
+  %% queue names
+  queue_monitors,
+  %% a dictionary of consumer tags to
+  %% consumer details: #amqqueue record, acknowledgement mode,
+  %% consumer exclusivity, etc
+  consumer_mapping,
+  %% a dictionary of queue pids to consumer tag lists
+  queue_consumers,
+  %% a set of pids of queues that have unacknowledged
+  %% deliveries
+  delivering_queues,
+  %% when a queue is declared as exclusive, queue
+  %% collector must be notified.
+  %% see rabbit_queue_collector for more info.
+  queue_collector_pid,
+  %% timer used to emit statistics
+  stats_timer,
+  %% are publisher confirms enabled for this channel?
+  confirm_enabled,
+  %% publisher confirm delivery tag sequence
+  publish_seqno,
+  %% a dtree used to track unconfirmed
+  %% (to publishers) messages
+  unconfirmed,
+  %% a list of tags for published messages that were
+  %% delivered but are yet to be confirmed to the client
+  confirmed,
+  %% a dtree used to track oustanding notifications
+  %% for messages published as mandatory
+  mandatory,
+  %% same as capabilities in the reader
+  capabilities,
+  %% tracing exchange resource if tracing is enabled,
+  %% 'none' otherwise
+  trace_state,
+  consumer_prefetch,
+  %% used by "one shot RPC" (amq.
+  reply_consumer,
+  %% flow | noflow, see rabbitmq-server#114
+  delivery_flow,
+  interceptor_state
+}).
 
--record(ch, {state, protocol, channel, reader_pid, writer_pid, conn_pid,
-             conn_name, limiter, tx, next_tag, unacked_message_q, user,
-             virtual_host, most_recently_declared_queue,
-             queue_names, queue_monitors, consumer_mapping,
-             queue_consumers, delivering_queues,
-             queue_collector_pid, stats_timer, confirm_enabled, publish_seqno,
-             unconfirmed, confirmed, mandatory, capabilities, trace_state,
-             consumer_prefetch, reply_consumer,
-             %% flow | noflow, see rabbitmq-server#114
-             delivery_flow}).
 
 -define(MAX_PERMISSION_CACHE_SIZE, 12).
 
          acks_uncommitted,
          prefetch_count,
          global_prefetch_count,
-         state]).
+         state,
+         reductions,
+         garbage_collection]).
 
 -define(CREATION_EVENT_KEYS,
         [pid,
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([channel_number/0]).
 
--type(channel_number() :: non_neg_integer()).
-
--spec(start_link/11 ::
-        (channel_number(), pid(), pid(), pid(), string(),
-         rabbit_types:protocol(), rabbit_types:user(), rabbit_types:vhost(),
-         rabbit_framing:amqp_table(), pid(), pid()) ->
-                            rabbit_types:ok_pid_or_error()).
--spec(do/2 :: (pid(), rabbit_framing:amqp_method_record()) -> 'ok').
--spec(do/3 :: (pid(), rabbit_framing:amqp_method_record(),
-               rabbit_types:maybe(rabbit_types:content())) -> 'ok').
--spec(do_flow/3 :: (pid(), rabbit_framing:amqp_method_record(),
-                    rabbit_types:maybe(rabbit_types:content())) -> 'ok').
--spec(flush/1 :: (pid()) -> 'ok').
--spec(shutdown/1 :: (pid()) -> 'ok').
--spec(send_command/2 :: (pid(), rabbit_framing:amqp_method_record()) -> 'ok').
--spec(deliver/4 ::
-        (pid(), rabbit_types:ctag(), boolean(), rabbit_amqqueue:qmsg())
-        -> 'ok').
--spec(deliver_reply/2 :: (binary(), rabbit_types:delivery()) -> 'ok').
--spec(deliver_reply_local/3 ::
-        (pid(), binary(), rabbit_types:delivery()) -> 'ok').
--spec(send_credit_reply/2 :: (pid(), non_neg_integer()) -> 'ok').
--spec(send_drained/2 :: (pid(), [{rabbit_types:ctag(), non_neg_integer()}])
-                        -> 'ok').
--spec(list/0 :: () -> [pid()]).
--spec(list_local/0 :: () -> [pid()]).
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(info/1 :: (pid()) -> rabbit_types:infos()).
--spec(info/2 :: (pid(), rabbit_types:info_keys()) -> rabbit_types:infos()).
--spec(info_all/0 :: () -> [rabbit_types:infos()]).
--spec(info_all/1 :: (rabbit_types:info_keys()) -> [rabbit_types:infos()]).
--spec(refresh_config_local/0 :: () -> 'ok').
--spec(ready_for_close/1 :: (pid()) -> 'ok').
--spec(force_event_refresh/1 :: (reference()) -> 'ok').
-
--endif.
+-type channel_number() :: non_neg_integer().
+
+-export_type([channel/0]).
+
+-type channel() :: #ch{}.
+
+-spec start_link
+        (channel_number(), pid(), pid(), pid(), string(), rabbit_types:protocol(),
+         rabbit_types:user(), rabbit_types:vhost(), rabbit_framing:amqp_table(),
+         pid(), pid()) ->
+            rabbit_types:ok_pid_or_error().
+-spec do(pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec do
+        (pid(), rabbit_framing:amqp_method_record(),
+         rabbit_types:maybe(rabbit_types:content())) ->
+            'ok'.
+-spec do_flow
+        (pid(), rabbit_framing:amqp_method_record(),
+         rabbit_types:maybe(rabbit_types:content())) ->
+            'ok'.
+-spec flush(pid()) -> 'ok'.
+-spec shutdown(pid()) -> 'ok'.
+-spec send_command(pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec deliver
+        (pid(), rabbit_types:ctag(), boolean(), rabbit_amqqueue:qmsg()) -> 'ok'.
+-spec deliver_reply(binary(), rabbit_types:delivery()) -> 'ok'.
+-spec deliver_reply_local(pid(), binary(), rabbit_types:delivery()) -> 'ok'.
+-spec send_credit_reply(pid(), non_neg_integer()) -> 'ok'.
+-spec send_drained(pid(), [{rabbit_types:ctag(), non_neg_integer()}]) -> 'ok'.
+-spec list() -> [pid()].
+-spec list_local() -> [pid()].
+-spec info_keys() -> rabbit_types:info_keys().
+-spec info(pid()) -> rabbit_types:infos().
+-spec info(pid(), rabbit_types:info_keys()) -> rabbit_types:infos().
+-spec info_all() -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:info_keys()) -> [rabbit_types:infos()].
+-spec info_all(rabbit_types:info_keys(), reference(), pid()) -> 'ok'.
+-spec refresh_config_local() -> 'ok'.
+-spec ready_for_close(pid()) -> 'ok'.
+-spec force_event_refresh(reference()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -219,6 +326,13 @@ info_all() ->
 info_all(Items) ->
     rabbit_misc:filter_exit_map(fun (C) -> info(C, Items) end, list()).
 
+info_local(Items) ->
+    rabbit_misc:filter_exit_map(fun (C) -> info(C, Items) end, list_local()).
+
+info_all(Items, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map_with_exit_handler(
+      AggregatorPid, Ref, fun(C) -> info(C, Items) end, list()).
+
 refresh_config_local() ->
     rabbit_misc:upmap(
       fun (C) -> gen_server2:call(C, refresh_config, infinity) end,
@@ -272,12 +386,16 @@ init([Channel, ReaderPid, WriterPid, ConnPid, ConnName, Protocol, User, VHost,
                 trace_state             = rabbit_trace:init(VHost),
                 consumer_prefetch       = 0,
                 reply_consumer          = none,
-                delivery_flow           = Flow},
-    State1 = rabbit_event:init_stats_timer(State, #ch.stats_timer),
-    rabbit_event:notify(channel_created, infos(?CREATION_EVENT_KEYS, State1)),
-    rabbit_event:if_enabled(State1, #ch.stats_timer,
-                            fun() -> emit_stats(State1) end),
-    {ok, State1, hibernate,
+                delivery_flow           = Flow,
+                interceptor_state       = undefined},
+    State1 = State#ch{
+               interceptor_state = rabbit_channel_interceptor:init(State)},
+    State2 = rabbit_event:init_stats_timer(State1, #ch.stats_timer),
+    rabbit_event:notify(channel_created, infos(?CREATION_EVENT_KEYS, State2)),
+    rabbit_event:if_enabled(State2, #ch.stats_timer,
+                            fun() -> emit_stats(State2) end),
+    put(channel_operation_timeout, ?CHANNEL_OPERATION_TIMEOUT),
+    {ok, State2, hibernate,
      {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
 
 prioritise_call(Msg, _From, _Len, _State) ->
@@ -326,8 +444,8 @@ handle_call(_Request, _From, State) ->
     noreply(State).
 
 handle_cast({method, Method, Content, Flow},
-            State = #ch{reader_pid   = Reader,
-                        virtual_host = VHost}) ->
+            State = #ch{reader_pid        = Reader,
+                        interceptor_state = IState}) ->
     case Flow of
         %% We are going to process a message from the rabbit_reader
         %% process, so here we ack it. In this case we are accessing
@@ -335,9 +453,10 @@ handle_cast({method, Method, Content, Flow},
         flow   -> credit_flow:ack(Reader);
         noflow -> ok
     end,
-    try handle_method(rabbit_channel_interceptor:intercept_method(
-                        expand_shortcuts(Method, State), VHost),
-                      Content, State) of
+
+    try handle_method(rabbit_channel_interceptor:intercept_in(
+                        expand_shortcuts(Method, State), Content, IState),
+                      State) of
         {reply, Reply, NewState} ->
             ok = send(Reply, NewState),
             noreply(NewState);
@@ -478,23 +597,26 @@ handle_info({'DOWN', _MRef, process, QPid, Reason}, State) ->
                       queue_monitors = pmon:erase(QPid, QMons)});
 
 handle_info({'EXIT', _Pid, Reason}, State) ->
-    {stop, Reason, State}.
+    {stop, Reason, State};
+
+handle_info({{Ref, Node}, LateAnswer}, State = #ch{channel = Channel})
+  when is_reference(Ref) ->
+    log(warning, "Channel ~p ignoring late answer ~p from ~p",
+        [Channel, LateAnswer, Node]),
+    noreply(State).
 
 handle_pre_hibernate(State) ->
     ok = clear_permission_cache(),
     rabbit_event:if_enabled(
       State, #ch.stats_timer,
-      fun () -> emit_stats(State, [{idle_since, now()}]) end),
+      fun () -> emit_stats(State,
+                           [{idle_since,
+                             time_compat:os_system_time(milli_seconds)}])
+                end),
     {hibernate, rabbit_event:stop_stats_timer(State, #ch.stats_timer)}.
 
-terminate(Reason, State) ->
-    {Res, _State1} = notify_queues(State),
-    case Reason of
-        normal            -> ok = Res;
-        shutdown          -> ok = Res;
-        {shutdown, _Term} -> ok = Res;
-        _                 -> ok
-    end,
+terminate(_Reason, State) ->
+    {_Res, _State1} = notify_queues(State),
     pg_local:leave(rabbit_channels, self()),
     rabbit_event:if_enabled(State, #ch.stats_timer,
                             fun() -> emit_stats(State) end),
@@ -533,6 +655,11 @@ send(_Command, #ch{state = closing}) ->
 send(Command, #ch{writer_pid = WriterPid}) ->
     ok = rabbit_writer:send_command(WriterPid, Command).
 
+format_soft_error(#amqp_error{name = N, explanation = E, method = M}) ->
+    io_lib:format("operation ~s caused a channel exception ~s: ~p", [M, N, E]);
+format_soft_error(Reason) ->
+    Reason.
+
 handle_exception(Reason, State = #ch{protocol     = Protocol,
                                      channel      = Channel,
                                      writer_pid   = WriterPid,
@@ -546,9 +673,9 @@ handle_exception(Reason, State = #ch{protocol     = Protocol,
     case rabbit_binary_generator:map_exception(Channel, Reason, Protocol) of
         {Channel, CloseMethod} ->
             log(error, "Channel error on connection ~p (~s, vhost: '~s',"
-                       " user: '~s'), channel ~p:~n~p~n",
-                       [ConnPid, ConnName, VHost, User#user.username,
-                        Channel, Reason]),
+                " user: '~s'), channel ~p:~n~s~n",
+                [ConnPid, ConnName, VHost, User#user.username,
+                 Channel, format_soft_error(Reason)]),
             ok = rabbit_writer:send_command(WriterPid, CloseMethod),
             {noreply, State1};
         {0, _} ->
@@ -556,14 +683,12 @@ handle_exception(Reason, State = #ch{protocol     = Protocol,
             {stop, normal, State1}
     end.
 
--ifdef(use_specs).
--spec(precondition_failed/1 :: (string()) -> no_return()).
--endif.
+-spec precondition_failed(string()) -> no_return().
+
 precondition_failed(Format) -> precondition_failed(Format, []).
 
--ifdef(use_specs).
--spec(precondition_failed/2 :: (string(), [any()]) -> no_return()).
--endif.
+-spec precondition_failed(string(), [any()]) -> no_return().
+
 precondition_failed(Format, Params) ->
     rabbit_misc:protocol_error(precondition_failed, Format, Params).
 
@@ -730,6 +855,9 @@ record_confirms([], State) ->
 record_confirms(MXs, State = #ch{confirmed = C}) ->
     State#ch{confirmed = [MXs | C]}.
 
+handle_method({Method, Content}, State) ->
+    handle_method(Method, Content, State).
+
 handle_method(#'channel.open'{}, _, State = #ch{state = starting}) ->
     %% Don't leave "starting" as the state for 5s. TODO is this TRTTD?
     State1 = State#ch{state = running},
@@ -739,7 +867,7 @@ handle_method(#'channel.open'{}, _, State = #ch{state = starting}) ->
 
 handle_method(#'channel.open'{}, _, _State) ->
     rabbit_misc:protocol_error(
-      command_invalid, "second 'channel.open' seen", []);
+      channel_error, "second 'channel.open' seen", []);
 
 handle_method(_Method, _, #ch{state = starting}) ->
     rabbit_misc:protocol_error(channel_error, "expected 'channel.open'", []);
@@ -756,7 +884,7 @@ handle_method(_Method, _, State = #ch{state = closing}) ->
     {noreply, State};
 
 handle_method(#'channel.close'{}, _, State = #ch{reader_pid = ReaderPid}) ->
-    {ok, State1} = notify_queues(State),
+    {_Result, State1} = notify_queues(State),
     %% We issue the channel.close_ok response after a handshake with
     %% the reader, the other half of which is ready_for_close. That
     %% way the reader forgets about the channel before we send the
@@ -1643,7 +1771,9 @@ notify_queues(State = #ch{consumer_mapping  = Consumers,
                           delivering_queues = DQ }) ->
     QPids = sets:to_list(
               sets:union(sets:from_list(consumer_queues(Consumers)), DQ)),
-    {rabbit_amqqueue:notify_down_all(QPids, self()), State#ch{state = closing}}.
+    {rabbit_amqqueue:notify_down_all(QPids, self(),
+                                     get(channel_operation_timeout)),
+     State#ch{state = closing}}.
 
 foreach_per_queue(_F, []) ->
     ok;
@@ -1847,11 +1977,16 @@ i(messages_uncommitted,    #ch{tx = {Msgs, _Acks}})       -> queue:len(Msgs);
 i(messages_uncommitted,    #ch{})                         -> 0;
 i(acks_uncommitted,        #ch{tx = {_Msgs, Acks}})       -> ack_len(Acks);
 i(acks_uncommitted,        #ch{})                         -> 0;
-i(state,                   #ch{state = running})         -> credit_flow:state();
+i(state,                   #ch{state = running})          -> credit_flow:state();
 i(state,                   #ch{state = State})            -> State;
 i(prefetch_count,          #ch{consumer_prefetch = C})    -> C;
 i(global_prefetch_count, #ch{limiter = Limiter}) ->
     rabbit_limiter:get_prefetch_limit(Limiter);
+i(garbage_collection, _State) ->
+    rabbit_misc:get_gc_info(self());
+i(reductions, _State) ->
+    {reductions, Reductions} = erlang:process_info(self(), reductions),
+    Reductions;
 i(Item, _) ->
     throw({bad_argument, Item}).
 
@@ -1895,3 +2030,7 @@ erase_queue_stats(QName) ->
     [erase({queue_exchange_stats, QX}) ||
         {{queue_exchange_stats, QX = {QName0, _}}, _} <- get(),
         QName0 =:= QName].
+
+get_vhost(#ch{virtual_host = VHost}) -> VHost.
+
+get_user(#ch{user = User}) -> User.
diff --git a/deps/rabbit_common/src/rabbit_channel_interceptor.erl b/deps/rabbit_common/src/rabbit_channel_interceptor.erl
new file mode 100644 (file)
index 0000000..909bf4a
--- /dev/null
@@ -0,0 +1,104 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_channel_interceptor).
+
+-include("rabbit_framing.hrl").
+-include("rabbit.hrl").
+
+-export([init/1, intercept_in/3]).
+
+-type(method_name() :: rabbit_framing:amqp_method_name()).
+-type(original_method() :: rabbit_framing:amqp_method_record()).
+-type(processed_method() :: rabbit_framing:amqp_method_record()).
+-type(original_content() :: rabbit_types:maybe(rabbit_types:content())).
+-type(processed_content() :: rabbit_types:maybe(rabbit_types:content())).
+-type(interceptor_state() :: term()).
+
+-callback description() -> [proplists:property()].
+%% Derive some initial state from the channel. This will be passed back
+%% as the third argument of intercept/3.
+-callback init(rabbit_channel:channel()) -> interceptor_state().
+-callback intercept(original_method(), original_content(),
+                    interceptor_state()) ->
+    {processed_method(), processed_content()} |
+    rabbit_misc:channel_or_connection_exit().
+-callback applies_to() -> list(method_name()).
+
+init(Ch) ->
+    Mods = [M || {_, M} <- rabbit_registry:lookup_all(channel_interceptor)],
+    check_no_overlap(Mods),
+    [{Mod, Mod:init(Ch)} || Mod <- Mods].
+
+check_no_overlap(Mods) ->
+    check_no_overlap1([sets:from_list(Mod:applies_to()) || Mod <- Mods]).
+
+%% Check no non-empty pairwise intersection in a list of sets
+check_no_overlap1(Sets) ->
+    lists:foldl(fun(Set, Union) ->
+                    Is = sets:intersection(Set, Union),
+                    case sets:size(Is) of
+                        0 -> ok;
+                        _ ->
+                            internal_error("Interceptor: more than one "
+                                                "module handles ~p~n", [Is])
+                      end,
+                    sets:union(Set, Union)
+                end,
+                sets:new(),
+                Sets),
+    ok.
+
+intercept_in(M, C, Mods) ->
+    lists:foldl(fun({Mod, ModState}, {M1, C1}) ->
+                    call_module(Mod, ModState, M1, C1)
+                end,
+                {M, C},
+                Mods).
+
+call_module(Mod, St, M, C) ->
+    % this little dance is because Mod might be unloaded at any point
+    case (catch {ok, Mod:intercept(M, C, St)}) of
+        {ok, R} -> validate_response(Mod, M, C, R);
+        {'EXIT', {undef, [{Mod, intercept, _, _} | _]}} -> {M, C}
+    end.
+
+validate_response(Mod, M1, C1, R = {M2, C2}) ->
+    case {validate_method(M1, M2), validate_content(C1, C2)} of
+        {true, true} -> R;
+        {false, _} ->
+            internal_error("Interceptor: ~p expected to return "
+                                "method: ~p but returned: ~p",
+                           [Mod, rabbit_misc:method_record_type(M1),
+                            rabbit_misc:method_record_type(M2)]);
+        {_, false} ->
+            internal_error("Interceptor: ~p expected to return "
+                                "content iff content is provided but "
+                                "content in = ~p; content out = ~p",
+                           [Mod, C1, C2])
+    end.
+
+validate_method(M, M2) ->
+    rabbit_misc:method_record_type(M) =:= rabbit_misc:method_record_type(M2).
+
+validate_content(none, none) -> true;
+validate_content(#content{}, #content{}) -> true;
+validate_content(_, _) -> false.
+
+%% keep dialyzer happy
+-spec internal_error(string(), [any()]) -> no_return().
+internal_error(Format, Args) ->
+    rabbit_misc:protocol_error(internal_error, Format, Args).
similarity index 82%
rename from rabbitmq-server/src/rabbit_command_assembler.erl
rename to deps/rabbit_common/src/rabbit_command_assembler.erl
index f93b85b122a0b685a8e3516b5870882567de3e2d..5adf7b1b5f7940d404388e8269f844eb3227981f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_command_assembler).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([frame/0]).
 
--type(frame_type() :: ?FRAME_METHOD | ?FRAME_HEADER | ?FRAME_BODY |
+-type frame_type() :: ?FRAME_METHOD | ?FRAME_HEADER | ?FRAME_BODY |
                       ?FRAME_OOB_METHOD | ?FRAME_OOB_HEADER | ?FRAME_OOB_BODY |
-                      ?FRAME_TRACE | ?FRAME_HEARTBEAT).
--type(protocol()   :: rabbit_framing:protocol()).
--type(method()     :: rabbit_framing:amqp_method_record()).
--type(class_id()   :: rabbit_framing:amqp_class_id()).
--type(weight()     :: non_neg_integer()).
--type(body_size()  :: non_neg_integer()).
--type(content()    :: rabbit_types:undecoded_content()).
-
--type(frame() ::
+                      ?FRAME_TRACE | ?FRAME_HEARTBEAT.
+-type protocol()   :: rabbit_framing:protocol().
+-type method()     :: rabbit_framing:amqp_method_record().
+-type class_id()   :: rabbit_framing:amqp_class_id().
+-type weight()     :: non_neg_integer().
+-type body_size()  :: non_neg_integer().
+-type content()    :: rabbit_types:undecoded_content().
+
+-type frame() ::
         {'method',         rabbit_framing:amqp_method_name(), binary()} |
         {'content_header', class_id(), weight(), body_size(), binary()} |
-        {'content_body',   binary()}).
+        {'content_body',   binary()}.
 
--type(state() ::
+-type state() ::
         {'method',         protocol()} |
         {'content_header', method(), class_id(), protocol()} |
-        {'content_body',   method(), body_size(), class_id(), protocol()}).
-
--spec(analyze_frame/3 :: (frame_type(), binary(), protocol()) ->
-                              frame() | 'heartbeat' | 'error').
+        {'content_body',   method(), body_size(), class_id(), protocol()}.
 
--spec(init/1 :: (protocol()) -> {ok, state()}).
--spec(process/2 :: (frame(), state()) ->
-                        {ok, state()} |
-                        {ok, method(), state()} |
-                        {ok, method(), content(), state()} |
-                        {error, rabbit_types:amqp_error()}).
+-spec analyze_frame(frame_type(), binary(), protocol()) ->
+          frame() | 'heartbeat' | 'error'.
 
--endif.
+-spec init(protocol()) -> {ok, state()}.
+-spec process(frame(), state()) ->
+          {ok, state()} |
+          {ok, method(), state()} |
+          {ok, method(), content(), state()} |
+          {error, rabbit_types:amqp_error()}.
 
 %%--------------------------------------------------------------------
 
diff --git a/deps/rabbit_common/src/rabbit_common.app.src b/deps/rabbit_common/src/rabbit_common.app.src
new file mode 100644 (file)
index 0000000..3dbc2b7
--- /dev/null
@@ -0,0 +1,14 @@
+% vim:ft=erlang:
+
+{application, rabbit_common, [
+       {description, ""},
+       {vsn, "3.6.6"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+                stdlib,
+                xmerl
+       ]}
+]}.
diff --git a/deps/rabbit_common/src/rabbit_control_misc.erl b/deps/rabbit_common/src/rabbit_control_misc.erl
new file mode 100644 (file)
index 0000000..2e1f6cc
--- /dev/null
@@ -0,0 +1,93 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_control_misc).
+
+-export([emitting_map/4, emitting_map/5, emitting_map_with_exit_handler/4,
+         emitting_map_with_exit_handler/5, wait_for_info_messages/5,
+         print_cmd_result/2]).
+
+-spec emitting_map(pid(), reference(), fun(), list()) -> 'ok'.
+-spec emitting_map(pid(), reference(), fun(), list(), atom()) -> 'ok'.
+-spec emitting_map_with_exit_handler
+        (pid(), reference(), fun(), list()) -> 'ok'.
+-spec emitting_map_with_exit_handler
+        (pid(), reference(), fun(), list(), atom()) -> 'ok'.
+-spec print_cmd_result(atom(), term()) -> 'ok'.
+
+emitting_map(AggregatorPid, Ref, Fun, List) ->
+    emitting_map(AggregatorPid, Ref, Fun, List, continue),
+    AggregatorPid ! {Ref, finished},
+    ok.
+
+emitting_map(AggregatorPid, Ref, Fun, List, continue) ->
+    _ = emitting_map0(AggregatorPid, Ref, Fun, List, fun step/4),
+    ok.
+
+emitting_map_with_exit_handler(AggregatorPid, Ref, Fun, List) ->
+    emitting_map_with_exit_handler(AggregatorPid, Ref, Fun, List, continue),
+    AggregatorPid ! {Ref, finished},
+    ok.
+
+emitting_map_with_exit_handler(AggregatorPid, Ref, Fun, List, continue) ->
+    _ = emitting_map0(AggregatorPid, Ref, Fun, List, fun step_with_exit_handler/4),
+    ok.
+
+emitting_map0(AggregatorPid, Ref, Fun, List, StepFun) ->
+    [StepFun(AggregatorPid, Ref, Fun, Item) || Item <- List].
+
+step(AggregatorPid, Ref, Fun, Item) ->
+    AggregatorPid ! {Ref, Fun(Item), continue},
+    ok.
+
+step_with_exit_handler(AggregatorPid, Ref, Fun, Item) ->
+    Noop = make_ref(),
+    case rabbit_misc:with_exit_handler(
+           fun () -> Noop end,
+           fun () -> Fun(Item) end) of
+        Noop ->
+            ok;
+        Res  ->
+            AggregatorPid ! {Ref, Res, continue},
+            ok
+    end.
+
+wait_for_info_messages(Pid, Ref, ArgAtoms, DisplayFun, Timeout) ->
+    _ = notify_if_timeout(Pid, Ref, Timeout),
+    wait_for_info_messages(Ref, ArgAtoms, DisplayFun).
+
+wait_for_info_messages(Ref, InfoItemKeys, DisplayFun) when is_reference(Ref) ->
+    receive
+        {Ref,  finished}         ->
+            ok;
+        {Ref,  {timeout, T}}     ->
+            exit({error, {timeout, (T / 1000)}});
+        {Ref,  []}               ->
+            wait_for_info_messages(Ref, InfoItemKeys, DisplayFun);
+        {Ref,  Result, continue} ->
+            DisplayFun(Result, InfoItemKeys),
+            wait_for_info_messages(Ref, InfoItemKeys, DisplayFun);
+        {error, Error}           ->
+            Error;
+        _                        ->
+            wait_for_info_messages(Ref, InfoItemKeys, DisplayFun)
+    end.
+
+notify_if_timeout(Pid, Ref, Timeout) ->
+    timer:send_after(Timeout, Pid, {Ref, {timeout, Timeout}}).
+
+print_cmd_result(authenticate_user, _Result) -> io:format("Success~n");
+print_cmd_result(join_cluster, already_member) -> io:format("The node is already a member of this cluster~n").
diff --git a/deps/rabbit_common/src/rabbit_data_coercion.erl b/deps/rabbit_common/src/rabbit_data_coercion.erl
new file mode 100644 (file)
index 0000000..22b0dbc
--- /dev/null
@@ -0,0 +1,22 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_data_coercion).
+
+-export([to_binary/1]).
+
+to_binary(Val) when is_list(Val) -> list_to_binary(Val);
+to_binary(Val)                   -> Val.
diff --git a/deps/rabbit_common/src/rabbit_error_logger_handler.erl b/deps/rabbit_common/src/rabbit_error_logger_handler.erl
new file mode 100644 (file)
index 0000000..314d0e6
--- /dev/null
@@ -0,0 +1,175 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+-module(rabbit_error_logger_handler).
+
+-behaviour(gen_event).
+
+%% API
+-export([start_link/0, add_handler/0]).
+
+%% gen_event callbacks
+-export([init/1, handle_event/2, handle_call/2, 
+         handle_info/2, terminate/2, code_change/3]).
+
+-define(SERVER, ?MODULE).
+
+-record(state, {report = []}).
+
+%%%===================================================================
+%%% API
+%%%===================================================================
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Creates an event manager
+%%
+%% @spec start_link() -> {ok, Pid} | {error, Error}
+%% @end
+%%--------------------------------------------------------------------
+start_link() ->
+    gen_event:start_link({local, ?SERVER}).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Adds an event handler
+%%
+%% @spec add_handler() -> ok | {'EXIT', Reason} | term()
+%% @end
+%%--------------------------------------------------------------------
+add_handler() ->
+    gen_event:add_handler(?SERVER, ?MODULE, []).
+
+%%%===================================================================
+%%% gen_event callbacks
+%%%===================================================================
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Whenever a new event handler is added to an event manager,
+%% this function is called to initialize the event handler.
+%%
+%% @spec init(Args) -> {ok, State}
+%% @end
+%%--------------------------------------------------------------------
+init([]) ->
+    {ok, #state{}}.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Whenever an event manager receives an event sent using
+%% gen_event:notify/2 or gen_event:sync_notify/2, this function is
+%% called for each installed event handler to handle the event.
+%%
+%% @spec handle_event(Event, State) ->
+%%                          {ok, State} |
+%%                          {swap_handler, Args1, State1, Mod2, Args2} |
+%%                          remove_handler
+%% @end
+%%--------------------------------------------------------------------
+
+handle_event({info_report, _Gleader, {_Pid, _Type,
+                                      {net_kernel, {'EXIT', _, Reason}}}},
+             #state{report = Report} = State) ->
+    NewReport = case format(Reason) of
+                    [] -> Report;
+                    Formatted -> [Formatted | Report]
+                end,
+    {ok, State#state{report = NewReport}};
+handle_event(_Event, State) ->
+    {ok, State}.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Whenever an event manager receives a request sent using
+%% gen_event:call/3,4, this function is called for the specified
+%% event handler to handle the request.
+%%
+%% @spec handle_call(Request, State) ->
+%%                   {ok, Reply, State} |
+%%                   {swap_handler, Reply, Args1, State1, Mod2, Args2} |
+%%                   {remove_handler, Reply}
+%% @end
+%%--------------------------------------------------------------------
+handle_call(get_connection_report, State) ->
+    {ok, lists:reverse(State#state.report), State#state{report = []}};
+handle_call(_Request, State) ->
+    Reply = ok,
+    {ok, Reply, State}.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% This function is called for each installed event handler when
+%% an event manager receives any other message than an event or a
+%% synchronous request (or a system message).
+%%
+%% @spec handle_info(Info, State) ->
+%%                         {ok, State} |
+%%                         {swap_handler, Args1, State1, Mod2, Args2} |
+%%                         remove_handler
+%% @end
+%%--------------------------------------------------------------------
+handle_info(_Info, State) ->
+    {ok, State}.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Whenever an event handler is deleted from an event manager, this
+%% function is called. It should be the opposite of Module:init/1 and
+%% do any necessary cleaning up.
+%%
+%% @spec terminate(Reason, State) -> void()
+%% @end
+%%--------------------------------------------------------------------
+terminate(_Reason, _State) ->
+    ok.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Convert process state when code is changed
+%%
+%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
+%% @end
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+%%%===================================================================
+%%% Internal functions
+%%%===================================================================
+format({check_dflag_xnc_failed, _What}) ->
+    {"  * Remote node uses an incompatible Erlang version ~n", []};
+format({recv_challenge_failed, no_node, Node}) ->
+    {"  * Hostname mismatch: node ~p believes its host is different. Please ensure that hostnames resolve the same way locally and on ~p~n", [Node, Node]};
+format({recv_challenge_failed, Error}) ->
+    {"  * Distribution failed unexpectedly while waiting for challenge: ~p~n", [Error]};
+format({recv_challenge_ack_failed, bad_cookie}) ->
+    {"  * Authentication failed (rejected by the local node), please check the Erlang cookie~n", []};
+format({recv_challenge_ack_failed, {error, closed}}) ->
+    {"  * Authentication failed (rejected by the remote node), please check the Erlang cookie~n", []};
+format({recv_status_failed, not_allowed}) ->
+    {"  * This node is not on the list of nodes authorised by remote node (see net_kernel:allow/1)~n", []};
+format({recv_status_failed, {error, closed}}) ->
+    {"  * Remote host closed TCP connection before completing authentication. Is the Erlang distribution using TLS?~n", []};
+format(setup_timer_timeout) ->
+    {"  * TCP connection to remote host has timed out. Is the Erlang distribution using TLS?~n", []};
+format(_) ->
+    [].
similarity index 73%
rename from rabbitmq-server/src/rabbit_event.erl
rename to deps/rabbit_common/src/rabbit_event.erl
index 13bf6bc6f862ea6ad223270988f7a106323d92bc..bd869dd736f6acfa053e268b13b6a0f0af5ff01d 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_event).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([event_type/0, event_props/0, event_timestamp/0, event/0]).
 
--type(event_type() :: atom()).
--type(event_props() :: term()).
--type(event_timestamp() ::
-        {non_neg_integer(), non_neg_integer(), non_neg_integer()}).
+-type event_type() :: atom().
+-type event_props() :: term().
+-type event_timestamp() :: non_neg_integer().
 
--type(event() :: #event { type      :: event_type(),
+-type event() :: #event { type      :: event_type(),
                           props     :: event_props(),
                           reference :: 'none' | reference(),
-                          timestamp :: event_timestamp() }).
-
--type(level() :: 'none' | 'coarse' | 'fine').
-
--type(timer_fun() :: fun (() -> 'ok')).
--type(container() :: tuple()).
--type(pos() :: non_neg_integer()).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(init_stats_timer/2 :: (container(), pos()) -> container()).
--spec(init_disabled_stats_timer/2 :: (container(), pos()) -> container()).
--spec(ensure_stats_timer/3 :: (container(), pos(), term()) -> container()).
--spec(stop_stats_timer/2 :: (container(), pos()) -> container()).
--spec(reset_stats_timer/2 :: (container(), pos()) -> container()).
--spec(stats_level/2 :: (container(), pos()) -> level()).
--spec(if_enabled/3 :: (container(), pos(), timer_fun()) -> 'ok').
--spec(notify/2 :: (event_type(), event_props()) -> 'ok').
--spec(notify/3 :: (event_type(), event_props(), reference() | 'none') -> 'ok').
--spec(notify_if/3 :: (boolean(), event_type(), event_props()) -> 'ok').
--spec(sync_notify/2 :: (event_type(), event_props()) -> 'ok').
--spec(sync_notify/3 :: (event_type(), event_props(),
-                        reference() | 'none') -> 'ok').
-
--endif.
+                          timestamp :: event_timestamp() }.
+
+-type level() :: 'none' | 'coarse' | 'fine'.
+
+-type timer_fun() :: fun (() -> 'ok').
+-type container() :: tuple().
+-type pos() :: non_neg_integer().
+
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec init_stats_timer(container(), pos()) -> container().
+-spec init_disabled_stats_timer(container(), pos()) -> container().
+-spec ensure_stats_timer(container(), pos(), term()) -> container().
+-spec stop_stats_timer(container(), pos()) -> container().
+-spec reset_stats_timer(container(), pos()) -> container().
+-spec stats_level(container(), pos()) -> level().
+-spec if_enabled(container(), pos(), timer_fun()) -> 'ok'.
+-spec notify(event_type(), event_props()) -> 'ok'.
+-spec notify(event_type(), event_props(), reference() | 'none') -> 'ok'.
+-spec notify_if(boolean(), event_type(), event_props()) -> 'ok'.
+-spec sync_notify(event_type(), event_props()) -> 'ok'.
+-spec sync_notify(event_type(), event_props(), reference() | 'none') -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
@@ -160,5 +154,5 @@ event_cons(Type, Props, Ref) ->
     #event{type      = Type,
            props     = Props,
            reference = Ref,
-           timestamp = os:timestamp()}.
+           timestamp = time_compat:os_system_time(milli_seconds)}.
 
similarity index 91%
rename from rabbitmq-server/src/rabbit_exchange_decorator.erl
rename to deps/rabbit_common/src/rabbit_exchange_decorator.erl
index 7c5bfdf9131703d1d06ef142cc7e55803598ad75..a43991b2755c5736cb9e15ebc459cd2a86c601dc 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_decorator).
@@ -29,8 +29,6 @@
 %% It's possible in the future we might make decorators
 %% able to manipulate messages as they are published.
 
--ifdef(use_specs).
-
 -type(tx() :: 'transaction' | 'none').
 -type(serial() :: pos_integer() | tx()).
 
 %% none:no callbacks, noroute:all callbacks except route, all:all callbacks
 -callback active_for(rabbit_types:exchange()) -> 'none' | 'noroute' | 'all'.
 
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{description, 0}, {serialise_events, 1}, {create, 2}, {delete, 3},
-     {policy_changed, 2}, {add_binding, 3}, {remove_bindings, 3},
-     {route, 2}, {active_for, 1}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
-
 %%----------------------------------------------------------------------------
 
 %% select a subset of active decorators
similarity index 85%
rename from rabbitmq-server/src/rabbit_exchange_type.erl
rename to deps/rabbit_common/src/rabbit_exchange_type.erl
index 92c1de6c2183790624eed0bede9be3e475e4f0c6..5f282d1a3f10d01a0d77da975d617b61bd7ed375 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_type).
 
--ifdef(use_specs).
-
 -type(tx() :: 'transaction' | 'none').
 -type(serial() :: pos_integer() | tx()).
 
 -callback assert_args_equivalence(rabbit_types:exchange(),
                                   rabbit_framing:amqp_table()) ->
     'ok' | rabbit_types:connection_exit().
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{description, 0}, {serialise_events, 0}, {route, 2},
-     {validate, 1}, {validate_binding, 2}, {policy_changed, 2},
-     {create, 2}, {delete, 3}, {add_binding, 3}, {remove_bindings, 3},
-     {assert_args_equivalence, 2}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
diff --git a/deps/rabbit_common/src/rabbit_health_check.erl b/deps/rabbit_common/src/rabbit_health_check.erl
new file mode 100644 (file)
index 0000000..f3bc3fa
--- /dev/null
@@ -0,0 +1,95 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+-module(rabbit_health_check).
+
+%% External API
+-export([node/1, node/2]).
+
+%% Internal API
+-export([local/0]).
+
+-spec node(node(), timeout()) -> ok | {badrpc, term()} | {error_string, string()}.
+-spec local() -> ok | {error_string, string()}.
+
+%%----------------------------------------------------------------------------
+%% External functions
+%%----------------------------------------------------------------------------
+
+node(Node) ->
+    %% same default as in CLI
+    node(Node, 70000).
+node(Node, Timeout) ->
+    rabbit_misc:rpc_call(Node, rabbit_health_check, local, [], Timeout).
+
+local() ->
+    run_checks([list_channels, list_queues, alarms, rabbit_node_monitor]).
+
+%%----------------------------------------------------------------------------
+%% Internal functions
+%%----------------------------------------------------------------------------
+run_checks([]) ->
+    ok;
+run_checks([C|Cs]) ->
+    case node_health_check(C) of
+        ok ->
+            run_checks(Cs);
+        Error ->
+            Error
+    end.
+
+node_health_check(list_channels) ->
+    case rabbit_channel:info_local([pid]) of
+        L when is_list(L) ->
+            ok;
+        Other ->
+            ErrorMsg = io_lib:format("list_channels unexpected output: ~p",
+                                     [Other]),
+            {error_string, ErrorMsg}
+    end;
+
+node_health_check(list_queues) ->
+    health_check_queues(rabbit_vhost:list());
+
+node_health_check(rabbit_node_monitor) ->
+    case rabbit_node_monitor:partitions() of
+        L when is_list(L) ->
+            ok;
+        Other ->
+            ErrorMsg = io_lib:format("rabbit_node_monitor reports unexpected partitions value: ~p",
+                                     [Other]),
+            {error_string, ErrorMsg}
+    end;
+
+node_health_check(alarms) ->
+    case proplists:get_value(alarms, rabbit:status()) of
+        [] ->
+            ok;
+        Alarms ->
+            ErrorMsg = io_lib:format("resource alarm(s) in effect:~p", [Alarms]),
+            {error_string, ErrorMsg}
+    end.
+
+health_check_queues([]) ->
+    ok;
+health_check_queues([VHost|RestVHosts]) ->
+    case rabbit_amqqueue:info_local(VHost) of
+        L when is_list(L) ->
+            health_check_queues(RestVHosts);
+        Other ->
+            ErrorMsg = io_lib:format("list_queues unexpected output for vhost ~s: ~p",
+                                     [VHost, Other]),
+            {error_string, ErrorMsg}
+    end.
similarity index 83%
rename from rabbitmq-server/src/rabbit_heartbeat.erl
rename to deps/rabbit_common/src/rabbit_heartbeat.erl
index 993076770fdf4726a0ef4cb5d22d87a03e845ffc..c9b366917a13c0735efa6c45a0e355d7631903af 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_heartbeat).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([heartbeaters/0]).
 
--type(heartbeaters() :: {rabbit_types:maybe(pid()), rabbit_types:maybe(pid())}).
+-type heartbeaters() :: {rabbit_types:maybe(pid()), rabbit_types:maybe(pid())}.
 
--type(heartbeat_callback() :: fun (() -> any())).
+-type heartbeat_callback() :: fun (() -> any()).
 
--spec(start/6 ::
-        (pid(), rabbit_net:socket(),
-         non_neg_integer(), heartbeat_callback(),
-         non_neg_integer(), heartbeat_callback()) -> heartbeaters()).
+-spec start
+        (pid(), rabbit_net:socket(), non_neg_integer(), heartbeat_callback(),
+         non_neg_integer(), heartbeat_callback()) ->
+            heartbeaters().
 
--spec(start/7 ::
+-spec start
         (pid(), rabbit_net:socket(), rabbit_types:proc_name(),
-         non_neg_integer(), heartbeat_callback(),
-         non_neg_integer(), heartbeat_callback()) -> heartbeaters()).
+         non_neg_integer(), heartbeat_callback(), non_neg_integer(),
+         heartbeat_callback()) ->
+            heartbeaters().
 
--spec(start_heartbeat_sender/4 ::
+-spec start_heartbeat_sender
         (rabbit_net:socket(), non_neg_integer(), heartbeat_callback(),
-         rabbit_types:proc_type_and_name()) -> rabbit_types:ok(pid())).
--spec(start_heartbeat_receiver/4 ::
+         rabbit_types:proc_type_and_name()) ->
+            rabbit_types:ok(pid()).
+-spec start_heartbeat_receiver
         (rabbit_net:socket(), non_neg_integer(), heartbeat_callback(),
-         rabbit_types:proc_type_and_name()) -> rabbit_types:ok(pid())).
-
--spec(pause_monitor/1 :: (heartbeaters()) -> 'ok').
--spec(resume_monitor/1 :: (heartbeaters()) -> 'ok').
+         rabbit_types:proc_type_and_name()) ->
+            rabbit_types:ok(pid()).
 
--spec(system_code_change/4 :: (_,_,_,_) -> {'ok',_}).
--spec(system_continue/3 :: (_,_,{_, _}) -> any()).
--spec(system_terminate/4 :: (_,_,_,_) -> none()).
+-spec pause_monitor(heartbeaters()) -> 'ok'.
+-spec resume_monitor(heartbeaters()) -> 'ok'.
 
--endif.
+-spec system_code_change(_,_,_,_) -> {'ok',_}.
+-spec system_continue(_,_,{_, _}) -> any().
+-spec system_terminate(_,_,_,_) -> none().
 
 %%----------------------------------------------------------------------------
 start(SupPid, Sock, SendTimeoutSec, SendFun, ReceiveTimeoutSec, ReceiveFun) ->
@@ -116,7 +115,7 @@ start_heartbeater(TimeoutSec, SupPid, Sock, TimeoutFun, Name, Callback,
       SupPid, {Name,
                {rabbit_heartbeat, Callback,
                 [Sock, TimeoutSec, TimeoutFun, {Name, Identity}]},
-               transient, ?MAX_WAIT, worker, [rabbit_heartbeat]}).
+               transient, ?WORKER_WAIT, worker, [rabbit_heartbeat]}).
 
 heartbeater(Params, Identity) ->
     Deb = sys:debug_options([]),
similarity index 70%
rename from rabbitmq-server/src/rabbit_misc.erl
rename to deps/rabbit_common/src/rabbit_misc.erl
index ed5b38e815b4bfde040f2aa121dd14cac65c4d26..75cee8b6c0547311c58e4f903358b9aff8f4af0e 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_misc).
 -include("rabbit.hrl").
 -include("rabbit_framing.hrl").
+-include("rabbit_misc.hrl").
 
 -export([method_record_type/1, polite_pause/0, polite_pause/1]).
 -export([die/1, frame_error/2, amqp_error/4, quit/1,
@@ -44,6 +45,7 @@
 -export([format/2, format_many/1, format_stderr/2]).
 -export([unfold/2, ceil/1, queue_fold/3]).
 -export([sort_field_table/1]).
+-export([atom_to_binary/1]).
 -export([pid_to_string/1, string_to_pid/1,
          pid_change_node/2, node_to_fake_pid/1]).
 -export([version_compare/2, version_compare/3]).
 -export([dict_cons/3, orddict_cons/3, gb_trees_cons/3]).
 -export([gb_trees_fold/3, gb_trees_foreach/2]).
 -export([all_module_attributes/1, build_acyclic_graph/3]).
--export([now_ms/0]).
 -export([const/1]).
 -export([ntoa/1, ntoab/1]).
 -export([is_process_alive/1]).
--export([pget/2, pget/3, pget_or_die/2, pset/3]).
+-export([pget/2, pget/3, pget_or_die/2, pmerge/3, pset/3, plmerge/2]).
 -export([format_message_queue/2]).
 -export([append_rpc_all_nodes/4]).
 -export([os_cmd/1]).
 -export([json_encode/1, json_decode/1, json_to_term/1, term_to_json/1]).
 -export([check_expiry/1]).
 -export([base64url/1]).
--export([interval_operation/4]).
+-export([interval_operation/5]).
 -export([ensure_timer/4, stop_timer/2, send_after/3, cancel_timer/1]).
 -export([get_parent/0]).
--export([store_proc_name/1, store_proc_name/2]).
+-export([store_proc_name/1, store_proc_name/2, get_proc_name/0]).
 -export([moving_average/4]).
--export([now_to_ms/1]).
 -export([get_env/3]).
+-export([get_channel_operation_timeout/0]).
+-export([random/1]).
+-export([rpc_call/4, rpc_call/5, rpc_call/7]).
+-export([report_default_thread_pool_size/0]).
+-export([get_gc_info/1]).
 
 %% Horrible macro to use in guards
 -define(IS_BENIGN_EXIT(R),
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([resource_name/0, thunk/1, channel_or_connection_exit/0]).
 
--type(ok_or_error() :: rabbit_types:ok_or_error(any())).
--type(thunk(T) :: fun(() -> T)).
--type(resource_name() :: binary()).
--type(channel_or_connection_exit()
-      :: rabbit_types:channel_exit() | rabbit_types:connection_exit()).
--type(digraph_label() :: term()).
--type(graph_vertex_fun() ::
-        fun (({atom(), [term()]}) -> [{digraph:vertex(), digraph_label()}])).
--type(graph_edge_fun() ::
-        fun (({atom(), [term()]}) -> [{digraph:vertex(), digraph:vertex()}])).
--type(tref() :: {'erlang', reference()} | {timer, timer:tref()}).
-
--spec(method_record_type/1 :: (rabbit_framing:amqp_method_record())
-                              -> rabbit_framing:amqp_method_name()).
--spec(polite_pause/0 :: () -> 'done').
--spec(polite_pause/1 :: (non_neg_integer()) -> 'done').
--spec(die/1 ::
-        (rabbit_framing:amqp_exception()) -> channel_or_connection_exit()).
-
--spec(quit/1 :: (integer()) -> no_return()).
-
--spec(frame_error/2 :: (rabbit_framing:amqp_method_name(), binary())
-                       -> rabbit_types:connection_exit()).
--spec(amqp_error/4 ::
+-type ok_or_error() :: rabbit_types:ok_or_error(any()).
+-type thunk(T) :: fun(() -> T).
+-type resource_name() :: binary().
+-type channel_or_connection_exit()
+      :: rabbit_types:channel_exit() | rabbit_types:connection_exit().
+-type digraph_label() :: term().
+-type graph_vertex_fun() ::
+        fun (({atom(), [term()]}) -> [{digraph:vertex(), digraph_label()}]).
+-type graph_edge_fun() ::
+        fun (({atom(), [term()]}) -> [{digraph:vertex(), digraph:vertex()}]).
+-type tref() :: {'erlang', reference()} | {timer, timer:tref()}.
+
+-spec method_record_type(rabbit_framing:amqp_method_record()) ->
+          rabbit_framing:amqp_method_name().
+-spec polite_pause() -> 'done'.
+-spec polite_pause(non_neg_integer()) -> 'done'.
+-spec die(rabbit_framing:amqp_exception()) -> channel_or_connection_exit().
+
+-spec quit(integer()) -> no_return().
+
+-spec frame_error(rabbit_framing:amqp_method_name(), binary()) ->
+          rabbit_types:connection_exit().
+-spec amqp_error
         (rabbit_framing:amqp_exception(), string(), [any()],
-         rabbit_framing:amqp_method_name())
-        -> rabbit_types:amqp_error()).
--spec(protocol_error/3 :: (rabbit_framing:amqp_exception(), string(), [any()])
-                          -> channel_or_connection_exit()).
--spec(protocol_error/4 ::
+         rabbit_framing:amqp_method_name()) ->
+            rabbit_types:amqp_error().
+-spec protocol_error(rabbit_framing:amqp_exception(), string(), [any()]) ->
+          channel_or_connection_exit().
+-spec protocol_error
         (rabbit_framing:amqp_exception(), string(), [any()],
-         rabbit_framing:amqp_method_name()) -> channel_or_connection_exit()).
--spec(protocol_error/1 ::
-        (rabbit_types:amqp_error()) -> channel_or_connection_exit()).
--spec(not_found/1 :: (rabbit_types:r(atom())) -> rabbit_types:channel_exit()).
--spec(absent/2 :: (rabbit_types:amqqueue(), rabbit_amqqueue:absent_reason())
-                  -> rabbit_types:channel_exit()).
--spec(type_class/1 :: (rabbit_framing:amqp_field_type()) -> atom()).
--spec(assert_args_equivalence/4 :: (rabbit_framing:amqp_table(),
-                                    rabbit_framing:amqp_table(),
-                                    rabbit_types:r(any()), [binary()]) ->
-                                        'ok' | rabbit_types:connection_exit()).
--spec(assert_field_equivalence/4 ::
+         rabbit_framing:amqp_method_name()) ->
+            channel_or_connection_exit().
+-spec protocol_error(rabbit_types:amqp_error()) ->
+          channel_or_connection_exit().
+-spec not_found(rabbit_types:r(atom())) -> rabbit_types:channel_exit().
+-spec absent(rabbit_types:amqqueue(), rabbit_amqqueue:absent_reason()) ->
+          rabbit_types:channel_exit().
+-spec type_class(rabbit_framing:amqp_field_type()) -> atom().
+-spec assert_args_equivalence
+        (rabbit_framing:amqp_table(), rabbit_framing:amqp_table(),
+         rabbit_types:r(any()), [binary()]) ->
+            'ok' | rabbit_types:connection_exit().
+-spec assert_field_equivalence
         (any(), any(), rabbit_types:r(any()), atom() | binary()) ->
-                                         'ok' | rabbit_types:connection_exit()).
--spec(equivalence_fail/4 ::
+            'ok' | rabbit_types:connection_exit().
+-spec equivalence_fail
         (any(), any(), rabbit_types:r(any()), atom() | binary()) ->
-                                 rabbit_types:connection_exit()).
--spec(dirty_read/1 ::
-        ({atom(), any()}) -> rabbit_types:ok_or_error2(any(), 'not_found')).
--spec(table_lookup/2 ::
-        (rabbit_framing:amqp_table(), binary())
-        -> 'undefined' | {rabbit_framing:amqp_field_type(), any()}).
--spec(set_table_value/4 ::
-        (rabbit_framing:amqp_table(), binary(),
-         rabbit_framing:amqp_field_type(), rabbit_framing:amqp_value())
-        -> rabbit_framing:amqp_table()).
--spec(r/2 :: (rabbit_types:vhost(), K)
-             -> rabbit_types:r3(rabbit_types:vhost(), K, '_')
-                    when is_subtype(K, atom())).
--spec(r/3 ::
-        (rabbit_types:vhost() | rabbit_types:r(atom()), K, resource_name())
-        -> rabbit_types:r3(rabbit_types:vhost(), K, resource_name())
-               when is_subtype(K, atom())).
--spec(r_arg/4 ::
+            rabbit_types:connection_exit().
+-spec dirty_read({atom(), any()}) ->
+          rabbit_types:ok_or_error2(any(), 'not_found').
+-spec table_lookup(rabbit_framing:amqp_table(), binary()) ->
+          'undefined' | {rabbit_framing:amqp_field_type(), any()}.
+-spec set_table_value
+        (rabbit_framing:amqp_table(), binary(), rabbit_framing:amqp_field_type(),
+         rabbit_framing:amqp_value()) ->
+            rabbit_framing:amqp_table().
+-spec r(rabbit_types:vhost(), K) ->
+          rabbit_types:r3(rabbit_types:vhost(), K, '_')
+          when is_subtype(K, atom()).
+-spec r(rabbit_types:vhost() | rabbit_types:r(atom()), K, resource_name()) ->
+          rabbit_types:r3(rabbit_types:vhost(), K, resource_name())
+          when is_subtype(K, atom()).
+-spec r_arg
         (rabbit_types:vhost() | rabbit_types:r(atom()), K,
          rabbit_framing:amqp_table(), binary()) ->
-                      undefined |
-                      rabbit_types:error(
-                        {invalid_type, rabbit_framing:amqp_field_type()}) |
-                      rabbit_types:r(K) when is_subtype(K, atom())).
--spec(rs/1 :: (rabbit_types:r(atom())) -> string()).
--spec(enable_cover/0 :: () -> ok_or_error()).
--spec(start_cover/1 :: ([{string(), string()} | string()]) -> 'ok').
--spec(report_cover/0 :: () -> 'ok').
--spec(enable_cover/1 :: ([file:filename() | atom()]) -> ok_or_error()).
--spec(report_cover/1 :: ([file:filename() | atom()]) -> 'ok').
--spec(throw_on_error/2 ::
-        (atom(), thunk(rabbit_types:error(any()) | {ok, A} | A)) -> A).
--spec(with_exit_handler/2 :: (thunk(A), thunk(A)) -> A).
--spec(is_abnormal_exit/1 :: (any()) -> boolean()).
--spec(filter_exit_map/2 :: (fun ((A) -> B), [A]) -> [B]).
--spec(with_user/2 :: (rabbit_types:username(), thunk(A)) -> A).
--spec(with_user_and_vhost/3 ::
-        (rabbit_types:username(), rabbit_types:vhost(), thunk(A))
-        -> A).
--spec(execute_mnesia_transaction/1 :: (thunk(A)) -> A).
--spec(execute_mnesia_transaction/2 ::
-        (thunk(A), fun ((A, boolean()) -> B)) -> B).
--spec(execute_mnesia_tx_with_tail/1 ::
-        (thunk(fun ((boolean()) -> B))) -> B | (fun ((boolean()) -> B))).
--spec(ensure_ok/2 :: (ok_or_error(), atom()) -> 'ok').
--spec(tcp_name/3 ::
-        (atom(), inet:ip_address(), rabbit_networking:ip_port())
-        -> atom()).
--spec(format_inet_error/1 :: (atom()) -> string()).
--spec(upmap/2 :: (fun ((A) -> B), [A]) -> [B]).
--spec(map_in_order/2 :: (fun ((A) -> B), [A]) -> [B]).
--spec(table_filter/3:: (fun ((A) -> boolean()), fun ((A, boolean()) -> 'ok'),
-                                                    atom()) -> [A]).
--spec(dirty_read_all/1 :: (atom()) -> [any()]).
--spec(dirty_foreach_key/2 :: (fun ((any()) -> any()), atom())
-                             -> 'ok' | 'aborted').
--spec(dirty_dump_log/1 :: (file:filename()) -> ok_or_error()).
--spec(format/2 :: (string(), [any()]) -> string()).
--spec(format_many/1 :: ([{string(), [any()]}]) -> string()).
--spec(format_stderr/2 :: (string(), [any()]) -> 'ok').
--spec(unfold/2  :: (fun ((A) -> ({'true', B, A} | 'false')), A) -> {[B], A}).
--spec(ceil/1 :: (number()) -> integer()).
--spec(queue_fold/3 :: (fun ((any(), B) -> B), B, queue:queue()) -> B).
--spec(sort_field_table/1 ::
-        (rabbit_framing:amqp_table()) -> rabbit_framing:amqp_table()).
--spec(pid_to_string/1 :: (pid()) -> string()).
--spec(string_to_pid/1 :: (string()) -> pid()).
--spec(pid_change_node/2 :: (pid(), node()) -> pid()).
--spec(node_to_fake_pid/1 :: (atom()) -> pid()).
--spec(version_compare/2 :: (string(), string()) -> 'lt' | 'eq' | 'gt').
--spec(version_compare/3 ::
-        (string(), string(), ('lt' | 'lte' | 'eq' | 'gte' | 'gt'))
-        -> boolean()).
--spec(version_minor_equivalent/2 :: (string(), string()) -> boolean()).
--spec(dict_cons/3 :: (any(), any(), dict:dict()) -> dict:dict()).
--spec(orddict_cons/3 :: (any(), any(), orddict:orddict()) -> orddict:orddict()).
--spec(gb_trees_cons/3 :: (any(), any(), gb_trees:tree()) -> gb_trees:tree()).
--spec(gb_trees_fold/3 :: (fun ((any(), any(), A) -> A), A, gb_trees:tree())
- -> A).
--spec(gb_trees_foreach/2 ::
-        (fun ((any(), any()) -> any()), gb_trees:tree()) -> 'ok').
--spec(all_module_attributes/1 ::
-        (atom()) -> [{atom(), atom(), [term()]}]).
--spec(build_acyclic_graph/3 ::
-        (graph_vertex_fun(), graph_edge_fun(), [{atom(), [term()]}])
-        -> rabbit_types:ok_or_error2(digraph:digraph(),
-                                     {'vertex', 'duplicate', digraph:vertex()} |
-                                     {'edge', ({bad_vertex, digraph:vertex()} |
-                                               {bad_edge, [digraph:vertex()]}),
-                                      digraph:vertex(), digraph:vertex()})).
--spec(now_ms/0 :: () -> non_neg_integer()).
--spec(const/1 :: (A) -> thunk(A)).
--spec(ntoa/1 :: (inet:ip_address()) -> string()).
--spec(ntoab/1 :: (inet:ip_address()) -> string()).
--spec(is_process_alive/1 :: (pid()) -> boolean()).
--spec(pget/2 :: (term(), [term()]) -> term()).
--spec(pget/3 :: (term(), [term()], term()) -> term()).
--spec(pget_or_die/2 :: (term(), [term()]) -> term() | no_return()).
--spec(pset/3 :: (term(), term(), [term()]) -> term()).
--spec(format_message_queue/2 :: (any(), priority_queue:q()) -> term()).
--spec(append_rpc_all_nodes/4 :: ([node()], atom(), atom(), [any()]) -> [any()]).
--spec(os_cmd/1 :: (string()) -> string()).
--spec(is_os_process_alive/1 :: (non_neg_integer()) -> boolean()).
--spec(gb_sets_difference/2 :: (gb_sets:set(), gb_sets:set()) -> gb_sets:set()).
--spec(version/0 :: () -> string()).
--spec(otp_release/0 :: () -> string()).
--spec(which_applications/0 :: () -> [{atom(), string(), string()}]).
--spec(sequence_error/1 :: ([({'error', any()} | any())])
-                       -> {'error', any()} | any()).
--spec(json_encode/1 :: (any()) -> {'ok', string()} | {'error', any()}).
--spec(json_decode/1 :: (string()) -> {'ok', any()} | 'error').
--spec(json_to_term/1 :: (any()) -> any()).
--spec(term_to_json/1 :: (any()) -> any()).
--spec(check_expiry/1 :: (integer()) -> rabbit_types:ok_or_error(any())).
--spec(base64url/1 :: (binary()) -> string()).
--spec(interval_operation/4 ::
-        ({atom(), atom(), any()}, float(), non_neg_integer(), non_neg_integer())
-        -> {any(), non_neg_integer()}).
--spec(ensure_timer/4 :: (A, non_neg_integer(), non_neg_integer(), any()) -> A).
--spec(stop_timer/2 :: (A, non_neg_integer()) -> A).
--spec(send_after/3 :: (non_neg_integer(), pid(), any()) -> tref()).
--spec(cancel_timer/1 :: (tref()) -> 'ok').
--spec(get_parent/0 :: () -> pid()).
--spec(store_proc_name/2 :: (atom(), rabbit_types:proc_name()) -> ok).
--spec(store_proc_name/1 :: (rabbit_types:proc_type_and_name()) -> ok).
--spec(moving_average/4 :: (float(), float(), float(), float() | 'undefined')
-                          -> float()).
--spec(now_to_ms/1 :: ({non_neg_integer(),
-                       non_neg_integer(),
-                       non_neg_integer()}) -> pos_integer()).
--spec(get_env/3 :: (atom(), atom(), term())  -> term()).
--endif.
+            undefined |
+            rabbit_types:error(
+              {invalid_type, rabbit_framing:amqp_field_type()}) |
+            rabbit_types:r(K) when is_subtype(K, atom()).
+-spec rs(rabbit_types:r(atom())) -> string().
+-spec enable_cover() -> ok_or_error().
+-spec start_cover([{string(), string()} | string()]) -> 'ok'.
+-spec report_cover() -> 'ok'.
+-spec enable_cover([file:filename() | atom()]) -> ok_or_error().
+-spec report_cover([file:filename() | atom()]) -> 'ok'.
+-spec throw_on_error
+        (atom(), thunk(rabbit_types:error(any()) | {ok, A} | A)) -> A.
+-spec with_exit_handler(thunk(A), thunk(A)) -> A.
+-spec is_abnormal_exit(any()) -> boolean().
+-spec filter_exit_map(fun ((A) -> B), [A]) -> [B].
+-spec with_user(rabbit_types:username(), thunk(A)) -> A.
+-spec with_user_and_vhost
+        (rabbit_types:username(), rabbit_types:vhost(), thunk(A)) -> A.
+-spec execute_mnesia_transaction(thunk(A)) -> A.
+-spec execute_mnesia_transaction(thunk(A), fun ((A, boolean()) -> B)) -> B.
+-spec execute_mnesia_tx_with_tail
+        (thunk(fun ((boolean()) -> B))) -> B | (fun ((boolean()) -> B)).
+-spec ensure_ok(ok_or_error(), atom()) -> 'ok'.
+-spec tcp_name(atom(), inet:ip_address(), rabbit_networking:ip_port()) ->
+          atom().
+-spec format_inet_error(atom()) -> string().
+-spec upmap(fun ((A) -> B), [A]) -> [B].
+-spec map_in_order(fun ((A) -> B), [A]) -> [B].
+-spec table_filter
+        (fun ((A) -> boolean()), fun ((A, boolean()) -> 'ok'), atom()) -> [A].
+-spec dirty_read_all(atom()) -> [any()].
+-spec dirty_foreach_key(fun ((any()) -> any()), atom()) ->
+          'ok' | 'aborted'.
+-spec dirty_dump_log(file:filename()) -> ok_or_error().
+-spec format(string(), [any()]) -> string().
+-spec format_many([{string(), [any()]}]) -> string().
+-spec format_stderr(string(), [any()]) -> 'ok'.
+-spec unfold (fun ((A) -> ({'true', B, A} | 'false')), A) -> {[B], A}.
+-spec ceil(number()) -> integer().
+-spec queue_fold(fun ((any(), B) -> B), B, ?QUEUE_TYPE()) -> B.
+-spec sort_field_table(rabbit_framing:amqp_table()) ->
+          rabbit_framing:amqp_table().
+-spec pid_to_string(pid()) -> string().
+-spec string_to_pid(string()) -> pid().
+-spec pid_change_node(pid(), node()) -> pid().
+-spec node_to_fake_pid(atom()) -> pid().
+-spec version_compare(string(), string()) -> 'lt' | 'eq' | 'gt'.
+-spec version_compare
+        (string(), string(), ('lt' | 'lte' | 'eq' | 'gte' | 'gt')) -> boolean().
+-spec version_minor_equivalent(string(), string()) -> boolean().
+-spec dict_cons(any(), any(), ?DICT_TYPE()) -> ?DICT_TYPE().
+-spec orddict_cons(any(), any(), orddict:orddict()) -> orddict:orddict().
+-spec gb_trees_cons(any(), any(), gb_trees:tree()) -> gb_trees:tree().
+-spec gb_trees_fold(fun ((any(), any(), A) -> A), A, gb_trees:tree()) -> A.
+-spec gb_trees_foreach(fun ((any(), any()) -> any()), gb_trees:tree()) ->
+          'ok'.
+-spec all_module_attributes(atom()) -> [{atom(), atom(), [term()]}].
+-spec build_acyclic_graph
+        (graph_vertex_fun(), graph_edge_fun(), [{atom(), [term()]}]) ->
+            rabbit_types:ok_or_error2(
+              digraph:graph(),
+              {'vertex', 'duplicate', digraph:vertex()} |
+              {'edge',
+                ({bad_vertex, digraph:vertex()} |
+                 {bad_edge, [digraph:vertex()]}),
+                digraph:vertex(), digraph:vertex()}).
+-spec const(A) -> thunk(A).
+-spec ntoa(inet:ip_address()) -> string().
+-spec ntoab(inet:ip_address()) -> string().
+-spec is_process_alive(pid()) -> boolean().
+-spec pget(term(), [term()]) -> term().
+-spec pget(term(), [term()], term()) -> term().
+-spec pget_or_die(term(), [term()]) -> term() | no_return().
+-spec pmerge(term(), term(), [term()]) -> [term()].
+-spec plmerge([term()], [term()]) -> [term()].
+-spec pset(term(), term(), [term()]) -> [term()].
+-spec format_message_queue(any(), priority_queue:q()) -> term().
+-spec append_rpc_all_nodes([node()], atom(), atom(), [any()]) -> [any()].
+-spec os_cmd(string()) -> string().
+-spec is_os_process_alive(non_neg_integer()) -> boolean().
+-spec gb_sets_difference(?GB_SET_TYPE(), ?GB_SET_TYPE()) -> ?GB_SET_TYPE().
+-spec version() -> string().
+-spec otp_release() -> string().
+-spec which_applications() -> [{atom(), string(), string()}].
+-spec sequence_error([({'error', any()} | any())]) ->
+          {'error', any()} | any().
+-spec json_encode(any()) -> {'ok', string()} | {'error', any()}.
+-spec json_decode(string()) -> {'ok', any()} | 'error'.
+-spec json_to_term(any()) -> any().
+-spec term_to_json(any()) -> any().
+-spec check_expiry(integer()) -> rabbit_types:ok_or_error(any()).
+-spec base64url(binary()) -> string().
+-spec interval_operation
+        ({atom(), atom(), any()}, float(), non_neg_integer(), non_neg_integer(),
+         non_neg_integer()) ->
+            {any(), non_neg_integer()}.
+-spec ensure_timer(A, non_neg_integer(), non_neg_integer(), any()) -> A.
+-spec stop_timer(A, non_neg_integer()) -> A.
+-spec send_after(non_neg_integer(), pid(), any()) -> tref().
+-spec cancel_timer(tref()) -> 'ok'.
+-spec get_parent() -> pid().
+-spec store_proc_name(atom(), rabbit_types:proc_name()) -> ok.
+-spec store_proc_name(rabbit_types:proc_type_and_name()) -> ok.
+-spec get_proc_name() -> rabbit_types:proc_name().
+-spec moving_average(float(), float(), float(), float() | 'undefined') ->
+          float().
+-spec get_env(atom(), atom(), term())  -> term().
+-spec get_channel_operation_timeout() -> non_neg_integer().
+-spec random(non_neg_integer()) -> non_neg_integer().
+-spec rpc_call(node(), atom(), atom(), [any()]) -> any().
+-spec rpc_call(node(), atom(), atom(), [any()], number()) -> any().
+-spec rpc_call
+        (node(), atom(), atom(), [any()], reference(), pid(), number()) -> any().
+-spec report_default_thread_pool_size() -> 'ok'.
+-spec get_gc_info(pid()) -> integer().
 
 %%----------------------------------------------------------------------------
 
@@ -310,16 +314,23 @@ absent(#amqqueue{name = QueueName, pid = QPid, durable = true}, nodedown) ->
 
 absent(#amqqueue{name = QueueName}, crashed) ->
     protocol_error(not_found,
-                   "~s has crashed and failed to restart", [rs(QueueName)]).
+                   "~s has crashed and failed to restart", [rs(QueueName)]);
 
-type_class(byte)      -> int;
-type_class(short)     -> int;
-type_class(signedint) -> int;
-type_class(long)      -> int;
-type_class(decimal)   -> int;
-type_class(float)     -> float;
-type_class(double)    -> float;
-type_class(Other)     -> Other.
+absent(#amqqueue{name = QueueName}, timeout) ->
+    protocol_error(not_found,
+                   "failed to perform operation on ~s due to timeout", [rs(QueueName)]).
+
+type_class(byte)          -> int;
+type_class(short)         -> int;
+type_class(signedint)     -> int;
+type_class(long)          -> int;
+type_class(decimal)       -> int;
+type_class(unsignedbyte)  -> int;
+type_class(unsignedshort) -> int;
+type_class(unsignedint)   -> int;
+type_class(float)         -> float;
+type_class(double)        -> float;
+type_class(Other)         -> Other.
 
 assert_args_equivalence(Orig, New, Name, Keys) ->
     [assert_args_equivalence1(Orig, New, Name, Key) || Key <- Keys],
@@ -652,18 +663,7 @@ format_many(List) ->
     lists:flatten([io_lib:format(F ++ "~n", A) || {F, A} <- List]).
 
 format_stderr(Fmt, Args) ->
-    case os:type() of
-        {unix, _} ->
-            Port = open_port({fd, 0, 2}, [out]),
-            port_command(Port, io_lib:format(Fmt, Args)),
-            port_close(Port);
-        {win32, _} ->
-            %% stderr on Windows is buffered and I can't figure out a
-            %% way to trigger a fflush(stderr) in Erlang. So rather
-            %% than risk losing output we write to stdout instead,
-            %% which appears to be unbuffered.
-            io:format(Fmt, Args)
-    end,
+    io:format(standard_error, Fmt, Args),
     ok.
 
 unfold(Fun, Init) ->
@@ -692,6 +692,9 @@ queue_fold(Fun, Init, Q) ->
 sort_field_table(Arguments) ->
     lists:keysort(1, Arguments).
 
+atom_to_binary(A) ->
+    list_to_binary(atom_to_list(A)).
+
 %% This provides a string representation of a pid that is the same
 %% regardless of what node we are running on. The representation also
 %% permits easy identification of the pid's node.
@@ -734,53 +737,40 @@ compose_pid(Node, Cre, Id, Ser) ->
     <<131,NodeEnc/binary>> = term_to_binary(Node),
     binary_to_term(<<131,103,NodeEnc/binary,Id:32,Ser:32,Cre:8>>).
 
-version_compare(A, B, lte) ->
-    case version_compare(A, B) of
-        eq -> true;
-        lt -> true;
-        gt -> false
-    end;
-version_compare(A, B, gte) ->
-    case version_compare(A, B) of
-        eq -> true;
-        gt -> true;
-        lt -> false
-    end;
-version_compare(A, B, Result) ->
-    Result =:= version_compare(A, B).
-
-version_compare(A, A) ->
-    eq;
-version_compare([], [$0 | B]) ->
-    version_compare([], dropdot(B));
-version_compare([], _) ->
-    lt; %% 2.3 < 2.3.1
-version_compare([$0 | A], []) ->
-    version_compare(dropdot(A), []);
-version_compare(_, []) ->
-    gt; %% 2.3.1 > 2.3
-version_compare(A,  B) ->
-    {AStr, ATl} = lists:splitwith(fun (X) -> X =/= $. end, A),
-    {BStr, BTl} = lists:splitwith(fun (X) -> X =/= $. end, B),
-    ANum = list_to_integer(AStr),
-    BNum = list_to_integer(BStr),
-    if ANum =:= BNum -> version_compare(dropdot(ATl), dropdot(BTl));
-       ANum < BNum   -> lt;
-       ANum > BNum   -> gt
+version_compare(A, B, eq)  -> ec_semver:eql(A, B);
+version_compare(A, B, lt)  -> ec_semver:lt(A, B);
+version_compare(A, B, lte) -> ec_semver:lte(A, B);
+version_compare(A, B, gt)  -> ec_semver:gt(A, B);
+version_compare(A, B, gte) -> ec_semver:gte(A, B).
+
+version_compare(A, B) ->
+    case version_compare(A, B, lt) of
+        true -> lt;
+        false -> case version_compare(A, B, gt) of
+                     true -> gt;
+                     false -> eq
+                 end
     end.
 
 %% a.b.c and a.b.d match, but a.b.c and a.d.e don't. If
 %% versions do not match that pattern, just compare them.
+%%
+%% Special case for 3.6.6 because it introduced a change to the schema.
+%% e.g. 3.6.6 is not compatible with 3.6.5
+%% This special case can be removed once 3.6.x reaches EOL
 version_minor_equivalent(A, B) ->
-    {ok, RE} = re:compile("^(\\d+\\.\\d+)(\\.\\d+)\$"),
-    Opts = [{capture, all_but_first, list}],
-    case {re:run(A, RE, Opts), re:run(B, RE, Opts)} of
-        {{match, [A1|_]}, {match, [B1|_]}} -> A1 =:= B1;
-        _                                  -> A =:= B
+    {{MajA, MinA, PatchA, _}, _} = ec_semver:normalize(ec_semver:parse(A)),
+    {{MajB, MinB, PatchB, _}, _} = ec_semver:normalize(ec_semver:parse(B)),
+
+    case {MajA, MinA, MajB, MinB} of
+        {3, 6, 3, 6} -> if
+                            PatchA >= 6 -> PatchB >= 6;
+                            PatchA < 6  -> PatchB < 6;
+                            true -> false
+                        end;
+        _            -> MajA =:= MajB andalso MinA =:= MinB
     end.
 
-dropdot(A) -> lists:dropwhile(fun (X) -> X =:= $. end, A).
-
 dict_cons(Key, Value, Dict) ->
     dict:update(Key, fun (List) -> [Value | List] end, [Value], Dict).
 
@@ -804,9 +794,6 @@ gb_trees_fold1(Fun, Acc, {Key, Val, It}) ->
 gb_trees_foreach(Fun, Tree) ->
     gb_trees_fold(fun (Key, Val, Acc) -> Fun(Key, Val), Acc end, ok, Tree).
 
-now_ms() ->
-    timer:now_diff(now(), {0,0,0}) div 1000.
-
 module_attributes(Module) ->
     case catch Module:module_info(attributes) of
         {'EXIT', {undef, [{Module, module_info, _} | _]}} ->
@@ -882,8 +869,20 @@ is_process_alive(Pid) ->
     lists:member(Node, [node() | nodes()]) andalso
         rpc:call(Node, erlang, is_process_alive, [Pid]) =:= true.
 
-pget(K, P) -> proplists:get_value(K, P).
-pget(K, P, D) -> proplists:get_value(K, P, D).
+pget(K, P) ->
+    case lists:keyfind(K, 1, P) of
+        {K, V} ->
+            V;
+        _ ->
+            undefined
+    end.
+pget(K, P, D) ->
+    case lists:keyfind(K, 1, P) of
+        {K, V} ->
+            V;
+        _ ->
+            D
+    end.
 
 pget_or_die(K, P) ->
     case proplists:get_value(K, P) of
@@ -891,6 +890,21 @@ pget_or_die(K, P) ->
         V         -> V
     end.
 
+%% Property merge: add {Key, Val} to List only when Key is not already present.
+pmerge(Key, Val, List) ->
+      case proplists:is_defined(Key, List) of
+              true -> List;
+              _    -> [{Key, Val} | List]
+      end.
+
+%% Merge two proplists; when a key occurs in both, the value from P1 wins.
+plmerge(P1, P2) ->
+    dict:to_list(dict:merge(fun(_, V, _) ->
+                                V 
+                            end, 
+                            dict:from_list(P1), 
+                            dict:from_list(P2))).
+
 pset(Key, Value, List) -> [{Key, Value} | proplists:delete(Key, List)].
 
 format_message_queue(_Opt, MQ) ->
@@ -991,10 +1005,11 @@ otp_release() ->
 
 %% application:which_applications(infinity) is dangerous, since it can
 %% cause deadlocks on shutdown. So we have to use a timeout variant,
-%% but w/o creating spurious timeout errors.
+%% but w/o creating spurious timeout errors. The timeout value is twice
+%% that of gen_server:call/2.
 which_applications() ->
     try
-        application:which_applications()
+        application:which_applications(10000)
     catch
         exit:{timeout, _} -> []
     end.
@@ -1028,8 +1043,9 @@ json_to_term(V) when is_binary(V) orelse is_number(V) orelse V =:= null orelse
                      V =:= true orelse V =:= false ->
     V.
 
-%% This has the flaw that empty lists will never be JSON objects, so use with
-%% care.
+%% You can use the empty_struct value to represent empty JSON objects.
+term_to_json(empty_struct) ->
+    {struct, []};
 term_to_json([{_, _}|_] = L) ->
     {struct, [{K, term_to_json(V)} || {K, V} <- L]};
 term_to_json(L) when is_list(L) ->
@@ -1038,9 +1054,6 @@ term_to_json(V) when is_binary(V) orelse is_number(V) orelse V =:= null orelse
                      V =:= true orelse V =:= false ->
     V.
 
-now_to_ms({Mega, Sec, Micro}) ->
-    (Mega * 1000000 * 1000000 + Sec * 1000000 + Micro) div 1000.
-
 check_expiry(N) when N < 0                 -> {error, {value_negative, N}};
 check_expiry(_N)                           -> ok.
 
@@ -1055,12 +1068,13 @@ base64url(In) ->
 %% want it to take more than MaxRatio of IdealInterval. So if it takes
 %% more then you want to run it less often. So we time how long it
 %% takes to run, and then suggest how long you should wait before
-%% running it again. Times are in millis.
-interval_operation({M, F, A}, MaxRatio, IdealInterval, LastInterval) ->
+%% running it again, capped at a user-specified max interval. Times are in millis.
+interval_operation({M, F, A}, MaxRatio, MaxInterval, IdealInterval, LastInterval) ->
     {Micros, Res} = timer:tc(M, F, A),
     {Res, case {Micros > 1000 * (MaxRatio * IdealInterval),
                 Micros > 1000 * (MaxRatio * LastInterval)} of
-              {true,  true}  -> round(LastInterval * 1.5);
+              {true,  true}  -> lists:min([MaxInterval,
+                                           round(LastInterval * 1.5)]);
               {true,  false} -> LastInterval;
               {false, false} -> lists:max([IdealInterval,
                                            round(LastInterval / 1.5)])
@@ -1098,6 +1112,14 @@ cancel_timer({timer, Ref})  -> {ok, cancel} = timer:cancel(Ref),
 store_proc_name(Type, ProcName) -> store_proc_name({Type, ProcName}).
 store_proc_name(TypeProcName)   -> put(process_name, TypeProcName).
 
+get_proc_name() ->
+    case get(process_name) of
+        undefined ->
+            undefined;
+        {_Type, Name} ->
+            {ok, Name}
+    end.
+
 %% application:get_env/3 is only available in R16B01 or later.
 get_env(Application, Key, Def) ->
     case application:get_env(Application, Key) of
@@ -1105,6 +1127,13 @@ get_env(Application, Key, Def) ->
         undefined -> Def
     end.
 
+get_channel_operation_timeout() ->
+    %% Default channel_operation_timeout set to net_ticktime + 10s to
+    %% give allowance for any down messages to be received first,
+    %% whenever it is used for cross-node calls with timeouts.
+    Default = (net_kernel:get_net_ticktime() + 10) * 1000,
+    application:get_env(rabbit, channel_operation_timeout, Default).
+
 moving_average(_Time, _HalfLife, Next, undefined) ->
     Next;
 %% We want the Weight to decrease as Time goes up (since Weight is the
@@ -1124,6 +1153,57 @@ moving_average(Time,  HalfLife,  Next, Current) ->
     Weight = math:exp(Time * math:log(0.5) / HalfLife),
     Next * (1 - Weight) + Current * Weight.
 
+random(N) ->
+    rand_compat:uniform(N).
+
+%% Moved from rabbit/src/rabbit_cli.erl
+%% If the server we are talking to has non-standard net_ticktime, and
+%% our connection lasts a while, we could get disconnected because of
+%% a timeout unless we set our ticktime to be the same. So let's do
+%% that.
+rpc_call(Node, Mod, Fun, Args) ->
+    rpc_call(Node, Mod, Fun, Args, ?RPC_TIMEOUT).
+
+rpc_call(Node, Mod, Fun, Args, Timeout) ->
+    case rpc:call(Node, net_kernel, get_net_ticktime, [], Timeout) of
+        {badrpc, _} = E -> E;
+        Time            -> net_kernel:set_net_ticktime(Time, 0),
+                           rpc:call(Node, Mod, Fun, Args, Timeout)
+    end.
+
+rpc_call(Node, Mod, Fun, Args, Ref, Pid, Timeout) ->
+    rpc_call(Node, Mod, Fun, Args++[Ref, Pid], Timeout).
+
+guess_number_of_cpu_cores() ->
+    case erlang:system_info(logical_processors_available) of
+        unknown -> % Happens on Mac OS X.
+            erlang:system_info(schedulers);
+        N -> N
+    end.
+
+%% Discussion of chosen values is at
+%% https://github.com/rabbitmq/rabbitmq-server/issues/151
+guess_default_thread_pool_size() ->
+    PoolSize = 16 * guess_number_of_cpu_cores(),
+    min(1024, max(64, PoolSize)).
+
+report_default_thread_pool_size() ->
+    io:format("~b", [guess_default_thread_pool_size()]),
+    erlang:halt(0),
+    ok.
+
+get_gc_info(Pid) ->
+    {garbage_collection, GC} = erlang:process_info(Pid, garbage_collection),
+    case proplists:get_value(max_heap_size, GC) of
+        I when is_integer(I) ->
+            GC;
+        undefined ->
+            GC;
+        Map ->
+            lists:keyreplace(max_heap_size, 1, GC,
+                             {max_heap_size, maps:get(size, Map)})
+    end.
+
 %% -------------------------------------------------------------------------
 %% Begin copypasta from gen_server2.erl
 
similarity index 78%
rename from rabbitmq-server/src/rabbit_msg_store_index.erl
rename to deps/rabbit_common/src/rabbit_msg_store_index.erl
index 0c7a37bcd3dfedd9cc8c4c73034060e6b4a8cca3..b33cd4f81dd15ea983e8ec45435918040d2a7f48 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_msg_store_index).
 
 -include("rabbit_msg_store.hrl").
 
--ifdef(use_specs).
-
 -type(dir() :: any()).
 -type(index_state() :: any()).
 -type(keyvalue() :: any()).
 -callback delete_object(keyvalue(), index_state()) -> 'ok'.
 -callback delete_by_file(fieldvalue(), index_state()) -> 'ok'.
 -callback terminate(index_state()) -> any().
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{new,            1},
-     {recover,        1},
-     {lookup,         2},
-     {insert,         2},
-     {update,         2},
-     {update_fields,  3},
-     {delete,         2},
-     {delete_by_file, 2},
-     {terminate,      1}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
similarity index 60%
rename from rabbitmq-server/src/rabbit_net.erl
rename to deps/rabbit_common/src/rabbit_net.erl
index 1731d489fa66982672752c05af9e894a54099adc..792eb55fb8120566bcfd3528d539ab1e370427a1 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_net).
 -include("rabbit.hrl").
 
+-ifdef(define_tls_atom_version).
+%% In Erlang R16B03, tls_atom_version() is defined in ssl_internal.hrl,
+%% which is not included by ssl_api.hrl. Instead of including it here,
+%% we redefine it to avoid too much pollution.
+-type tls_atom_version() :: sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2'.
+-endif.
+
+-include_lib("ssl/src/ssl_api.hrl").
+
 -export([is_ssl/1, ssl_info/1, controlling_process/2, getstat/2,
          recv/1, sync_recv/2, async_recv/3, port_command/2, getopts/2,
          setopts/2, send/2, close/1, fast_close/1, sockname/1, peername/1,
-         peercert/1, connection_string/2, socket_ends/2, is_loopback/1]).
+         peercert/1, connection_string/2, socket_ends/2, is_loopback/1,
+         accept_ack/2]).
 
 %%---------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([socket/0]).
 
--type(stat_option() ::
+-type stat_option() ::
         'recv_cnt' | 'recv_max' | 'recv_avg' | 'recv_oct' | 'recv_dvi' |
-        'send_cnt' | 'send_max' | 'send_avg' | 'send_oct' | 'send_pend').
--type(ok_val_or_error(A) :: rabbit_types:ok_or_error2(A, any())).
--type(ok_or_any_error() :: rabbit_types:ok_or_error(any())).
--type(socket() :: port() | #ssl_socket{}).
--type(opts() :: [{atom(), any()} |
-                 {raw, non_neg_integer(), non_neg_integer(), binary()}]).
--type(host_or_ip() :: binary() | inet:ip_address()).
--spec(is_ssl/1 :: (socket()) -> boolean()).
--spec(ssl_info/1 :: (socket())
-                    -> 'nossl' | ok_val_or_error(
-                                   {atom(), {atom(), atom(), atom()}})).
--spec(controlling_process/2 :: (socket(), pid()) -> ok_or_any_error()).
--spec(getstat/2 ::
-        (socket(), [stat_option()])
-        -> ok_val_or_error([{stat_option(), integer()}])).
--spec(recv/1 :: (socket()) ->
-                     {'data', [char()] | binary()} | 'closed' |
-                     rabbit_types:error(any()) | {'other', any()}).
--spec(sync_recv/2 :: (socket(), integer()) -> rabbit_types:ok(binary()) |
-                                              rabbit_types:error(any())).
--spec(async_recv/3 ::
-        (socket(), integer(), timeout()) -> rabbit_types:ok(any())).
--spec(port_command/2 :: (socket(), iolist()) -> 'true').
--spec(getopts/2 :: (socket(), [atom() | {raw,
-                                         non_neg_integer(),
-                                         non_neg_integer(),
-                                         non_neg_integer() | binary()}])
-                   -> ok_val_or_error(opts())).
--spec(setopts/2 :: (socket(), opts()) -> ok_or_any_error()).
--spec(send/2 :: (socket(), binary() | iolist()) -> ok_or_any_error()).
--spec(close/1 :: (socket()) -> ok_or_any_error()).
--spec(fast_close/1 :: (socket()) -> ok_or_any_error()).
--spec(sockname/1 ::
-        (socket())
-        -> ok_val_or_error({inet:ip_address(), rabbit_networking:ip_port()})).
--spec(peername/1 ::
-        (socket())
-        -> ok_val_or_error({inet:ip_address(), rabbit_networking:ip_port()})).
--spec(peercert/1 ::
-        (socket())
-        -> 'nossl' | ok_val_or_error(rabbit_ssl:certificate())).
--spec(connection_string/2 ::
-        (socket(), 'inbound' | 'outbound') -> ok_val_or_error(string())).
--spec(socket_ends/2 ::
-        (socket(), 'inbound' | 'outbound')
-        -> ok_val_or_error({host_or_ip(), rabbit_networking:ip_port(),
-                            host_or_ip(), rabbit_networking:ip_port()})).
--spec(is_loopback/1 :: (socket() | inet:ip_address()) -> boolean()).
-
--endif.
+        'send_cnt' | 'send_max' | 'send_avg' | 'send_oct' | 'send_pend'.
+-type ok_val_or_error(A) :: rabbit_types:ok_or_error2(A, any()).
+-type ok_or_any_error() :: rabbit_types:ok_or_error(any()).
+-type socket() :: port() | ssl:sslsocket().
+-type opts() :: [{atom(), any()} |
+                 {raw, non_neg_integer(), non_neg_integer(), binary()}].
+-type host_or_ip() :: binary() | inet:ip_address().
+-spec is_ssl(socket()) -> boolean().
+-spec ssl_info(socket()) -> 'nossl' | ok_val_or_error([{atom(), any()}]).
+-spec controlling_process(socket(), pid()) -> ok_or_any_error().
+-spec getstat(socket(), [stat_option()]) ->
+          ok_val_or_error([{stat_option(), integer()}]).
+-spec recv(socket()) ->
+          {'data', [char()] | binary()} |
+          'closed' |
+          rabbit_types:error(any()) |
+          {'other', any()}.
+-spec sync_recv(socket(), integer()) ->
+          rabbit_types:ok(binary()) |
+          rabbit_types:error(any()).
+-spec async_recv(socket(), integer(), timeout()) ->
+          rabbit_types:ok(any()).
+-spec port_command(socket(), iolist()) -> 'true'.
+-spec getopts
+        (socket(),
+         [atom() |
+          {raw, non_neg_integer(), non_neg_integer(),
+           non_neg_integer() | binary()}]) ->
+            ok_val_or_error(opts()).
+-spec setopts(socket(), opts()) -> ok_or_any_error().
+-spec send(socket(), binary() | iolist()) -> ok_or_any_error().
+-spec close(socket()) -> ok_or_any_error().
+-spec fast_close(socket()) -> ok_or_any_error().
+-spec sockname(socket()) ->
+          ok_val_or_error({inet:ip_address(), rabbit_networking:ip_port()}).
+-spec peername(socket()) ->
+          ok_val_or_error({inet:ip_address(), rabbit_networking:ip_port()}).
+-spec peercert(socket()) ->
+          'nossl' | ok_val_or_error(rabbit_ssl:certificate()).
+-spec connection_string(socket(), 'inbound' | 'outbound') ->
+          ok_val_or_error(string()).
+-spec socket_ends(socket(), 'inbound' | 'outbound') ->
+          ok_val_or_error({host_or_ip(), rabbit_networking:ip_port(),
+                           host_or_ip(), rabbit_networking:ip_port()}).
+-spec is_loopback(socket() | inet:ip_address()) -> boolean().
+-spec accept_ack(any(), socket()) -> ok.
 
 %%---------------------------------------------------------------------------
 
 -define(SSL_CLOSE_TIMEOUT, 5000).
 
--define(IS_SSL(Sock), is_record(Sock, ssl_socket)).
+-define(IS_SSL(Sock), is_record(Sock, sslsocket)).
 
 is_ssl(Sock) -> ?IS_SSL(Sock).
 
+%% Seems hackish. Is hackish. But the structure is stable and
+%% kept this way for backward compatibility reasons. We need
+%% it for two reasons: there is no ssl:getstat(Sock) function,
+%% and no ssl:close(Timeout) function. Both of them are being
+%% worked on as we speak.
+ssl_get_socket(Sock) ->
+    element(2, element(2, Sock)).
+
 ssl_info(Sock) when ?IS_SSL(Sock) ->
-    ssl:connection_info(Sock#ssl_socket.ssl);
+    ssl_compat:connection_information(Sock);
 ssl_info(_Sock) ->
     nossl.
 
 controlling_process(Sock, Pid) when ?IS_SSL(Sock) ->
-    ssl:controlling_process(Sock#ssl_socket.ssl, Pid);
+    ssl:controlling_process(Sock, Pid);
 controlling_process(Sock, Pid) when is_port(Sock) ->
     gen_tcp:controlling_process(Sock, Pid).
 
 getstat(Sock, Stats) when ?IS_SSL(Sock) ->
-    inet:getstat(Sock#ssl_socket.tcp, Stats);
+    inet:getstat(ssl_get_socket(Sock), Stats);
 getstat(Sock, Stats) when is_port(Sock) ->
     inet:getstat(Sock, Stats).
 
 recv(Sock) when ?IS_SSL(Sock) ->
-    recv(Sock#ssl_socket.ssl, {ssl, ssl_closed, ssl_error});
+    recv(Sock, {ssl, ssl_closed, ssl_error});
 recv(Sock) when is_port(Sock) ->
     recv(Sock, {tcp, tcp_closed, tcp_error}).
 
@@ -118,7 +130,7 @@ recv(S, {DataTag, ClosedTag, ErrorTag}) ->
     end.
 
 sync_recv(Sock, Length) when ?IS_SSL(Sock) ->
-    ssl:recv(Sock#ssl_socket.ssl, Length);
+    ssl:recv(Sock, Length);
 sync_recv(Sock, Length) ->
     gen_tcp:recv(Sock, Length).
 
@@ -127,7 +139,7 @@ async_recv(Sock, Length, Timeout) when ?IS_SSL(Sock) ->
     Ref = make_ref(),
 
     spawn(fun () -> Pid ! {inet_async, Sock, Ref,
-                           ssl:recv(Sock#ssl_socket.ssl, Length, Timeout)}
+                           ssl:recv(Sock, Length, Timeout)}
           end),
 
     {ok, Ref};
@@ -137,7 +149,7 @@ async_recv(Sock, Length, Timeout) when is_port(Sock) ->
     prim_inet:async_recv(Sock, Length, Timeout).
 
 port_command(Sock, Data) when ?IS_SSL(Sock) ->
-    case ssl:send(Sock#ssl_socket.ssl, Data) of
+    case ssl:send(Sock, Data) of
         ok              -> self() ! {inet_reply, Sock, ok},
                            true;
         {error, Reason} -> erlang:error(Reason)
@@ -146,19 +158,19 @@ port_command(Sock, Data) when is_port(Sock) ->
     erlang:port_command(Sock, Data).
 
 getopts(Sock, Options) when ?IS_SSL(Sock) ->
-    ssl:getopts(Sock#ssl_socket.ssl, Options);
+    ssl:getopts(Sock, Options);
 getopts(Sock, Options) when is_port(Sock) ->
     inet:getopts(Sock, Options).
 
 setopts(Sock, Options) when ?IS_SSL(Sock) ->
-    ssl:setopts(Sock#ssl_socket.ssl, Options);
+    ssl:setopts(Sock, Options);
 setopts(Sock, Options) when is_port(Sock) ->
     inet:setopts(Sock, Options).
 
-send(Sock, Data) when ?IS_SSL(Sock) -> ssl:send(Sock#ssl_socket.ssl, Data);
+send(Sock, Data) when ?IS_SSL(Sock) -> ssl:send(Sock, Data);
 send(Sock, Data) when is_port(Sock) -> gen_tcp:send(Sock, Data).
 
-close(Sock)      when ?IS_SSL(Sock) -> ssl:close(Sock#ssl_socket.ssl);
+close(Sock)      when ?IS_SSL(Sock) -> ssl:close(Sock);
 close(Sock)      when is_port(Sock) -> gen_tcp:close(Sock).
 
 fast_close(Sock) when ?IS_SSL(Sock) ->
@@ -173,7 +185,7 @@ fast_close(Sock) when ?IS_SSL(Sock) ->
     %% 0), which may never return if the client doesn't send a FIN or
     %% that gets swallowed by the network. Since there is no timeout
     %% variant of ssl:close, we construct our own.
-    {Pid, MRef} = spawn_monitor(fun () -> ssl:close(Sock#ssl_socket.ssl) end),
+    {Pid, MRef} = spawn_monitor(fun () -> ssl:close(Sock) end),
     erlang:send_after(?SSL_CLOSE_TIMEOUT, self(), {Pid, ssl_close_timeout}),
     receive
         {Pid, ssl_close_timeout} ->
@@ -182,18 +194,18 @@ fast_close(Sock) when ?IS_SSL(Sock) ->
         {'DOWN', MRef, process, Pid, _Reason} ->
             ok
     end,
-    catch port_close(Sock#ssl_socket.tcp),
+    catch port_close(ssl_get_socket(Sock)),
     ok;
 fast_close(Sock) when is_port(Sock) ->
     catch port_close(Sock), ok.
 
-sockname(Sock)   when ?IS_SSL(Sock) -> ssl:sockname(Sock#ssl_socket.ssl);
+sockname(Sock)   when ?IS_SSL(Sock) -> ssl:sockname(Sock);
 sockname(Sock)   when is_port(Sock) -> inet:sockname(Sock).
 
-peername(Sock)   when ?IS_SSL(Sock) -> ssl:peername(Sock#ssl_socket.ssl);
+peername(Sock)   when ?IS_SSL(Sock) -> ssl:peername(Sock);
 peername(Sock)   when is_port(Sock) -> inet:peername(Sock).
 
-peercert(Sock)   when ?IS_SSL(Sock) -> ssl:peercert(Sock#ssl_socket.ssl);
+peercert(Sock)   when ?IS_SSL(Sock) -> ssl:peercert(Sock);
 peercert(Sock)   when is_port(Sock) -> nossl.
 
 connection_string(Sock, Direction) ->
@@ -244,3 +256,19 @@ is_loopback({0,0,0,0,0,65535,AB,CD}) -> is_loopback(ipv4(AB, CD));
 is_loopback(_)                       -> false.
 
 ipv4(AB, CD) -> {AB bsr 8, AB band 255, CD bsr 8, CD band 255}.
+
+accept_ack(Ref, Sock) ->
+    ok = ranch:accept_ack(Ref),
+    case tune_buffer_size(Sock) of
+        ok         -> ok;
+        {error, _} -> rabbit_net:fast_close(Sock),
+                      exit(normal)
+    end,
+    ok = file_handle_cache:obtain().
+
+tune_buffer_size(Sock) ->
+    case getopts(Sock, [sndbuf, recbuf, buffer]) of
+        {ok, BufSizes} -> BufSz = lists:max([Sz || {_Opt, Sz} <- BufSizes]),
+                          setopts(Sock, [{buffer, BufSz}]);
+        Error          -> Error
+    end.
similarity index 63%
rename from rabbitmq-server/src/rabbit_networking.erl
rename to deps/rabbit_common/src/rabbit_networking.erl
index f95f8c5818703b11a63749b8e683ab326ccc6502..5bf30ff5e569d9638c55a1c34527f6658bcd75c4 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_networking).
 
--export([boot/0, start/0, start_tcp_listener/1, start_ssl_listener/2,
+%% This module contains various functions that deal with networking,
+%% TCP and TLS listeners, and connection information.
+%%
+%% It also contains a boot step — boot/0 — that starts networking machinery.
+%% This module primarily covers AMQP 0-9-1 but some bits are reused in
+%% plugins that provide protocol support, e.g. STOMP or MQTT.
+%%
+%% Functions in this module take care of normalising TCP listener options,
+%% including dual IP stack cases, and starting the AMQP 0-9-1 listener(s).
+%%
+%% See also tcp_listener_sup and tcp_listener.
+
+-export([boot/0, start_tcp_listener/2, start_ssl_listener/3,
          stop_tcp_listener/1, on_node_down/1, active_listeners/0,
          node_listeners/1, register_connection/1, unregister_connection/1,
          connections/0, connection_info_keys/0,
          connection_info/1, connection_info/2,
-         connection_info_all/0, connection_info_all/1,
+         connection_info_all/0, connection_info_all/1, connection_info_all/3,
          close_connection/2, force_connection_event_refresh/1, tcp_host/1]).
 
-%%used by TCP-based transports, e.g. STOMP adapter
--export([tcp_listener_addresses/1, tcp_listener_spec/6,
-         ensure_ssl/0, fix_ssl_options/1, poodle_check/1, ssl_transform_fun/1]).
+%% Used by TCP-based transports, e.g. STOMP adapter
+-export([tcp_listener_addresses/1, tcp_listener_spec/9,
+         ensure_ssl/0, fix_ssl_options/1, poodle_check/1]).
 
--export([tcp_listener_started/3, tcp_listener_stopped/3,
-         start_client/1, start_ssl_client/2]).
+-export([tcp_listener_started/3, tcp_listener_stopped/3]).
 
 %% Internal
 -export([connections_local/0]).
 -include("rabbit.hrl").
 -include_lib("kernel/include/inet.hrl").
 
--define(FIRST_TEST_BIND_PORT, 10000).
+%% IANA-suggested ephemeral port range is 49152 to 65535
+-define(FIRST_TEST_BIND_PORT, 49152).
 
 %% POODLE
 -define(BAD_SSL_PROTOCOL_VERSIONS, [sslv3]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([ip_port/0, hostname/0]).
 
--type(hostname() :: inet:hostname()).
--type(ip_port() :: inet:port_number()).
+-type hostname() :: inet:hostname().
+-type ip_port() :: inet:port_number().
 
--type(family() :: atom()).
--type(listener_config() :: ip_port() |
+-type family() :: atom().
+-type listener_config() :: ip_port() |
                            {hostname(), ip_port()} |
-                           {hostname(), ip_port(), family()}).
--type(address() :: {inet:ip_address(), ip_port(), family()}).
--type(name_prefix() :: atom()).
--type(protocol() :: atom()).
--type(label() :: string()).
-
--spec(start/0 :: () -> 'ok').
--spec(start_tcp_listener/1 :: (listener_config()) -> 'ok').
--spec(start_ssl_listener/2 ::
-        (listener_config(), rabbit_types:infos()) -> 'ok').
--spec(stop_tcp_listener/1 :: (listener_config()) -> 'ok').
--spec(active_listeners/0 :: () -> [rabbit_types:listener()]).
--spec(node_listeners/1 :: (node()) -> [rabbit_types:listener()]).
--spec(register_connection/1 :: (pid()) -> ok).
--spec(unregister_connection/1 :: (pid()) -> ok).
--spec(connections/0 :: () -> [rabbit_types:connection()]).
--spec(connections_local/0 :: () -> [rabbit_types:connection()]).
--spec(connection_info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(connection_info/1 ::
-        (rabbit_types:connection()) -> rabbit_types:infos()).
--spec(connection_info/2 ::
-        (rabbit_types:connection(), rabbit_types:info_keys())
-        -> rabbit_types:infos()).
--spec(connection_info_all/0 :: () -> [rabbit_types:infos()]).
--spec(connection_info_all/1 ::
-        (rabbit_types:info_keys()) -> [rabbit_types:infos()]).
--spec(close_connection/2 :: (pid(), string()) -> 'ok').
--spec(force_connection_event_refresh/1 :: (reference()) -> 'ok').
-
--spec(on_node_down/1 :: (node()) -> 'ok').
--spec(tcp_listener_addresses/1 :: (listener_config()) -> [address()]).
--spec(tcp_listener_spec/6 ::
-        (name_prefix(), address(), [gen_tcp:listen_option()], protocol(),
-         label(), rabbit_types:mfargs()) -> supervisor:child_spec()).
--spec(ensure_ssl/0 :: () -> rabbit_types:infos()).
--spec(fix_ssl_options/1 :: (rabbit_types:infos()) -> rabbit_types:infos()).
--spec(poodle_check/1 :: (atom()) -> 'ok' | 'danger').
--spec(ssl_transform_fun/1 ::
-        (rabbit_types:infos())
-        -> fun ((rabbit_net:socket())
-                -> rabbit_types:ok_or_error(#ssl_socket{}))).
-
--spec(boot/0 :: () -> 'ok').
--spec(start_client/1 ::
-       (port() | #ssl_socket{ssl::{'sslsocket',_,_}}) ->
-                            atom() | pid() | port() | {atom(),atom()}).
--spec(start_ssl_client/2 ::
-       (_,port() | #ssl_socket{ssl::{'sslsocket',_,_}}) ->
-                                atom() | pid() | port() | {atom(),atom()}).
--spec(tcp_listener_started/3 ::
-       (_,
+                           {hostname(), ip_port(), family()}.
+-type address() :: {inet:ip_address(), ip_port(), family()}.
+-type name_prefix() :: atom().
+-type protocol() :: atom().
+-type label() :: string().
+
+-spec start_tcp_listener(listener_config(), integer()) -> 'ok'.
+-spec start_ssl_listener
+        (listener_config(), rabbit_types:infos(), integer()) -> 'ok'.
+-spec stop_tcp_listener(listener_config()) -> 'ok'.
+-spec active_listeners() -> [rabbit_types:listener()].
+-spec node_listeners(node()) -> [rabbit_types:listener()].
+-spec register_connection(pid()) -> ok.
+-spec unregister_connection(pid()) -> ok.
+-spec connections() -> [rabbit_types:connection()].
+-spec connections_local() -> [rabbit_types:connection()].
+-spec connection_info_keys() -> rabbit_types:info_keys().
+-spec connection_info(rabbit_types:connection()) -> rabbit_types:infos().
+-spec connection_info(rabbit_types:connection(), rabbit_types:info_keys()) ->
+          rabbit_types:infos().
+-spec connection_info_all() -> [rabbit_types:infos()].
+-spec connection_info_all(rabbit_types:info_keys()) ->
+          [rabbit_types:infos()].
+-spec connection_info_all(rabbit_types:info_keys(), reference(), pid()) ->
+          'ok'.
+-spec close_connection(pid(), string()) -> 'ok'.
+-spec force_connection_event_refresh(reference()) -> 'ok'.
+
+-spec on_node_down(node()) -> 'ok'.
+-spec tcp_listener_addresses(listener_config()) -> [address()].
+-spec tcp_listener_spec
+        (name_prefix(), address(), [gen_tcp:listen_option()], module(), module(),
+         protocol(), any(), non_neg_integer(), label()) ->
+            supervisor:child_spec().
+-spec ensure_ssl() -> rabbit_types:infos().
+-spec fix_ssl_options(rabbit_types:infos()) -> rabbit_types:infos().
+-spec poodle_check(atom()) -> 'ok' | 'danger'.
+
+-spec boot() -> 'ok'.
+-spec tcp_listener_started
+        (_,
          string() |
-        {byte(),byte(),byte(),byte()} |
-        {char(),char(),char(),char(),char(),char(),char(),char()},
-        _) ->
-                                    'ok').
--spec(tcp_listener_stopped/3 ::
-       (_,
+         {byte(),byte(),byte(),byte()} |
+         {char(),char(),char(),char(),char(),char(),char(),char()}, _) ->
+            'ok'.
+-spec tcp_listener_stopped
+        (_,
          string() |
-        {byte(),byte(),byte(),byte()} |
-        {char(),char(),char(),char(),char(),char(),char(),char()},
-        _) ->
-                                    'ok').
-
--endif.
+         {byte(),byte(),byte(),byte()} |
+         {char(),char(),char(),char(),char(),char(),char(),char()},
+         _) ->
+            'ok'.
 
 %%----------------------------------------------------------------------------
 
 boot() ->
     ok = record_distribution_listener(),
-    ok = start(),
-    ok = boot_tcp(),
-    ok = boot_ssl().
+    _ = application:start(ranch),
+    ok = boot_tcp(application:get_env(rabbit, num_tcp_acceptors, 10)),
+    ok = boot_ssl(application:get_env(rabbit, num_ssl_acceptors, 1)).
 
-boot_tcp() ->
+boot_tcp(NumAcceptors) ->
     {ok, TcpListeners} = application:get_env(tcp_listeners),
-    [ok = start_tcp_listener(Listener) || Listener <- TcpListeners],
+    [ok = start_tcp_listener(Listener, NumAcceptors) || Listener <- TcpListeners],
     ok.
 
-boot_ssl() ->
+boot_ssl(NumAcceptors) ->
     case application:get_env(ssl_listeners) of
         {ok, []} ->
             ok;
         {ok, SslListeners} ->
             SslOpts = ensure_ssl(),
             case poodle_check('AMQP') of
-                ok     -> [start_ssl_listener(L, SslOpts) || L <- SslListeners];
+                ok     -> [start_ssl_listener(L, SslOpts, NumAcceptors) || L <- SslListeners];
                 danger -> ok
             end,
             ok
     end.
 
-start() -> rabbit_sup:start_supervisor_child(
-             rabbit_tcp_client_sup, rabbit_client_sup,
-             [{local, rabbit_tcp_client_sup},
-              {rabbit_connection_sup,start_link,[]}]).
-
 ensure_ssl() ->
     {ok, SslAppsConfig} = application:get_env(rabbit, ssl_apps),
     ok = app_utils:start_applications(SslAppsConfig),
@@ -190,32 +182,20 @@ fix_ssl_options(Config) ->
 fix_verify_fun(SslOptsConfig) ->
     %% Starting with ssl 4.0.1 in Erlang R14B, the verify_fun function
     %% takes 3 arguments and returns a tuple.
-    {ok, SslAppVer} = application:get_key(ssl, vsn),
-    UseNewVerifyFun = rabbit_misc:version_compare(SslAppVer, "4.0.1", gte),
     case rabbit_misc:pget(verify_fun, SslOptsConfig) of
         {Module, Function, InitialUserState} ->
-            Fun = make_verify_fun(Module, Function, InitialUserState,
-                                  UseNewVerifyFun),
+            Fun = make_verify_fun(Module, Function, InitialUserState),
             rabbit_misc:pset(verify_fun, Fun, SslOptsConfig);
-        {Module, Function} ->
-            Fun = make_verify_fun(Module, Function, none,
-                                  UseNewVerifyFun),
+        {Module, Function} when is_atom(Module) ->
+            Fun = make_verify_fun(Module, Function, none),
             rabbit_misc:pset(verify_fun, Fun, SslOptsConfig);
-        undefined when UseNewVerifyFun ->
+        {Verifyfun, _InitialUserState} when is_function(Verifyfun, 3) ->
             SslOptsConfig;
         undefined ->
-            % unknown_ca errors are silently ignored prior to R14B unless we
-            % supply this verify_fun - remove when at least R14B is required
-            case proplists:get_value(verify, SslOptsConfig, verify_none) of
-                verify_none -> SslOptsConfig;
-                verify_peer -> [{verify_fun, fun([])    -> true;
-                                                ([_|_]) -> false
-                                             end}
-                                | SslOptsConfig]
-            end
+            SslOptsConfig
     end.
 
-make_verify_fun(Module, Function, InitialUserState, UseNewVerifyFun) ->
+make_verify_fun(Module, Function, InitialUserState) ->
     try
         %% Preload the module: it is required to use
         %% erlang:function_exported/3.
@@ -229,7 +209,7 @@ make_verify_fun(Module, Function, InitialUserState, UseNewVerifyFun) ->
     NewForm = erlang:function_exported(Module, Function, 3),
     OldForm = erlang:function_exported(Module, Function, 1),
     case {NewForm, OldForm} of
-        {true, _} when UseNewVerifyFun ->
+        {true, _} ->
             %% This verify_fun is supported by Erlang R14B+ (ssl
             %% 4.0.1 and later).
             Fun = fun(OtpCert, Event, UserState) ->
@@ -237,23 +217,16 @@ make_verify_fun(Module, Function, InitialUserState, UseNewVerifyFun) ->
             end,
             {Fun, InitialUserState};
         {_, true} ->
-            %% This verify_fun is supported by:
-            %%     o  Erlang up-to R13B;
-            %%     o  Erlang R14B+ for undocumented backward
-            %%        compatibility.
+            %% This verify_fun is supported by Erlang R14B+ for
+            %% undocumented backward compatibility.
             %%
             %% InitialUserState is ignored in this case.
-            fun(ErrorList) ->
-                    Module:Function(ErrorList)
+            fun(Args) ->
+                    Module:Function(Args)
             end;
-        {_, false} when not UseNewVerifyFun ->
-            rabbit_log:error("SSL verify_fun: ~s:~s/1 form required "
-              "for Erlang R13B~n", [Module, Function]),
-            throw({error, {invalid_verify_fun, old_form_required}});
         _ ->
-            Arity = case UseNewVerifyFun of true -> 3; _ -> 1 end,
-            rabbit_log:error("SSL verify_fun: no ~s:~s/~b exported~n",
-              [Module, Function, Arity]),
+            rabbit_log:error("SSL verify_fun: no ~s:~s/3 exported~n",
+              [Module, Function]),
             throw({error, {invalid_verify_fun, function_not_exported}})
     end.
 
@@ -269,35 +242,6 @@ fix_ssl_protocol_versions(Config) ->
             pset(versions, Configured -- ?BAD_SSL_PROTOCOL_VERSIONS, Config)
     end.
 
-ssl_timeout() ->
-    {ok, Val} = application:get_env(rabbit, ssl_handshake_timeout),
-    Val.
-
-ssl_transform_fun(SslOpts) ->
-    fun (Sock) ->
-            Timeout = ssl_timeout(),
-            case catch ssl:ssl_accept(Sock, SslOpts, Timeout) of
-                {ok, SslSock} ->
-                    {ok, #ssl_socket{tcp = Sock, ssl = SslSock}};
-                {error, timeout} ->
-                    {error, {ssl_upgrade_error, timeout}};
-                {error, Reason} ->
-                    %% We have no idea what state the ssl_connection
-                    %% process is in - it could still be happily
-                    %% going, it might be stuck, or it could be just
-                    %% about to fail. There is little that our caller
-                    %% can do but close the TCP socket, but this could
-                    %% cause ssl alerts to get dropped (which is bad
-                    %% form, according to the TLS spec). So we give
-                    %% the ssl_connection a little bit of time to send
-                    %% such alerts.
-                    timer:sleep(Timeout),
-                    {error, {ssl_upgrade_error, Reason}};
-                {'EXIT', Reason} ->
-                    {error, {ssl_upgrade_failure, Reason}}
-            end
-    end.
-
 tcp_listener_addresses(Port) when is_integer(Port) ->
     tcp_listener_addresses_auto(Port);
 tcp_listener_addresses({"auto", Port}) ->
@@ -319,31 +263,34 @@ tcp_listener_addresses_auto(Port) ->
                      Listener <- port_to_listeners(Port)]).
 
 tcp_listener_spec(NamePrefix, {IPAddress, Port, Family}, SocketOpts,
-                  Protocol, Label, OnConnect) ->
+                  Transport, ProtoSup, ProtoOpts, Protocol, NumAcceptors, Label) ->
     {rabbit_misc:tcp_name(NamePrefix, IPAddress, Port),
      {tcp_listener_sup, start_link,
-      [IPAddress, Port, [Family | SocketOpts],
+      [IPAddress, Port, Transport, [Family | SocketOpts], ProtoSup, ProtoOpts,
        {?MODULE, tcp_listener_started, [Protocol]},
        {?MODULE, tcp_listener_stopped, [Protocol]},
-       OnConnect, Label]},
+       NumAcceptors, Label]},
      transient, infinity, supervisor, [tcp_listener_sup]}.
 
-start_tcp_listener(Listener) ->
-    start_listener(Listener, amqp, "TCP Listener",
-                   {?MODULE, start_client, []}).
+start_tcp_listener(Listener, NumAcceptors) ->
+    start_listener(Listener, NumAcceptors, amqp, "TCP Listener", tcp_opts()).
 
-start_ssl_listener(Listener, SslOpts) ->
-    start_listener(Listener, 'amqp/ssl', "SSL Listener",
-                   {?MODULE, start_ssl_client, [SslOpts]}).
+start_ssl_listener(Listener, SslOpts, NumAcceptors) ->
+    start_listener(Listener, NumAcceptors, 'amqp/ssl', "SSL Listener", tcp_opts() ++ SslOpts).
 
-start_listener(Listener, Protocol, Label, OnConnect) ->
-    [start_listener0(Address, Protocol, Label, OnConnect) ||
+start_listener(Listener, NumAcceptors, Protocol, Label, Opts) ->
+    [start_listener0(Address, NumAcceptors, Protocol, Label, Opts) ||
         Address <- tcp_listener_addresses(Listener)],
     ok.
 
-start_listener0(Address, Protocol, Label, OnConnect) ->
-    Spec = tcp_listener_spec(rabbit_tcp_listener_sup, Address, tcp_opts(),
-                             Protocol, Label, OnConnect),
+start_listener0(Address, NumAcceptors, Protocol, Label, Opts) ->
+    Transport = case Protocol of
+        amqp -> ranch_tcp;
+        'amqp/ssl' -> ranch_ssl
+    end,
+    Spec = tcp_listener_spec(rabbit_tcp_listener_sup, Address, Opts,
+                             Transport, rabbit_connection_sup, [], Protocol,
+                             NumAcceptors, Label),
     case supervisor:start_child(rabbit_sup, Spec) of
         {ok, _}                -> ok;
         {error, {shutdown, _}} -> {IPAddress, Port, _Family} = Address,
@@ -400,28 +347,6 @@ on_node_down(Node) ->
                    "Keep ~s listeners: the node is already back~n", [Node])
     end.
 
-start_client(Sock, SockTransform) ->
-    {ok, _Child, Reader} = supervisor:start_child(rabbit_tcp_client_sup, []),
-    ok = rabbit_net:controlling_process(Sock, Reader),
-    Reader ! {go, Sock, SockTransform},
-
-    %% In the event that somebody floods us with connections, the
-    %% reader processes can spew log events at error_logger faster
-    %% than it can keep up, causing its mailbox to grow unbounded
-    %% until we eat all the memory available and crash. So here is a
-    %% meaningless synchronous call to the underlying gen_event
-    %% mechanism. When it returns the mailbox is drained, and we
-    %% return to our caller to accept more connetions.
-    gen_event:which_handlers(error_logger),
-
-    Reader.
-
-start_client(Sock) ->
-    start_client(Sock, fun (S) -> {ok, S} end).
-
-start_ssl_client(SslOpts, Sock) ->
-    start_client(Sock, ssl_transform_fun(SslOpts)).
-
 register_connection(Pid) -> pg_local:join(rabbit_connections, Pid).
 
 unregister_connection(Pid) -> pg_local:leave(rabbit_connections, Pid).
@@ -440,6 +365,11 @@ connection_info(Pid, Items) -> rabbit_reader:info(Pid, Items).
 connection_info_all() -> cmap(fun (Q) -> connection_info(Q) end).
 connection_info_all(Items) -> cmap(fun (Q) -> connection_info(Q, Items) end).
 
+connection_info_all(Items, Ref, AggregatorPid) ->
+    rabbit_control_misc:emitting_map_with_exit_handler(
+      AggregatorPid, Ref, fun(Q) -> connection_info(Q, Items) end,
+      connections()).
+
 close_connection(Pid, Explanation) ->
     rabbit_log:info("Closing connection ~p because ~p~n", [Pid, Explanation]),
     case lists:member(Pid, connections()) of
@@ -476,21 +406,7 @@ cmap(F) -> rabbit_misc:filter_exit_map(F, connections()).
 
 tcp_opts() ->
     {ok, ConfigOpts} = application:get_env(rabbit, tcp_listen_options),
-    merge_essential_tcp_listen_options(ConfigOpts).
-
--define(ESSENTIAL_LISTEN_OPTIONS,
-        [binary,
-         {active, false},
-         {packet, raw},
-         {reuseaddr, true},
-         {nodelay, true}]).
-
-merge_essential_tcp_listen_options(Opts) ->
-    lists:foldl(fun ({K, _} = Opt, Acc) ->
-                        lists:keystore(K, 1, Acc, Opt);
-                    (Opt, Acc) ->
-                        [Opt | Acc]
-                end , Opts, ?ESSENTIAL_LISTEN_OPTIONS).
+    ConfigOpts.
 
 %% inet_parse:address takes care of ip string, like "0.0.0.0"
 %% inet:getaddr returns immediately for ip tuple {0,0,0,0},
similarity index 80%
rename from rabbitmq-server/src/rabbit_nodes.erl
rename to deps/rabbit_common/src/rabbit_nodes.erl
index 090aacc63cacd56e4e1c245589845477636086d5..70a5355d9fc692749860bb6d9e4a357fb5d18fa0 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_nodes).
 
 -export([names/1, diagnostics/1, make/1, parts/1, cookie_hash/0,
          is_running/2, is_process_running/2,
-         cluster_name/0, set_cluster_name/1, ensure_epmd/0]).
+         cluster_name/0, set_cluster_name/1, ensure_epmd/0,
+         all_running/0]).
 
 -include_lib("kernel/include/inet.hrl").
 
 -define(EPMD_TIMEOUT, 30000).
 -define(TCP_DIAGNOSTIC_TIMEOUT, 5000).
+-define(ERROR_LOGGER_HANDLER, rabbit_error_logger_handler).
 
 %%----------------------------------------------------------------------------
 %% Specs
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(names/1 :: (string()) -> rabbit_types:ok_or_error2(
-                                 [{string(), integer()}], term())).
--spec(diagnostics/1 :: ([node()]) -> string()).
--spec(make/1 :: ({string(), string()} | string()) -> node()).
--spec(parts/1 :: (node() | string()) -> {string(), string()}).
--spec(cookie_hash/0 :: () -> string()).
--spec(is_running/2 :: (node(), atom()) -> boolean()).
--spec(is_process_running/2 :: (node(), atom()) -> boolean()).
--spec(cluster_name/0 :: () -> binary()).
--spec(set_cluster_name/1 :: (binary()) -> 'ok').
--spec(ensure_epmd/0 :: () -> 'ok').
-
--endif.
+-spec names(string()) ->
+          rabbit_types:ok_or_error2([{string(), integer()}], term()).
+-spec diagnostics([node()]) -> string().
+-spec make({string(), string()} | string()) -> node().
+-spec parts(node() | string()) -> {string(), string()}.
+-spec cookie_hash() -> string().
+-spec is_running(node(), atom()) -> boolean().
+-spec is_process_running(node(), atom()) -> boolean().
+-spec cluster_name() -> binary().
+-spec set_cluster_name(binary()) -> 'ok'.
+-spec ensure_epmd() -> 'ok'.
+-spec all_running() -> [node()].
 
 %%----------------------------------------------------------------------------
 
@@ -60,12 +59,21 @@ names(Hostname) ->
     end.
 
 diagnostics(Nodes) ->
+    verbose_erlang_distribution(true),
     NodeDiags = [{"~nDIAGNOSTICS~n===========~n~n"
                   "attempted to contact: ~p~n", [Nodes]}] ++
         [diagnostics_node(Node) || Node <- Nodes] ++
         current_node_details(),
+    verbose_erlang_distribution(false),
     rabbit_misc:format_many(lists:flatten(NodeDiags)).
 
+verbose_erlang_distribution(true) ->
+    net_kernel:verbose(1),
+    error_logger:add_report_handler(?ERROR_LOGGER_HANDLER);
+verbose_erlang_distribution(false) ->
+    net_kernel:verbose(0),
+    error_logger:delete_report_handler(?ERROR_LOGGER_HANDLER).
+
 current_node_details() ->
     [{"~ncurrent node details:~n- node name: ~w", [node()]},
      case init:get_argument(home) of
@@ -134,11 +142,7 @@ dist_broken_diagnostics(Name, Host, NamePorts) ->
             [{"  * epmd reports node '~s' running on port ~b", [Name, Port]} |
              case diagnose_connect(Host, Port) of
                  ok ->
-                     [{"  * TCP connection succeeded but Erlang distribution "
-                       "failed~n"
-                       "  * suggestion: hostname mismatch?~n"
-                       "  * suggestion: is the cookie set correctly?~n"
-                       "  * suggestion: is the Erlang distribution using TLS?", []}];
+                     connection_succeeded_diagnostics();
                  {error, Reason} ->
                      [{"  * can't establish TCP connection, reason: ~s~n"
                        "  * suggestion: blocked by firewall?",
@@ -146,6 +150,20 @@ dist_broken_diagnostics(Name, Host, NamePorts) ->
              end]
     end.
 
+connection_succeeded_diagnostics() ->
+    case gen_event:call(error_logger, ?ERROR_LOGGER_HANDLER, get_connection_report) of
+        [] ->
+            [{"  * TCP connection succeeded but Erlang distribution "
+              "failed~n"
+              "  * suggestion: hostname mismatch?~n"
+              "  * suggestion: is the cookie set correctly?~n"
+              "  * suggestion: is the Erlang distribution using TLS?", []}];
+        Report ->
+            [{"  * TCP connection succeeded but Erlang distribution "
+              "failed~n", []}]
+                ++ Report
+    end.
+
 diagnose_connect(Host, Port) ->
     case inet:gethostbyname(Host) of
         {ok, #hostent{h_addrtype = Family}} ->
@@ -202,7 +220,7 @@ set_cluster_name(Name) ->
 
 ensure_epmd() ->
     {ok, Prog} = init:get_argument(progname),
-    ID = random:uniform(1000000000),
+    ID = rabbit_misc:random(1000000000),
     Port = open_port(
              {spawn_executable, os:find_executable(Prog)},
              [{args, ["-sname", rabbit_misc:format("epmd-starter-~b", [ID]),
@@ -215,3 +233,5 @@ port_shutdown_loop(Port) ->
         {Port, {exit_status, _Rc}} -> ok;
         {Port, _}                  -> port_shutdown_loop(Port)
     end.
+
+all_running() -> rabbit_mnesia:cluster_nodes(running).
diff --git a/deps/rabbit_common/src/rabbit_password_hashing.erl b/deps/rabbit_common/src/rabbit_password_hashing.erl
new file mode 100644 (file)
index 0000000..fd2ba70
--- /dev/null
@@ -0,0 +1,20 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_password_hashing).
+-include("rabbit.hrl").
+
+-callback hash(rabbit_types:password()) -> rabbit_types:password_hash().
similarity index 77%
rename from rabbitmq-server/src/rabbit_policy_validator.erl
rename to deps/rabbit_common/src/rabbit_policy_validator.erl
index 7ebea83516bfed3a75931aaa62e0d4bb292e4e55..110a26c9f95769bc8892dc2011bf7e653482694a 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_policy_validator).
 
--ifdef(use_specs).
-
 -export_type([validate_results/0]).
 
 -type(validate_results() ::
         'ok' | {error, string(), [term()]} | [validate_results()]).
 
 -callback validate_policy([{binary(), term()}]) -> validate_results().
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [
-     {validate_policy, 1}
-    ];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
similarity index 89%
rename from rabbitmq-server/src/rabbit_queue_collector.erl
rename to deps/rabbit_common/src/rabbit_queue_collector.erl
index 734228be34977d1b766c4dbda30102c0e7a57a54..82a891a03ef0fd6952446bc0433887de232f55eb 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_queue_collector).
 
+%% Queue collector keeps track of exclusive queues and cleans them
+%% up e.g. when their connection is closed.
+
 -behaviour(gen_server).
 
 -export([start_link/1, register/2, delete_all/1]).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/1 :: (rabbit_types:proc_name()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(register/2 :: (pid(), pid()) -> 'ok').
--spec(delete_all/1 :: (pid()) -> 'ok').
-
--endif.
+-spec start_link(rabbit_types:proc_name()) -> rabbit_types:ok_pid_or_error().
+-spec register(pid(), pid()) -> 'ok'.
+-spec delete_all(pid()) -> 'ok'.
 
 %%----------------------------------------------------------------------------
 
similarity index 86%
rename from rabbitmq-server/src/rabbit_queue_decorator.erl
rename to deps/rabbit_common/src/rabbit_queue_decorator.erl
index 129f51d099b8c6723bf7dd5a5b1fcc603d2e76b6..ee248027276cd0bdb97ce59c7dd1331f4b330128 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_queue_decorator).
@@ -22,8 +22,6 @@
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -callback startup(rabbit_types:amqqueue()) -> 'ok'.
 
 -callback shutdown(rabbit_types:amqqueue()) -> 'ok'.
 -callback consumer_state_changed(
             rabbit_types:amqqueue(), integer(), boolean()) -> 'ok'.
 
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{description, 0}, {startup, 1}, {shutdown, 1}, {policy_changed, 2},
-     {active_for, 1}, {consumer_state_changed, 3}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
-
 %%----------------------------------------------------------------------------
 
 select(Modules) ->
diff --git a/deps/rabbit_common/src/rabbit_queue_master_locator.erl b/deps/rabbit_common/src/rabbit_queue_master_locator.erl
new file mode 100644 (file)
index 0000000..21596ff
--- /dev/null
@@ -0,0 +1,21 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License at
+%% http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%% License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_queue_master_locator).
+
+-callback description()                -> [proplists:property()].
+-callback queue_master_location(rabbit_types:amqqueue()) ->
+    {'ok', node()} | {'error', term()}.
similarity index 76%
rename from rabbitmq-server/src/rabbit_reader.erl
rename to deps/rabbit_common/src/rabbit_reader.erl
index 8812e1d0e1e3a1ec2b28b0231804580c35a422f1..e5aeadcd268c970e6a31c5b23a7fa8cf1ef1bf65 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_reader).
+
+%% This is an AMQP 0-9-1 connection implementation. If AMQP 1.0 plugin is enabled,
+%% this module passes control of incoming AMQP 1.0 connections to it.
+%%
+%% Every connection (as in, a process using this module)
+%% is a controlling process for a server socket.
+%%
+%% Connections have a number of responsibilities:
+%%
+%%  * Performing protocol handshake
+%%  * Parsing incoming data and dispatching protocol methods
+%%  * Authenticating clients (with the help of authentication backends)
+%%  * Enforcing TCP backpressure (throttling clients)
+%%  * Enforcing connection limits, e.g. channel_max
+%%  * Channel management
+%%  * Setting up heartbeater and alarm notifications
+%%  * Emitting connection and network activity metric events
+%%  * Gracefully handling client disconnects, channel termination, etc
+%%
+%% and a few more.
+%%
+%% Every connection has
+%%
+%%  * a queue collector which is responsible for keeping
+%%    track of exclusive queues on the connection and their cleanup.
+%%  * a heartbeater that's responsible for sending heartbeat frames to clients,
+%%    keeping track of the incoming ones and notifying connection about
+%%    heartbeat timeouts
+%%  * Stats timer, a timer that is used to periodically emit metric events
+%%
+%% Some dependencies are started under a separate supervisor to avoid deadlocks
+%% during system shutdown. See rabbit_channel_sup:start_link/0 for details.
+%%
+%% Reader processes are special processes (in the OTP sense).
+
 -include("rabbit_framing.hrl").
 -include("rabbit.hrl").
 
--export([start_link/1, info_keys/0, info/1, info/2, force_event_refresh/2,
+-export([start_link/3, info_keys/0, info/1, info/2, force_event_refresh/2,
          shutdown/2]).
 
 -export([system_continue/3, system_terminate/4, system_code_change/4]).
 
--export([init/2, mainloop/4, recvloop/4]).
+-export([init/4, mainloop/4, recvloop/4]).
 
 -export([conserve_resources/3, server_properties/1]).
 
 -define(NORMAL_TIMEOUT, 3).
 -define(CLOSING_TIMEOUT, 30).
 -define(CHANNEL_TERMINATION_TIMEOUT, 3).
+%% we wait for this many seconds before closing TCP connection
+%% with a client that failed to log in. Provides some relief
+%% from connection storms and DoS.
 -define(SILENT_CLOSE_DELAY, 3).
 -define(CHANNEL_MIN, 1).
 
 %%--------------------------------------------------------------------------
 
--record(v1, {parent, sock, connection, callback, recv_len, pending_recv,
-             connection_state, helper_sup, queue_collector, heartbeater,
-             stats_timer, channel_sup_sup_pid, channel_count, throttle}).
-
--record(connection, {name, host, peer_host, port, peer_port,
-                     protocol, user, timeout_sec, frame_max, channel_max, vhost,
-                     client_properties, capabilities,
-                     auth_mechanism, auth_state, connected_at}).
-
--record(throttle, {alarmed_by, last_blocked_by, last_blocked_at}).
+-record(v1, {
+          %% parent process
+          parent,
+          %% socket
+          sock,
+          %% connection state, see connection record
+          connection,
+          callback,
+          recv_len,
+          pending_recv,
+          %% pre_init | securing | running | blocking | blocked | closing | closed | {become, F}
+          connection_state,
+          %% see comment in rabbit_connection_sup:start_link/0
+          helper_sup,
+          %% takes care of cleaning up exclusive queues,
+          %% see rabbit_queue_collector
+          queue_collector,
+          %% sends and receives heartbeat frames,
+          %% see rabbit_heartbeat
+          heartbeater,
+          %% timer used to emit statistics
+          stats_timer,
+          %% channel supervisor
+          channel_sup_sup_pid,
+          %% how many channels this connection has
+          channel_count,
+          %% throttling state, for both
+          %% credit- and resource-driven flow control
+          throttle}).
+
+-record(connection, {
+          %% e.g. <<"127.0.0.1:55054 -> 127.0.0.1:5672">>
+          name,
+          %% used for logging: same as `name`, but optionally
+          %% augmented with user-supplied name
+          log_name,
+          %% server host
+          host,
+          %% client host
+          peer_host,
+          %% server port
+          port,
+          %% client port
+          peer_port,
+          %% protocol framing implementation module,
+          %% e.g. rabbit_framing_amqp_0_9_1
+          protocol,
+          user,
+          %% heartbeat timeout value used, 0 means
+          %% heartbeats are disabled
+          timeout_sec,
+          %% maximum allowed frame size,
+          %% see frame_max in the AMQP 0-9-1 spec
+          frame_max,
+          %% greatest channel number allowed,
+          %% see channel_max in the AMQP 0-9-1 spec
+          channel_max,
+          vhost,
+          %% client name, version, platform, etc
+          client_properties,
+          %% what lists protocol extensions
+          %% does this client support?
+          capabilities,
+          %% authentication mechanism used
+          %% as a pair of {Name, Module}
+          auth_mechanism,
+          %% authentication mechanism state,
+          %% initialised by rabbit_auth_mechanism:init/1
+          %% implementations
+          auth_state,
+          %% time of connection
+          connected_at}).
+
+-record(throttle, {
+  %% list of active alarms
+  alarmed_by,
+  %% flow | resource
+  last_blocked_by,
+  %% never | timestamp()
+  last_blocked_at
+}).
 
 -define(STATISTICS_KEYS, [pid, recv_oct, recv_cnt, send_oct, send_cnt,
-                          send_pend, state, channels]).
+                          send_pend, state, channels, reductions,
+                          garbage_collection]).
 
 -define(CREATION_EVENT_KEYS,
         [pid, name, port, peer_port, host,
 -define(INFO_KEYS, ?CREATION_EVENT_KEYS ++ ?STATISTICS_KEYS -- [pid]).
 
 -define(AUTH_NOTIFICATION_INFO_KEYS,
-        [host, vhost, name, peer_host, peer_port, protocol, auth_mechanism,
+        [host, name, peer_host, peer_port, protocol, auth_mechanism,
          ssl, ssl_protocol, ssl_cipher, peer_cert_issuer, peer_cert_subject,
          peer_cert_validity]).
 
 
 %%--------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/1 :: (pid()) -> rabbit_types:ok(pid())).
--spec(info_keys/0 :: () -> rabbit_types:info_keys()).
--spec(info/1 :: (pid()) -> rabbit_types:infos()).
--spec(info/2 :: (pid(), rabbit_types:info_keys()) -> rabbit_types:infos()).
--spec(force_event_refresh/2 :: (pid(), reference()) -> 'ok').
--spec(shutdown/2 :: (pid(), string()) -> 'ok').
--spec(conserve_resources/3 :: (pid(), atom(), boolean()) -> 'ok').
--spec(server_properties/1 :: (rabbit_types:protocol()) ->
-                                  rabbit_framing:amqp_table()).
+-spec start_link(pid(), any(), rabbit_net:socket()) -> rabbit_types:ok(pid()).
+-spec info_keys() -> rabbit_types:info_keys().
+-spec info(pid()) -> rabbit_types:infos().
+-spec info(pid(), rabbit_types:info_keys()) -> rabbit_types:infos().
+-spec force_event_refresh(pid(), reference()) -> 'ok'.
+-spec shutdown(pid(), string()) -> 'ok'.
+-type resource_alert() :: {WasAlarmSetForNode :: boolean(),
+                           IsThereAnyAlarmsWithSameSourceInTheCluster :: boolean(),
+                           NodeForWhichAlarmWasSetOrCleared :: node()}.
+-spec conserve_resources(pid(), atom(), resource_alert()) -> 'ok'.
+-spec server_properties(rabbit_types:protocol()) ->
+          rabbit_framing:amqp_table().
 
 %% These specs only exists to add no_return() to keep dialyzer happy
--spec(init/2 :: (pid(), pid()) -> no_return()).
--spec(start_connection/5 ::
-        (pid(), pid(), any(), rabbit_net:socket(),
-         fun ((rabbit_net:socket()) ->
-                     rabbit_types:ok_or_error2(
-                       rabbit_net:socket(), any()))) -> no_return()).
-
--spec(mainloop/4 :: (_,[binary()], non_neg_integer(), #v1{}) -> any()).
--spec(system_code_change/4 :: (_,_,_,_) -> {'ok',_}).
--spec(system_continue/3 :: (_,_,{[binary()], non_neg_integer(), #v1{}}) ->
-                                any()).
--spec(system_terminate/4 :: (_,_,_,_) -> none()).
+-spec init(pid(), pid(), any(), rabbit_net:socket()) -> no_return().
+-spec start_connection(pid(), pid(), any(), rabbit_net:socket()) ->
+          no_return().
 
--endif.
+-spec mainloop(_,[binary()], non_neg_integer(), #v1{}) -> any().
+-spec system_code_change(_,_,_,_) -> {'ok',_}.
+-spec system_continue(_,_,{[binary()], non_neg_integer(), #v1{}}) -> any().
+-spec system_terminate(_,_,_,_) -> none().
 
 %%--------------------------------------------------------------------------
 
-start_link(HelperSup) ->
-    {ok, proc_lib:spawn_link(?MODULE, init, [self(), HelperSup])}.
+start_link(HelperSup, Ref, Sock) ->
+    Pid = proc_lib:spawn_link(?MODULE, init, [self(), HelperSup, Ref, Sock]),
+
+    %% In the event that somebody floods us with connections, the
+    %% reader processes can spew log events at error_logger faster
+    %% than it can keep up, causing its mailbox to grow unbounded
+    %% until we eat all the memory available and crash. So here is a
+    %% meaningless synchronous call to the underlying gen_event
+    %% mechanism. When it returns the mailbox is drained, and we
+    %% return to our caller to accept more connections.
+    gen_event:which_handlers(error_logger),
+
+    {ok, Pid}.
 
 shutdown(Pid, Explanation) ->
     gen_server:call(Pid, {shutdown, Explanation}, infinity).
 
-init(Parent, HelperSup) ->
+init(Parent, HelperSup, Ref, Sock) ->
+    rabbit_net:accept_ack(Ref, Sock),
     Deb = sys:debug_options([]),
-    receive
-        {go, Sock, SockTransform} ->
-            start_connection(Parent, HelperSup, Deb, Sock, SockTransform)
-    end.
+    start_connection(Parent, HelperSup, Deb, Sock).
 
 system_continue(Parent, Deb, {Buf, BufLen, State}) ->
     mainloop(Deb, Buf, BufLen, State#v1{parent = Parent}).
@@ -140,7 +254,7 @@ info(Pid, Items) ->
 force_event_refresh(Pid, Ref) ->
     gen_server:cast(Pid, {force_event_refresh, Ref}).
 
-conserve_resources(Pid, Source, Conserve) ->
+conserve_resources(Pid, Source, {_, Conserve, _}) ->
     Pid ! {conserve_resources, Source, Conserve},
     ok.
 
@@ -184,7 +298,8 @@ server_capabilities(rabbit_framing_amqp_0_9_1) ->
      {<<"connection.blocked">>,           bool, true},
      {<<"consumer_priorities">>,          bool, true},
      {<<"authentication_failure_close">>, bool, true},
-     {<<"per_consumer_qos">>,             bool, true}];
+     {<<"per_consumer_qos">>,             bool, true},
+     {<<"direct_reply_to">>,              bool, true}];
 server_capabilities(_) ->
     [].
 
@@ -214,15 +329,14 @@ socket_op(Sock, Fun) ->
     case Fun(Sock) of
         {ok, Res}       -> Res;
         {error, Reason} -> socket_error(Reason),
-                           %% NB: this is tcp socket, even in case of ssl
                            rabbit_net:fast_close(Sock),
                            exit(normal)
     end.
 
-start_connection(Parent, HelperSup, Deb, Sock, SockTransform) ->
+start_connection(Parent, HelperSup, Deb, Sock) ->
     process_flag(trap_exit, true),
     Name = case rabbit_net:connection_string(Sock, inbound) of
-               {ok, Str}         -> Str;
+               {ok, Str}         -> list_to_binary(Str);
                {error, enotconn} -> rabbit_net:fast_close(Sock),
                                     exit(normal);
                {error, Reason}   -> socket_error(Reason),
@@ -230,15 +344,16 @@ start_connection(Parent, HelperSup, Deb, Sock, SockTransform) ->
                                     exit(normal)
            end,
     {ok, HandshakeTimeout} = application:get_env(rabbit, handshake_timeout),
-    ClientSock = socket_op(Sock, SockTransform),
+    InitialFrameMax = application:get_env(rabbit, initial_frame_max, ?FRAME_MIN_SIZE),
     erlang:send_after(HandshakeTimeout, self(), handshake_timeout),
     {PeerHost, PeerPort, Host, Port} =
         socket_op(Sock, fun (S) -> rabbit_net:socket_ends(S, inbound) end),
-    ?store_proc_name(list_to_binary(Name)),
+    ?store_proc_name(Name),
     State = #v1{parent              = Parent,
-                sock                = ClientSock,
+                sock                = Sock,
                 connection          = #connection{
-                  name               = list_to_binary(Name),
+                  name               = Name,
+                  log_name           = Name,
                   host               = Host,
                   peer_host          = PeerHost,
                   port               = Port,
@@ -246,13 +361,14 @@ start_connection(Parent, HelperSup, Deb, Sock, SockTransform) ->
                   protocol           = none,
                   user               = none,
                   timeout_sec        = (HandshakeTimeout / 1000),
-                  frame_max          = ?FRAME_MIN_SIZE,
+                  frame_max          = InitialFrameMax,
                   vhost              = none,
                   client_properties  = none,
                   capabilities       = [],
                   auth_mechanism     = none,
                   auth_state         = none,
-                  connected_at       = rabbit_misc:now_to_ms(os:timestamp())},
+                  connected_at       = time_compat:os_system_time(
+                                         milli_seconds)},
                 callback            = uninitialized_callback,
                 recv_len            = 0,
                 pending_recv        = false,
@@ -271,10 +387,10 @@ start_connection(Parent, HelperSup, Deb, Sock, SockTransform) ->
              [Deb, [], 0, switch_callback(rabbit_event:init_stats_timer(
                                             State, #v1.stats_timer),
                                           handshake, 8)]}),
-        log(info, "closing AMQP connection ~p (~s)~n", [self(), Name])
+        log(info, "closing AMQP connection ~p (~s)~n", [self(), dynamic_connection_name(Name)])
     catch
         Ex ->
-          log_connection_exception(Name, Ex)
+          log_connection_exception(dynamic_connection_name(Name), Ex)
     after
         %% We don't call gen_tcp:close/1 here since it waits for
         %% pending output to be sent, which results in unnecessary
@@ -283,27 +399,30 @@ start_connection(Parent, HelperSup, Deb, Sock, SockTransform) ->
         %% the socket. However, to keep the file_handle_cache
         %% accounting as accurate as possible we ought to close the
         %% socket w/o delay before termination.
-        rabbit_net:fast_close(ClientSock),
+        rabbit_net:fast_close(Sock),
         rabbit_networking:unregister_connection(self()),
         rabbit_event:notify(connection_closed, [{pid, self()}])
     end,
     done.
 
 log_connection_exception(Name, Ex) ->
-  Severity = case Ex of
-      connection_closed_with_no_data_received -> debug;
-      connection_closed_abruptly              -> warning;
-      _                                       -> error
-    end,
-  log_connection_exception(Severity, Name, Ex).
+    Severity = case Ex of
+                   connection_closed_with_no_data_received -> debug;
+                   connection_closed_abruptly              -> warning;
+                   _                                       -> error
+               end,
+    log_connection_exception(Severity, Name, Ex).
 
 log_connection_exception(Severity, Name, {heartbeat_timeout, TimeoutSec}) ->
-  %% Long line to avoid extra spaces and line breaks in log
-  log(Severity, "closing AMQP connection ~p (~s):~nMissed heartbeats from client, timeout: ~ps~n",
-    [self(), Name, TimeoutSec]);
+    %% Long line to avoid extra spaces and line breaks in log
+    log(Severity, "closing AMQP connection ~p (~s):~nmissed heartbeats from client, timeout: ~ps~n",
+        [self(), Name, TimeoutSec]);
+log_connection_exception(Severity, Name, connection_closed_abruptly) ->
+    log(Severity, "closing AMQP connection ~p (~s):~nclient unexpectedly closed TCP connection~n",
+        [self(), Name]);
 log_connection_exception(Severity, Name, Ex) ->
-  log(Severity, "closing AMQP connection ~p (~s):~n~p~n",
-    [self(), Name, Ex]).
+    log(Severity, "closing AMQP connection ~p (~s):~n~p~n",
+        [self(), Name, Ex]).
 
 run({M, F, A}) ->
     try apply(M, F, A)
@@ -371,6 +490,10 @@ mainloop(Deb, Buf, BufLen, State = #v1{sock = Sock,
             stop(tcp_healthcheck, State);
         closed ->
             stop(closed, State);
+        {other, {heartbeat_send_error, Reason}} ->
+            %% The only portable way to detect a disconnect on a blocked
+            %% connection is to wait for a heartbeat send failure.
+            stop(Reason, State);
         {error, Reason} ->
             stop(Reason, State);
         {other, {system, From, Request}} ->
@@ -516,7 +639,8 @@ maybe_block(State = #v1{connection_state = blocking,
     State1 = State#v1{connection_state = blocked,
                       throttle = update_last_blocked_by(
                                    Throttle#throttle{
-                                     last_blocked_at = erlang:now()})},
+                                     last_blocked_at =
+                                       time_compat:monotonic_time()})},
     case {blocked_by_alarm(State), blocked_by_alarm(State1)} of
         {false, true} -> ok = send_blocked(State1);
         {_,        _} -> ok
@@ -572,7 +696,7 @@ close_connection(State = #v1{queue_collector = Collector,
     %% current connection, and are deleted when that connection
     %% closes."  This does not strictly imply synchrony, but in
     %% practice it seems to be what people assume.
-    rabbit_queue_collector:delete_all(Collector),
+    clean_up_exclusive_queues(Collector),
     %% We terminate the connection after the specified interval, but
     %% no later than ?CLOSING_TIMEOUT seconds.
     erlang:send_after((if TimeoutSec > 0 andalso
@@ -581,6 +705,15 @@ close_connection(State = #v1{queue_collector = Collector,
                        end) * 1000, self(), terminate_connection),
     State#v1{connection_state = closed}.
 
+%% The queue collector will be undefined when connection
+%% tuning was never performed or didn't finish. In such cases
+%% there's also nothing to clean up.
+clean_up_exclusive_queues(undefined) ->
+    ok;
+
+clean_up_exclusive_queues(Collector) ->
+    rabbit_queue_collector:delete_all(Collector).
+
 handle_dependent_exit(ChPid, Reason, State) ->
     {Channel, State1} = channel_cleanup(ChPid, State),
     case {Channel, termination_kind(Reason)} of
@@ -611,9 +744,10 @@ wait_for_channel_termination(0, TimerRef, State) ->
 wait_for_channel_termination(N, TimerRef,
                              State = #v1{connection_state = CS,
                                          connection = #connection{
-                                                         name  = ConnName,
-                                                         user  = User,
-                                                         vhost = VHost}}) ->
+                                                         log_name  = ConnName,
+                                                         user      = User,
+                                                         vhost     = VHost},
+                                         sock = Sock}) ->
     receive
         {'DOWN', _MRef, process, ChPid, Reason} ->
             {Channel, State1} = channel_cleanup(ChPid, State),
@@ -630,6 +764,9 @@ wait_for_channel_termination(N, TimerRef,
                          CS, Channel, Reason]),
                     wait_for_channel_termination(N-1, TimerRef, State1)
             end;
+        {'EXIT', Sock, _Reason} ->
+            [channel_cleanup(ChPid, State) || ChPid <- all_channels()],
+            exit(normal);
         cancel_wait ->
             exit(channel_termination_timeout)
     end.
@@ -647,15 +784,23 @@ maybe_close(State) ->
 termination_kind(normal) -> controlled;
 termination_kind(_)      -> uncontrolled.
 
+format_hard_error(#amqp_error{name = N, explanation = E, method = M}) ->
+    io_lib:format("operation ~s caused a connection exception ~s: ~p", [M, N, E]);
+format_hard_error(Reason) ->
+    case io_lib:deep_char_list(Reason) of
+        true  -> Reason;
+        false -> rabbit_misc:format("~p", [Reason])
+    end.
+
 log_hard_error(#v1{connection_state = CS,
                    connection = #connection{
-                                   name  = ConnName,
-                                   user  = User,
-                                   vhost = VHost}}, Channel, Reason) ->
+                                   log_name  = ConnName,
+                                   user      = User,
+                                   vhost     = VHost}}, Channel, Reason) ->
     log(error,
         "Error on AMQP connection ~p (~s, vhost: '~s',"
-        " user: '~s', state: ~p), channel ~p:~n~p~n",
-        [self(), ConnName, VHost, User#user.username, CS, Channel, Reason]).
+        " user: '~s', state: ~p), channel ~p:~n~s~n",
+        [self(), ConnName, VHost, User#user.username, CS, Channel, format_hard_error(Reason)]).
 
 handle_exception(State = #v1{connection_state = closed}, Channel, Reason) ->
     log_hard_error(State, Channel, Reason),
@@ -664,12 +809,55 @@ handle_exception(State = #v1{connection = #connection{protocol = Protocol},
                              connection_state = CS},
                  Channel, Reason)
   when ?IS_RUNNING(State) orelse CS =:= closing ->
-    log_hard_error(State, Channel, Reason),
-    {0, CloseMethod} =
-        rabbit_binary_generator:map_exception(Channel, Reason, Protocol),
-    State1 = close_connection(terminate_channels(State)),
-    ok = send_on_channel0(State1#v1.sock, CloseMethod, Protocol),
-    State1;
+    respond_and_close(State, Channel, Protocol, Reason, Reason);
+%% authentication failure
+handle_exception(State = #v1{connection = #connection{protocol = Protocol,
+                                                      log_name = ConnName,
+                                                      capabilities = Capabilities},
+                             connection_state = starting},
+                 Channel, Reason = #amqp_error{name = access_refused,
+                                               explanation = ErrMsg}) ->
+    log(error,
+        "Error on AMQP connection ~p (~s, state: ~p):~n~s~n",
+        [self(), ConnName, starting, ErrMsg]),
+    %% respect authentication failure notification capability
+    case rabbit_misc:table_lookup(Capabilities,
+                                  <<"authentication_failure_close">>) of
+        {bool, true} ->
+            send_error_on_channel0_and_close(Channel, Protocol, Reason, State);
+        _ ->
+            close_connection(terminate_channels(State))
+    end;
+%% when loopback-only user tries to connect from a non-local host
+%% when user tries to access a vhost it has no permissions for
+handle_exception(State = #v1{connection = #connection{protocol = Protocol,
+                                                      log_name = ConnName,
+                                                      user = User},
+                             connection_state = opening},
+                 Channel, Reason = #amqp_error{name = not_allowed,
+                                               explanation = ErrMsg}) ->
+    log(error,
+        "Error on AMQP connection ~p (~s, user: '~s', state: ~p):~n~s~n",
+        [self(), ConnName, User#user.username, opening, ErrMsg]),
+    send_error_on_channel0_and_close(Channel, Protocol, Reason, State);
+handle_exception(State = #v1{connection = #connection{protocol = Protocol},
+                             connection_state = CS = opening},
+                 Channel, Reason = #amqp_error{}) ->
+    respond_and_close(State, Channel, Protocol, Reason,
+                      {handshake_error, CS, Reason});
+%% when negotiation fails, e.g. due to channel_max being higher than the
+%% maximum allowed limit
+handle_exception(State = #v1{connection = #connection{protocol = Protocol,
+                                                      log_name = ConnName,
+                                                      user = User},
+                             connection_state = tuning},
+                 Channel, Reason = #amqp_error{name = not_allowed,
+                                               explanation = ErrMsg}) ->
+    log(error,
+        "Error on AMQP connection ~p (~s,"
+        " user: '~s', state: ~p):~n~s~n",
+        [self(), ConnName, User#user.username, tuning, ErrMsg]),
+    send_error_on_channel0_and_close(Channel, Protocol, Reason, State);
 handle_exception(State, Channel, Reason) ->
     %% We don't trust the client at this point - force them to wait
     %% for a bit so they can't DOS us with repeated failed logins etc.
@@ -914,9 +1102,8 @@ refuse_connection(Sock, Exception, {A, B, C, D}) ->
     ok = inet_op(fun () -> rabbit_net:send(Sock, <<"AMQP",A,B,C,D>>) end),
     throw(Exception).
 
--ifdef(use_specs).
--spec(refuse_connection/2 :: (rabbit_net:socket(), any()) -> no_return()).
--endif.
+-spec refuse_connection(rabbit_net:socket(), any()) -> no_return().
+
 refuse_connection(Sock, Exception) ->
     refuse_connection(Sock, Exception, {0, 0, 9, 1}).
 
@@ -946,7 +1133,7 @@ handle_method0(#'connection.start_ok'{mechanism = Mechanism,
                                       response = Response,
                                       client_properties = ClientProperties},
                State0 = #v1{connection_state = starting,
-                            connection       = Connection,
+                            connection       = Connection0,
                             sock             = Sock}) ->
     AuthMechanism = auth_mechanism_to_module(Mechanism, Sock),
     Capabilities =
@@ -954,13 +1141,14 @@ handle_method0(#'connection.start_ok'{mechanism = Mechanism,
             {table, Capabilities1} -> Capabilities1;
             _                      -> []
         end,
+    Connection1 = Connection0#connection{
+                    client_properties = ClientProperties,
+                    capabilities      = Capabilities,
+                    auth_mechanism    = {Mechanism, AuthMechanism},
+                    auth_state        = AuthMechanism:init(Sock)},
+    Connection2 = augment_connection_log_name(Connection1),
     State = State0#v1{connection_state = securing,
-                      connection       =
-                          Connection#connection{
-                            client_properties = ClientProperties,
-                            capabilities      = Capabilities,
-                            auth_mechanism    = {Mechanism, AuthMechanism},
-                            auth_state        = AuthMechanism:init(Sock)}},
+                      connection       = Connection2},
     auth_phase(Response, State);
 
 handle_method0(#'connection.secure_ok'{response = Response},
@@ -981,8 +1169,19 @@ handle_method0(#'connection.tune_ok'{frame_max   = FrameMax,
     {ok, Collector} = rabbit_connection_helper_sup:start_queue_collector(
                         SupPid, Connection#connection.name),
     Frame = rabbit_binary_generator:build_heartbeat_frame(),
-    SendFun = fun() -> catch rabbit_net:send(Sock, Frame) end,
     Parent = self(),
+    SendFun =
+        fun() ->
+                case catch rabbit_net:send(Sock, Frame) of
+                    ok ->
+                        ok;
+                    {error, Reason} ->
+                        Parent ! {heartbeat_send_error, Reason};
+                    Unexpected ->
+                        Parent ! {heartbeat_send_error, Unexpected}
+                end,
+                ok
+        end,
     ReceiveFun = fun() -> Parent ! heartbeat_timeout end,
     Heartbeater = rabbit_heartbeat:start(
                     SupPid, Sock, Connection#connection.name,
@@ -1044,7 +1243,7 @@ handle_method0(_Method, #v1{connection_state = S}) ->
 validate_negotiated_integer_value(Field, Min, ClientValue) ->
     ServerValue = get_env(Field),
     if ClientValue /= 0 andalso ClientValue < Min ->
-            fail_negotiation(Field, min, ServerValue, ClientValue);
+            fail_negotiation(Field, min, Min, ClientValue);
        ServerValue /= 0 andalso (ClientValue =:= 0 orelse
                                  ClientValue > ServerValue) ->
             fail_negotiation(Field, max, ServerValue, ClientValue);
@@ -1054,7 +1253,7 @@ validate_negotiated_integer_value(Field, Min, ClientValue) ->
 
 %% keep dialyzer happy
 -spec fail_negotiation(atom(), 'min' | 'max', integer(), integer()) ->
-                              no_return().
+          no_return().
 fail_negotiation(Field, MinOrMax, ServerValue, ClientValue) ->
     {S1, S2} = case MinOrMax of
                    min -> {lower,  minimum};
@@ -1135,11 +1334,10 @@ auth_phase(Response,
                                                         auth_state = none}}
     end.
 
--ifdef(use_specs).
--spec(auth_fail/5 ::
+-spec auth_fail
         (rabbit_types:username() | none, string(), [any()], binary(), #v1{}) ->
-           no_return()).
--endif.
+            no_return().
+
 auth_fail(Username, Msg, Args, AuthName,
           State = #v1{connection = #connection{protocol     = Protocol,
                                                capabilities = Capabilities}}) ->
@@ -1202,10 +1400,17 @@ i(state, #v1{connection_state = ConnectionState,
         (credit_flow:blocked() %% throttled by flow now
          orelse                %% throttled by flow recently
            (WasBlockedBy =:= flow andalso T =/= never andalso
-            timer:now_diff(erlang:now(), T) < 5000000)) of
+            time_compat:convert_time_unit(time_compat:monotonic_time() - T,
+                                          native,
+                                          micro_seconds) < 5000000)) of
         true  -> flow;
         false -> ConnectionState
     end;
+i(garbage_collection, _State) ->
+    rabbit_misc:get_gc_info(self());
+i(reductions, _State) ->
+    {reductions, Reductions} = erlang:process_info(self(), reductions),
+    Reductions;
 i(Item,               #v1{connection = Conn}) -> ic(Item, Conn).
 
 ic(name,              #connection{name        = Name})     -> Name;
@@ -1234,14 +1439,19 @@ socket_info(Get, Select, #v1{sock = Sock}) ->
     end.
 
 ssl_info(F, #v1{sock = Sock}) ->
-    %% The first ok form is R14
-    %% The second is R13 - the extra term is exportability (by inspection,
-    %% the docs are wrong)
     case rabbit_net:ssl_info(Sock) of
-        nossl                   -> '';
-        {error, _}              -> '';
-        {ok, {P, {K, C, H}}}    -> F({P, {K, C, H}});
-        {ok, {P, {K, C, H, _}}} -> F({P, {K, C, H}})
+        nossl       -> '';
+        {error, _}  -> '';
+        {ok, Items} ->
+            P = proplists:get_value(protocol, Items),
+            CS = proplists:get_value(cipher_suite, Items),
+            %% The first form is R14.
+            %% The second is R13 - the extra term is exportability (by
+            %% inspection, the docs are wrong).
+            case CS of
+                {K, C, H}    -> F({P, {K, C, H}});
+                {K, C, H, _} -> F({P, {K, C, H}})
+            end
     end.
 
 cert_info(F, #v1{sock = Sock}) ->
@@ -1259,18 +1469,11 @@ emit_stats(State) ->
     Infos = infos(?STATISTICS_KEYS, State),
     rabbit_event:notify(connection_stats, Infos),
     State1 = rabbit_event:reset_stats_timer(State, #v1.stats_timer),
-    %% If we emit an event which looks like we are in flow control, it's not a
-    %% good idea for it to be our last even if we go idle. Keep emitting
-    %% events, either we stay busy or we drop out of flow control.
-    case proplists:get_value(state, Infos) of
-        flow -> ensure_stats_timer(State1);
-        _    -> State1
-    end.
+    ensure_stats_timer(State1).
 
 %% 1.0 stub
--ifdef(use_specs).
--spec(become_1_0/2 :: (non_neg_integer(), #v1{}) -> no_return()).
--endif.
+-spec become_1_0(non_neg_integer(), #v1{}) -> no_return().
+
 become_1_0(Id, State = #v1{sock = Sock}) ->
     case code:is_loaded(rabbit_amqp1_0_reader) of
         false -> refuse_connection(Sock, amqp1_0_plugin_not_enabled);
@@ -1294,3 +1497,34 @@ pack_for_1_0(Buf, BufLen, #v1{parent       = Parent,
                               pending_recv = PendingRecv,
                               helper_sup   = SupPid}) ->
     {Parent, Sock, RecvLen, PendingRecv, SupPid, Buf, BufLen}.
+
+respond_and_close(State, Channel, Protocol, Reason, LogErr) ->
+    log_hard_error(State, Channel, LogErr),
+    send_error_on_channel0_and_close(Channel, Protocol, Reason, State).
+
+send_error_on_channel0_and_close(Channel, Protocol, Reason, State) ->
+    {0, CloseMethod} =
+        rabbit_binary_generator:map_exception(Channel, Reason, Protocol),
+    State1 = close_connection(terminate_channels(State)),
+    ok = send_on_channel0(State#v1.sock, CloseMethod, Protocol),
+    State1.
+
+augment_connection_log_name(#connection{client_properties = ClientProperties,
+                                        name = Name} = Connection) ->
+    case rabbit_misc:table_lookup(ClientProperties, <<"connection_name">>) of
+        {longstr, UserSpecifiedName} ->
+            LogName = <<Name/binary, " - ", UserSpecifiedName/binary>>,
+            log(info, "Connection ~p (~s) has a client-provided name: ~s~n", [self(), Name, UserSpecifiedName]),
+            ?store_proc_name(LogName),
+            Connection#connection{log_name = LogName};
+        _ ->
+            Connection
+    end.
+
+dynamic_connection_name(Default) ->
+    case rabbit_misc:get_proc_name() of
+        {ok, Name} ->
+            Name;
+        _ ->
+            Default
+    end.
similarity index 78%
rename from rabbitmq-server/src/rabbit_runtime_parameter.erl
rename to deps/rabbit_common/src/rabbit_runtime_parameter.erl
index 1d4bc0b5753fbfb3efc41d536e507e64ffa0635f..e287d2f2bb5182c19ce088036c9454ce10e84228 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_runtime_parameter).
 
--ifdef(use_specs).
-
 -type(validate_results() ::
         'ok' | {error, string(), [term()]} | [validate_results()]).
 
                    term(), rabbit_types:user()) -> validate_results().
 -callback notify(rabbit_types:vhost(), binary(), binary(), term()) -> 'ok'.
 -callback notify_clear(rabbit_types:vhost(), binary(), binary()) -> 'ok'.
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [
-     {validate, 5},
-     {notify, 4},
-     {notify_clear, 3}
-    ];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
similarity index 97%
rename from rabbitmq-server/src/rabbit_types.erl
rename to deps/rabbit_common/src/rabbit_types.erl
index 3e2b5ba0c17de17e41dc8aa86e6715089f2744ab..29a3ef92a18a0581b96bbe55b18376b9308c4d03 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_types).
 
 -include("rabbit.hrl").
 
--ifdef(use_specs).
-
 -export_type([maybe/1, info/0, infos/0, info_key/0, info_keys/0,
               message/0, msg_id/0, basic_message/0,
               delivery/0, content/0, decoded_content/0, undecoded_content/0,
               username/0, password/0, password_hash/0,
               ok/1, error/1, ok_or_error/1, ok_or_error2/2, ok_pid_or_error/0,
               channel_exit/0, connection_exit/0, mfargs/0, proc_name/0,
-              proc_type_and_name/0]).
+              proc_type_and_name/0, timestamp/0]).
 
 -type(maybe(T) :: T | 'none').
+-type(timestamp() :: {non_neg_integer(), non_neg_integer(), non_neg_integer()}).
 -type(vhost() :: binary()).
 -type(ctag() :: binary()).
 
 
 -type(proc_name() :: term()).
 -type(proc_type_and_name() :: {atom(), proc_name()}).
-
--endif. % use_specs
similarity index 82%
rename from rabbitmq-server/src/rabbit_writer.erl
rename to deps/rabbit_common/src/rabbit_writer.erl
index 7cba7170a4cdc7c797fb55d351b38547ed4f4c62..3884f1a1e99cad6f7d54ccb38282d05b327a3ebe 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_writer).
+
+%% This module backs writer processes ("writers"). The responsibility of
+%% a writer is to serialise protocol methods and write them to the socket.
+%% Every writer is associated with a channel and normally it's the channel
+%% that delegates method delivery to it. However, rabbit_reader
+%% (connection process) can use this module's functions to send data
+%% on channel 0, which is only used for connection negotiation and
+%% other "special" purposes.
+%%
+%% This module provides multiple functions that send protocol commands,
+%% including some that are credit flow-aware.
+%%
+%% Writers perform internal buffering. When the amount of data
+%% buffered exceeds a threshold, a socket flush is performed.
+%% See FLUSH_THRESHOLD for details.
+%%
+%% When a socket write fails, writer will exit.
+
 -include("rabbit.hrl").
 -include("rabbit_framing.hrl").
 
 %% internal
 -export([enter_mainloop/2, mainloop/2, mainloop1/2]).
 
--record(wstate, {sock, channel, frame_max, protocol, reader,
-                 stats_timer, pending}).
+-record(wstate, {
+    %% socket (port)
+    sock,
+    %% channel number
+    channel,
+    %% connection-negotiated frame_max setting
+    frame_max,
+    %% see #connection.protocol in rabbit_reader
+    protocol,
+    %% connection (rabbit_reader) process
+    reader,
+    %% statistics emission timer
+    stats_timer,
+    %% data pending delivery (between socket
+    %% flushes)
+    pending
+}).
 
 -define(HIBERNATE_AFTER, 5000).
 
 %%---------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start/6 ::
+-spec start
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid(),
-         rabbit_types:proc_name())
-        -> rabbit_types:ok(pid())).
--spec(start_link/6 ::
+         rabbit_types:proc_name()) ->
+            rabbit_types:ok(pid()).
+-spec start_link
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid(),
-         rabbit_types:proc_name())
-        -> rabbit_types:ok(pid())).
--spec(start/7 ::
+         rabbit_types:proc_name()) ->
+            rabbit_types:ok(pid()).
+-spec start
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid(),
-         rabbit_types:proc_name(), boolean())
-        -> rabbit_types:ok(pid())).
--spec(start_link/7 ::
+         rabbit_types:proc_name(), boolean()) ->
+            rabbit_types:ok(pid()).
+-spec start_link
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid(),
-         rabbit_types:proc_name(), boolean())
-        -> rabbit_types:ok(pid())).
-
--spec(system_code_change/4 :: (_,_,_,_) -> {'ok',_}).
--spec(system_continue/3 :: (_,_,#wstate{}) -> any()).
--spec(system_terminate/4 :: (_,_,_,_) -> none()).
-
--spec(send_command/2 ::
-        (pid(), rabbit_framing:amqp_method_record()) -> 'ok').
--spec(send_command/3 ::
-        (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content())
-        -> 'ok').
--spec(send_command_sync/2 ::
-        (pid(), rabbit_framing:amqp_method_record()) -> 'ok').
--spec(send_command_sync/3 ::
-        (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content())
-        -> 'ok').
--spec(send_command_and_notify/4 ::
-        (pid(), pid(), pid(), rabbit_framing:amqp_method_record())
-        -> 'ok').
--spec(send_command_and_notify/5 ::
+         rabbit_types:proc_name(), boolean()) ->
+            rabbit_types:ok(pid()).
+
+-spec system_code_change(_,_,_,_) -> {'ok',_}.
+-spec system_continue(_,_,#wstate{}) -> any().
+-spec system_terminate(_,_,_,_) -> none().
+
+-spec send_command(pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec send_command
+        (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content()) ->
+            'ok'.
+-spec send_command_sync(pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec send_command_sync
+        (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content()) ->
+            'ok'.
+-spec send_command_and_notify
+        (pid(), pid(), pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec send_command_and_notify
         (pid(), pid(), pid(), rabbit_framing:amqp_method_record(),
-         rabbit_types:content())
-        -> 'ok').
--spec(send_command_flow/2 ::
-        (pid(), rabbit_framing:amqp_method_record()) -> 'ok').
--spec(send_command_flow/3 ::
-        (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content())
-        -> 'ok').
--spec(flush/1 :: (pid()) -> 'ok').
--spec(internal_send_command/4 ::
+         rabbit_types:content()) ->
+            'ok'.
+-spec send_command_flow(pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec send_command_flow
+        (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content()) ->
+            'ok'.
+-spec flush(pid()) -> 'ok'.
+-spec internal_send_command
         (rabbit_net:socket(), rabbit_channel:channel_number(),
-         rabbit_framing:amqp_method_record(), rabbit_types:protocol())
-        -> 'ok').
--spec(internal_send_command/6 ::
+         rabbit_framing:amqp_method_record(), rabbit_types:protocol()) ->
+            'ok'.
+-spec internal_send_command
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          rabbit_framing:amqp_method_record(), rabbit_types:content(),
-         non_neg_integer(), rabbit_types:protocol())
-        -> 'ok').
-
--endif.
+         non_neg_integer(), rabbit_types:protocol()) ->
+            'ok'.
 
 %%---------------------------------------------------------------------------
 
@@ -311,8 +336,11 @@ internal_send_command_async(MethodRecord, Content,
     rabbit_basic:maybe_gc_large_msg(Content),
     maybe_flush(State#wstate{pending = [Frames | Pending]}).
 
+%% When the amount of protocol method data buffered exceeds
+%% this threshold, a socket flush is performed.
+%%
 %% This magic number is the tcp-over-ethernet MSS (1460) minus the
-%% minimum size of a AMQP basic.deliver method frame (24) plus basic
+%% minimum size of an AMQP 0-9-1 basic.deliver method frame (24) plus basic
 %% content header (22). The idea is that we want to flush just before
 %% exceeding the MSS.
 -define(FLUSH_THRESHOLD, 1414).
diff --git a/deps/rabbit_common/src/rand_compat.erl b/deps/rabbit_common/src/rand_compat.erl
new file mode 100644 (file)
index 0000000..e304fc8
--- /dev/null
@@ -0,0 +1,124 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rand_compat).
+
+%% We don't want warnings about the use of erlang:now/0 in
+%% this module.
+-compile(nowarn_deprecated_function).
+
+%% Declare versioned functions to allow dynamic code loading,
+%% depending on the Erlang version running. See 'code_version.erl' for details
+-erlang_version_support([
+    {18, [
+        {seed, 1, seed_pre_18, seed_post_18},
+        {seed, 2, seed_pre_18, seed_post_18},
+        {uniform, 0, uniform_pre_18, uniform_post_18},
+        {uniform, 1, uniform_pre_18, uniform_post_18},
+        {uniform_s, 1, uniform_s_pre_18, uniform_s_post_18},
+        {uniform_s, 2, uniform_s_pre_18, uniform_s_post_18}
+      ]}
+  ]).
+
+-export([
+    seed/1, seed_pre_18/1, seed_post_18/1,
+    seed/2, seed_pre_18/2, seed_post_18/2,
+    uniform/0, uniform_pre_18/0, uniform_post_18/0,
+    uniform/1, uniform_pre_18/1, uniform_post_18/1,
+    uniform_s/1, uniform_s_pre_18/1, uniform_s_post_18/1,
+    uniform_s/2, uniform_s_pre_18/2, uniform_s_post_18/2
+  ]).
+
+-define(IS_ALG(A), (A =:= exs64 orelse A =:= exsplus orelse A =:= exs1024)).
+
+%% export_seed_s/1 can't be implemented with `random`.
+%% export_seed_s/2 can't be implemented with `random`.
+
+%% normal_s/1 can't be implemented with `random`.
+%% normal_s/2 can't be implemented with `random`.
+
+%% seed/1.
+
+seed(AlgOrExpState) ->
+    code_version:update(?MODULE),
+    ?MODULE:seed(AlgOrExpState).
+
+seed_pre_18(Alg) when ?IS_ALG(Alg) -> random:seed();
+seed_pre_18(ExpState)              -> random:seed(ExpState).
+seed_post_18(AlgOrExpState)        -> rand:seed(AlgOrExpState).
+
+%% seed/2.
+
+seed(Alg, ExpState) ->
+    code_version:update(?MODULE),
+    ?MODULE:seed(Alg, ExpState).
+
+seed_pre_18(_Alg, ExpState) -> random:seed(ExpState).
+seed_post_18(Alg, ExpState) -> rand:seed(Alg, ExpState).
+
+%% seed_s/1 can't be implemented with `random`.
+%% seed_s/2 can't be implemented with `random`.
+
+%% uniform/0.
+
+uniform() ->
+    code_version:update(?MODULE),
+    ?MODULE:uniform().
+
+ensure_random_seed() ->
+    case get(random_seed) of
+        undefined ->
+            random:seed(erlang:phash2([node()]),
+                        time_compat:monotonic_time(),
+                        time_compat:unique_integer());
+        _ -> ok
+    end.
+
+uniform_pre_18()  ->
+    ensure_random_seed(),
+    random:uniform().
+
+uniform_post_18() -> rand:uniform().
+
+%% uniform/1.
+
+uniform(N) ->
+    code_version:update(?MODULE),
+    ?MODULE:uniform(N).
+
+uniform_pre_18(N)  ->
+    ensure_random_seed(),
+    random:uniform(N).
+
+uniform_post_18(N) -> rand:uniform(N).
+
+%% uniform_s/1.
+
+uniform_s(State) ->
+    code_version:update(?MODULE),
+    ?MODULE:uniform_s(State).
+
+uniform_s_pre_18(State)  -> random:uniform_s(State).
+uniform_s_post_18(State) -> rand:uniform_s(State).
+
+%% uniform_s/2.
+
+uniform_s(N, State) ->
+    code_version:update(?MODULE),
+    ?MODULE:uniform_s(N, State).
+
+uniform_s_pre_18(N, State)  -> random:uniform_s(N, State).
+uniform_s_post_18(N, State) -> rand:uniform_s(N, State).
diff --git a/deps/rabbit_common/src/ssl_compat.erl b/deps/rabbit_common/src/ssl_compat.erl
new file mode 100644 (file)
index 0000000..e007667
--- /dev/null
@@ -0,0 +1,92 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(ssl_compat).
+
+%% We don't want warnings about the use of erlang:now/0 in
+%% this module.
+-compile(nowarn_deprecated_function).
+
+%% Declare versioned functions to allow dynamic code loading,
+%% depending on the Erlang version running. See 'code_version.erl' for details
+-erlang_version_support(
+   [{18, [{connection_information, 1, connection_information_pre_18,
+           connection_information_post_18},
+          {connection_information, 2, connection_information_pre_18,
+           connection_information_post_18}]}
+   ]).
+
+-export([connection_information/1,
+         connection_information_pre_18/1,
+         connection_information_post_18/1,
+         connection_information/2,
+         connection_information_pre_18/2,
+         connection_information_post_18/2]).
+
+connection_information(SslSocket) ->
+    code_version:update(?MODULE),
+    ssl_compat:connection_information(SslSocket).
+
+connection_information_post_18(SslSocket) ->
+    ssl:connection_information(SslSocket).
+
+connection_information_pre_18(SslSocket) ->
+    case ssl:connection_info(SslSocket) of
+        {ok, {ProtocolVersion, CipherSuite}} ->
+            {ok, [{protocol, ProtocolVersion},
+                  {cipher_suite, CipherSuite}]};
+        {error, Reason} ->
+            {error, Reason}
+    end.
+
+connection_information(SslSocket, Items) ->
+    code_version:update(?MODULE),
+    ssl_compat:connection_information(SslSocket, Items).
+
+connection_information_post_18(SslSocket, Items) ->
+    ssl:connection_information(SslSocket, Items).
+
+connection_information_pre_18(SslSocket, Items) ->
+    WantProtocolVersion = lists:member(protocol, Items),
+    WantCipherSuite = lists:member(cipher_suite, Items),
+    if
+        WantProtocolVersion orelse WantCipherSuite ->
+            case ssl:connection_info(SslSocket) of
+                {ok, {ProtocolVersion, CipherSuite}} ->
+                    filter_information_items(ProtocolVersion,
+                                             CipherSuite,
+                                             Items,
+                                             []);
+                {error, Reason} ->
+                    {error, Reason}
+            end;
+        true ->
+            {ok, []}
+    end.
+
+filter_information_items(ProtocolVersion, CipherSuite, [protocol | Rest],
+  Result) ->
+    filter_information_items(ProtocolVersion, CipherSuite, Rest,
+      [{protocol, ProtocolVersion} | Result]);
+filter_information_items(ProtocolVersion, CipherSuite, [cipher_suite | Rest],
+  Result) ->
+    filter_information_items(ProtocolVersion, CipherSuite, Rest,
+      [{cipher_suite, CipherSuite} | Result]);
+filter_information_items(ProtocolVersion, CipherSuite, [_ | Rest],
+  Result) ->
+    filter_information_items(ProtocolVersion, CipherSuite, Rest, Result);
+filter_information_items(_ProtocolVersion, _CipherSuite, [], Result) ->
+    {ok, lists:reverse(Result)}.
similarity index 95%
rename from rabbitmq-server/src/supervisor2.erl
rename to deps/rabbit_common/src/supervisor2.erl
index 7b9421eb3ed44a9236401c9a549f1610adb5d497..22b78689ab7bc1fdd253e42b42f707bfcb55d8ac 100644 (file)
         terminate/2, code_change/3]).
 -export([try_again_restart/3]).
 
-%%--------------------------------------------------------------------------
--ifdef(use_specs).
 -export_type([child_spec/0, startchild_ret/0, strategy/0, sup_name/0]).
--endif.
-%%--------------------------------------------------------------------------
 
--ifdef(use_specs).
 -type child()    :: 'undefined' | pid().
 -type child_id() :: term().
 -type mfargs()   :: {M :: module(), F :: atom(), A :: [term()] | undefined}.
 
 -type strategy() :: 'one_for_all' | 'one_for_one'
                   | 'rest_for_one' | 'simple_one_for_one'.
--endif.
 
 %%--------------------------------------------------------------------------
 
--ifdef(use_specs).
 -record(child, {% pid is undefined when child is not running
                pid = undefined :: child() | {restarting,pid()} | [pid()],
                name            :: child_id(),
                child_type      :: worker(),
                modules = []    :: modules()}).
 -type child_rec() :: #child{}.
--else.
--record(child, {
-               pid = undefined,
-               name,
-               mfargs,
-               restart_type,
-               shutdown,
-               child_type,
-               modules = []}).
--endif.
 
 -define(DICT, dict).
 -define(SETS, sets).
 -define(SET, set).
 
--ifdef(use_specs).
+-include("include/old_builtin_types.hrl").
+
 -record(state, {name,
                strategy               :: strategy(),
                children = []          :: [child_rec()],
-               dynamics               :: ?DICT:?DICT() | ?SETS:?SET(),
+               dynamics               :: ?DICT_TYPE() | ?SET_TYPE(),
                intensity              :: non_neg_integer(),
                period                 :: pos_integer(),
                restarts = [],
                module,
                args}).
 -type state() :: #state{}.
--else.
--record(state, {name,
-               strategy,
-               children = [],
-               dynamics,
-               intensity,
-               period,
-               restarts = [],
-               module,
-               args}).
--endif.
 
 -define(is_simple(State), State#state.strategy =:= simple_one_for_one).
 -define(is_permanent(R), ((R =:= permanent) orelse
 -define(is_explicit_restart(R),
         R == {shutdown, restart}).
 
--ifdef(use_specs).
 -callback init(Args :: term()) ->
     {ok, {{RestartStrategy :: strategy(),
            MaxR            :: non_neg_integer(),
            MaxT            :: non_neg_integer()},
            [ChildSpec :: child_spec()]}}
     | ignore.
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{init,1}];
-behaviour_info(_Other) ->
-    undefined.
 
--endif.
 -define(restarting(_Pid_), {restarting,_Pid_}).
 
 %%% ---------------------------------------------------
@@ -188,7 +151,7 @@ behaviour_info(_Other) ->
 %%% Servers/processes should/could also be built using gen_server.erl.
 %%% SupName = {local, atom()} | {global, atom()}.
 %%% ---------------------------------------------------
--ifdef(use_specs).
+
 -type startlink_err() :: {'already_started', pid()}
                          | {'shutdown', term()}
                          | term().
@@ -198,23 +161,21 @@ behaviour_info(_Other) ->
       Module :: module(),
       Args :: term().
 
--endif.
 start_link(Mod, Args) ->
     gen_server:start_link(?MODULE, {self, Mod, Args}, []).
--ifdef(use_specs).
+
 -spec start_link(SupName, Module, Args) -> startlink_ret() when
       SupName :: sup_name(),
       Module :: module(),
       Args :: term().
--endif.
+
 start_link(SupName, Mod, Args) ->
     gen_server:start_link(SupName, ?MODULE, {SupName, Mod, Args}, []).
+
 %%% ---------------------------------------------------
 %%% Interface functions.
 %%% ---------------------------------------------------
--ifdef(use_specs).
+
 -type startchild_err() :: 'already_present'
                        | {'already_started', Child :: child()} | term().
 -type startchild_ret() :: {'ok', Child :: child()}
@@ -224,11 +185,10 @@ start_link(SupName, Mod, Args) ->
 -spec start_child(SupRef, ChildSpec) -> startchild_ret() when
       SupRef :: sup_ref(),
       ChildSpec :: child_spec() | (List :: [term()]).
--endif.
+
 start_child(Supervisor, ChildSpec) ->
     call(Supervisor, {start_child, ChildSpec}).
 
--ifdef(use_specs).
 -spec restart_child(SupRef, Id) -> Result when
       SupRef :: sup_ref(),
       Id :: child_id(),
@@ -237,17 +197,16 @@ start_child(Supervisor, ChildSpec) ->
               | {'error', Error},
       Error :: 'running' | 'restarting' | 'not_found' | 'simple_one_for_one' |
               term().
--endif.
+
 restart_child(Supervisor, Name) ->
     call(Supervisor, {restart_child, Name}).
 
--ifdef(use_specs).
 -spec delete_child(SupRef, Id) -> Result when
       SupRef :: sup_ref(),
       Id :: child_id(),
       Result :: 'ok' | {'error', Error},
       Error :: 'running' | 'restarting' | 'not_found' | 'simple_one_for_one'.
--endif.
+
 delete_child(Supervisor, Name) ->
     call(Supervisor, {delete_child, Name}).
 
@@ -257,28 +216,26 @@ delete_child(Supervisor, Name) ->
 %%          Note that the child is *always* terminated in some
 %%          way (maybe killed).
 %%-----------------------------------------------------------------
--ifdef(use_specs).
+
 -spec terminate_child(SupRef, Id) -> Result when
       SupRef :: sup_ref(),
       Id :: pid() | child_id(),
       Result :: 'ok' | {'error', Error},
       Error :: 'not_found' | 'simple_one_for_one'.
--endif.
+
 terminate_child(Supervisor, Name) ->
     call(Supervisor, {terminate_child, Name}).
 
--ifdef(use_specs).
 -spec which_children(SupRef) -> [{Id,Child,Type,Modules}] when
       SupRef :: sup_ref(),
       Id :: child_id() | undefined,
       Child :: child() | 'restarting',
       Type :: worker(),
       Modules :: modules().
--endif.
+
 which_children(Supervisor) ->
     call(Supervisor, which_children).
 
--ifdef(use_specs).
 -spec count_children(SupRef) -> PropListOfCounts when
       SupRef :: sup_ref(),
       PropListOfCounts :: [Count],
@@ -286,15 +243,14 @@ which_children(Supervisor) ->
              | {active, ActiveProcessCount :: non_neg_integer()}
              | {supervisors, ChildSupervisorCount :: non_neg_integer()}
              |{workers, ChildWorkerCount :: non_neg_integer()}.
--endif.
+
 count_children(Supervisor) ->
     call(Supervisor, count_children).
 
--ifdef(use_specs).
 -spec find_child(Supervisor, Name) -> [pid()] when
       Supervisor :: sup_ref(),
       Name :: child_id().
--endif.
+
 find_child(Supervisor, Name) ->
     [Pid || {Name1, Pid, _Type, _Modules} <- which_children(Supervisor),
             Name1 =:= Name].
@@ -302,11 +258,10 @@ find_child(Supervisor, Name) ->
 call(Supervisor, Req) ->
     gen_server:call(Supervisor, Req, infinity).
 
--ifdef(use_specs).
 -spec check_childspecs(ChildSpecs) -> Result when
       ChildSpecs :: [child_spec()],
       Result :: 'ok' | {'error', Error :: term()}.
--endif.
+
 check_childspecs(ChildSpecs) when is_list(ChildSpecs) ->
     case check_startspec(ChildSpecs) of
        {ok, _} -> ok;
@@ -316,12 +271,12 @@ check_childspecs(X) -> {error, {badarg, X}}.
 
 %%%-----------------------------------------------------------------
 %%% Called by timer:apply_after from restart/2
--ifdef(use_specs).
+
 -spec try_again_restart(SupRef, Child, Reason) -> ok when
       SupRef :: sup_ref(),
       Child :: child_id() | pid(),
       Reason :: term().
--endif.
+
 try_again_restart(Supervisor, Child, Reason) ->
     cast(Supervisor, {try_again_restart, Child, Reason}).
 
@@ -329,11 +284,11 @@ cast(Supervisor, Req) ->
     gen_server:cast(Supervisor, Req).
 
 %%% ---------------------------------------------------
-%%% 
+%%%
 %%% Initialize the supervisor.
-%%% 
+%%%
 %%% ---------------------------------------------------
--ifdef(use_specs).
+
 -type init_sup_name() :: sup_name() | 'self'.
 
 -type stop_rsn() :: {'shutdown', term()}
@@ -344,7 +299,7 @@ cast(Supervisor, Req) ->
 
 -spec init({init_sup_name(), module(), [term()]}) ->
         {'ok', state()} | 'ignore' | {'stop', stop_rsn()}.
--endif.
+
 init({SupName, Mod, Args}) ->
     process_flag(trap_exit, true),
     case Mod:init(Args) of
@@ -448,14 +403,14 @@ do_start_child_i(M, F, A) ->
     end.
 
 %%% ---------------------------------------------------
-%%% 
+%%%
 %%% Callback functions.
-%%% 
+%%%
 %%% ---------------------------------------------------
--ifdef(use_specs).
+
 -type call() :: 'which_children' | 'count_children' | {_, _}.  % XXX: refine
 -spec handle_call(call(), term(), state()) -> {'reply', term(), state()}.
--endif.
+
 handle_call({start_child, EArgs}, _From, State) when ?is_simple(State) ->
     Child = hd(State#state.children),
     #child{mfargs = {M, F, A}} = Child,
@@ -638,10 +593,10 @@ count_child(#child{pid = Pid, child_type = supervisor},
 %%% If a restart attempt failed, this message is sent via
 %%% timer:apply_after(0,...) in order to give gen_server the chance to
 %%% check it's inbox before trying again.
--ifdef(use_specs).
+
 -spec handle_cast({try_again_restart, child_id() | pid(), term()}, state()) ->
                         {'noreply', state()} | {stop, shutdown, state()}.
--endif.
+
 handle_cast({try_again_restart,Pid,Reason}, #state{children=[Child]}=State)
   when ?is_simple(State) ->
     RT = Child#child.restart_type,
@@ -667,10 +622,10 @@ handle_cast({try_again_restart,Name,Reason}, State) ->
 %%
 %% Take care of terminated children.
 %%
--ifdef(use_specs).
+
 -spec handle_info(term(), state()) ->
         {'noreply', state()} | {'stop', 'shutdown', state()}.
--endif.
+
 handle_info({'EXIT', Pid, Reason}, State) ->
     case restart_child(Pid, Reason, State) of
        {ok, State1} ->
@@ -698,16 +653,16 @@ handle_info({delayed_restart, {RestartType, Reason, Child}}, State) ->
 %% this is important.
 
 handle_info(Msg, State) ->
-    error_logger:error_msg("Supervisor received unexpected message: ~p~n", 
+    error_logger:error_msg("Supervisor received unexpected message: ~p~n",
                           [Msg]),
     {noreply, State}.
 
 %%
 %% Terminate this server.
 %%
--ifdef(use_specs).
+
 -spec terminate(term(), state()) -> 'ok'.
--endif.
+
 terminate(_Reason, #state{children=[Child]} = State) when ?is_simple(State) ->
     terminate_dynamic_children(Child, dynamics_db(Child#child.restart_type,
                                                   State#state.dynamics),
@@ -724,10 +679,10 @@ terminate(_Reason, State) ->
 %% NOTE: This requires that the init function of the call-back module
 %%       does not have any side effects.
 %%
--ifdef(use_specs).
+
 -spec code_change(term(), state(), term()) ->
         {'ok', state()} | {'error', term()}.
--endif.
+
 code_change(_, State, _) ->
     case (State#state.module):init(State#state.args) of
        {ok, {SupFlags, StartSpec}} ->
@@ -795,7 +750,7 @@ update_chsp(OldCh, Children) ->
        NewC ->
            {ok, NewC}
     end.
-    
+
 %%% ---------------------------------------------------
 %%% Start a new child.
 %%% ---------------------------------------------------
@@ -1049,13 +1004,13 @@ do_terminate(Child, _SupName) ->
     Child#child{pid = undefined}.
 
 %%-----------------------------------------------------------------
-%% Shutdowns a child. We must check the EXIT value 
+%% Shutdowns a child. We must check the EXIT value
 %% of the child, because it might have died with another reason than
-%% the wanted. In that case we want to report the error. We put a 
-%% monitor on the child an check for the 'DOWN' message instead of 
-%% checking for the 'EXIT' message, because if we check the 'EXIT' 
-%% message a "naughty" child, who does unlink(Sup), could hang the 
-%% supervisor. 
+%% the wanted. In that case we want to report the error. We put a
+%% monitor on the child an check for the 'DOWN' message instead of
+%% checking for the 'EXIT' message, because if we check the 'EXIT'
+%% message a "naughty" child, who does unlink(Sup), could hang the
+%% supervisor.
 %% Returns: ok | {error, OtherReason}  (this should be reported)
 %%-----------------------------------------------------------------
 shutdown(Pid, brutal_kill) ->
@@ -1068,14 +1023,14 @@ shutdown(Pid, brutal_kill) ->
                {'DOWN', _MRef, process, Pid, OtherReason} ->
                    {error, OtherReason}
            end;
-       {error, Reason} ->      
+       {error, Reason} ->
            {error, Reason}
     end;
 shutdown(Pid, Time) ->
     case monitor_child(Pid) of
        ok ->
            exit(Pid, shutdown), %% Try to shutdown gracefully
-           receive 
+           receive
                {'DOWN', _MRef, process, Pid, shutdown} ->
                    ok;
                {'DOWN', _MRef, process, Pid, OtherReason} ->
@@ -1087,14 +1042,14 @@ shutdown(Pid, Time) ->
                            {error, OtherReason}
                    end
            end;
-       {error, Reason} ->      
+       {error, Reason} ->
            {error, Reason}
     end.
 
 %% Help function to shutdown/2 switches from link to monitor approach
 monitor_child(Pid) ->
-    
-    %% Do the monitor operation first so that if the child dies 
+
+    %% Do the monitor operation first so that if the child dies
     %% before the monitoring is done causing a 'DOWN'-message with
     %% reason noproc, we will get the real reason in the 'EXIT'-message
     %% unless a naughty child has already done unlink...
@@ -1104,19 +1059,19 @@ monitor_child(Pid) ->
     receive
        %% If the child dies before the unlik we must empty
        %% the mail-box of the 'EXIT'-message and the 'DOWN'-message.
-       {'EXIT', Pid, Reason} -> 
-           receive 
+       {'EXIT', Pid, Reason} ->
+           receive
                {'DOWN', _, process, Pid, _} ->
                    {error, Reason}
            end
-    after 0 -> 
+    after 0 ->
            %% If a naughty child did unlink and the child dies before
-           %% monitor the result will be that shutdown/2 receives a 
+           %% monitor the result will be that shutdown/2 receives a
            %% 'DOWN'-message with reason noproc.
            %% If the child should die after the unlink there
            %% will be a 'DOWN'-message with a correct reason
-           %% that will be handled in shutdown/2. 
-           ok   
+           %% that will be handled in shutdown/2.
+           ok
     end.
 
 
@@ -1443,8 +1398,8 @@ validChildType(What) -> throw({invalid_child_type, What}).
 
 validName(_Name) -> true.
 
-validFunc({M, F, A}) when is_atom(M), 
-                          is_atom(F), 
+validFunc({M, F, A}) when is_atom(M),
+                          is_atom(F),
                           is_list(A) -> true;
 validFunc(Func)                      -> throw({invalid_mfa, Func}).
 
@@ -1462,7 +1417,7 @@ validDelay(Delay) when is_number(Delay),
                        Delay >= 0 -> true;
 validDelay(What)                  -> throw({invalid_delay, What}).
 
-validShutdown(Shutdown, _) 
+validShutdown(Shutdown, _)
   when is_integer(Shutdown), Shutdown > 0 -> true;
 validShutdown(infinity, _)             -> true;
 validShutdown(brutal_kill, _)          -> true;
@@ -1488,11 +1443,11 @@ validMods(Mods) -> throw({invalid_modules, Mods}).
 %%% Returns: {ok, State'} | {terminate, State'}
 %%% ------------------------------------------------------
 
-add_restart(State) ->  
+add_restart(State) ->
     I = State#state.intensity,
     P = State#state.period,
     R = State#state.restarts,
-    Now = erlang:now(),
+    Now = time_compat:monotonic_time(),
     R1 = add_restart([Now|R], Now, P),
     State1 = State#state{restarts = R1},
     case length(R1) of
@@ -1513,26 +1468,13 @@ add_restart([], _, _) ->
     [].
 
 inPeriod(Time, Now, Period) ->
-    case difference(Time, Now) of
+    case time_compat:convert_time_unit(Now - Time, native, seconds) of
        T when T > Period ->
            false;
        _ ->
            true
     end.
 
-%%
-%% Time = {MegaSecs, Secs, MicroSecs} (NOTE: MicroSecs is ignored)
-%% Calculate the time elapsed in seconds between two timestamps.
-%% If MegaSecs is equal just subtract Secs.
-%% Else calculate the Mega difference and add the Secs difference,
-%% note that Secs difference can be negative, e.g.
-%%      {827, 999999, 676} diff {828, 1, 653753} == > 2 secs.
-%%
-difference({TimeM, TimeS, _}, {CurM, CurS, _}) when CurM > TimeM ->
-    ((CurM - TimeM) * 1000000) + (CurS - TimeS);
-difference({_, TimeS, _}, {_, CurS, _}) ->
-    CurS - TimeS.
-
 %%% ------------------------------------------------------
 %%% Error and progress reporting.
 %%% ------------------------------------------------------
diff --git a/deps/rabbit_common/src/time_compat.erl b/deps/rabbit_common/src/time_compat.erl
new file mode 100644 (file)
index 0000000..6604431
--- /dev/null
@@ -0,0 +1,347 @@
+%%
+%% %CopyrightBegin%
+%% 
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
+%% 
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%% 
+%% %CopyrightEnd%
+%%
+
+%%
+%% If your code need to be able to execute on ERTS versions both
+%% earlier and later than 7.0, the best approach is to use the new
+%% time API introduced in ERTS 7.0 and implement a fallback
+%% solution using the old primitives to be used on old ERTS
+%% versions. This way your code can automatically take advantage
+%% of the improvements in the API when available. This is an
+%% example of how to implement such an API, but it can be used
+%% as is if you want to. Just add (a preferrably renamed version of)
+%% this module to your project, and call the API via this module
+%% instead of calling the BIFs directly.
+%%
+
+-module(time_compat).
+
+%% We don't want warnings about the use of erlang:now/0 in
+%% this module.
+-compile(nowarn_deprecated_function).
+%%
+%% We don't use
+%%   -compile({nowarn_deprecated_function, [{erlang, now, 0}]}).
+%% since this will produce warnings when compiled on systems
+%% where it has not yet been deprecated.
+%%
+
+%% Declare versioned functions to allow dynamic code loading,
+%% depending on the Erlang version running. See 'code_version.erl' for details
+-erlang_version_support(
+   [{18,
+     [{monotonic_time, 0, monotonic_time_pre_18, monotonic_time_post_18},
+      {monotonic_time, 1, monotonic_time_pre_18, monotonic_time_post_18},
+      {erlang_system_time, 0, erlang_system_time_pre_18, erlang_system_time_post_18},
+      {erlang_system_time, 1, erlang_system_time_pre_18, erlang_system_time_post_18},
+      {os_system_time, 0, os_system_time_pre_18, os_system_time_post_18},
+      {os_system_time, 1, os_system_time_pre_18, os_system_time_post_18},
+      {time_offset, 0, time_offset_pre_18, time_offset_post_18},
+      {time_offset, 1, time_offset_pre_18, time_offset_post_18},
+      {convert_time_unit, 3, convert_time_unit_pre_18, convert_time_unit_post_18},
+      {timestamp, 0, timestamp_pre_18, timestamp_post_18},
+      {unique_integer, 0, unique_integer_pre_18, unique_integer_post_18},
+      {unique_integer, 1, unique_integer_pre_18, unique_integer_post_18}]}
+   ]).
+
+-export([monotonic_time/0,
+         monotonic_time_pre_18/0,
+         monotonic_time_post_18/0,
+         monotonic_time/1,
+         monotonic_time_pre_18/1,
+         monotonic_time_post_18/1,
+         erlang_system_time/0,
+         erlang_system_time_pre_18/0,
+         erlang_system_time_post_18/0,
+         erlang_system_time/1,
+         erlang_system_time_pre_18/1,
+         erlang_system_time_post_18/1,
+         os_system_time/0,
+         os_system_time_pre_18/0,
+         os_system_time_post_18/0,
+         os_system_time/1,
+         os_system_time_pre_18/1,
+         os_system_time_post_18/1,
+         time_offset/0,
+         time_offset_pre_18/0,
+         time_offset_post_18/0,
+         time_offset/1,
+         time_offset_pre_18/1,
+         time_offset_post_18/1,
+         convert_time_unit/3,
+         convert_time_unit_pre_18/3,
+         convert_time_unit_post_18/3,
+         timestamp/0,
+         timestamp_pre_18/0,
+         timestamp_post_18/0,
+         unique_integer/0,
+         unique_integer_pre_18/0,
+         unique_integer_post_18/0,
+         unique_integer/1,
+         unique_integer_pre_18/1,
+         unique_integer_post_18/1,
+         monitor/2,
+         system_info/1,
+         system_flag/2]).
+
+monotonic_time() ->
+    code_version:update(?MODULE),
+    time_compat:monotonic_time().
+
+monotonic_time_post_18() ->
+       erlang:monotonic_time().
+
+monotonic_time_pre_18() ->
+    erlang_system_time_fallback().
+
+monotonic_time(Unit) ->
+    code_version:update(?MODULE),
+    time_compat:monotonic_time(Unit).
+
+monotonic_time_post_18(Unit) ->
+    erlang:monotonic_time(Unit).
+
+monotonic_time_pre_18(Unit) ->
+    %% Use Erlang system time as monotonic time
+    STime = erlang_system_time_fallback(),
+    convert_time_unit_fallback(STime, native, Unit).
+
+erlang_system_time() ->
+    code_version:update(?MODULE),
+    time_compat:erlang_system_time().
+
+erlang_system_time_post_18() ->
+       erlang:system_time().
+
+erlang_system_time_pre_18() ->
+    erlang_system_time_fallback().
+
+erlang_system_time(Unit) ->
+    code_version:update(?MODULE),
+    time_compat:erlang_system_time(Unit).
+
+erlang_system_time_post_18(Unit) ->
+    erlang:system_time(Unit).
+
+erlang_system_time_pre_18(Unit) ->
+    STime = erlang_system_time_fallback(),
+    convert_time_unit_fallback(STime, native, Unit).
+
+os_system_time() ->
+    code_version:update(?MODULE),
+    time_compat:os_system_time().
+
+os_system_time_post_18() ->
+       os:system_time().
+
+os_system_time_pre_18() ->
+    os_system_time_fallback().
+
+os_system_time(Unit) ->
+    code_version:update(?MODULE),
+    time_compat:os_system_time(Unit).
+
+os_system_time_post_18(Unit) ->
+    os:system_time(Unit).
+
+os_system_time_pre_18(Unit) ->
+    STime = os_system_time_fallback(),
+    convert_time_unit_fallback(STime, native, Unit).
+
+time_offset() ->
+    code_version:update(?MODULE),
+    time_compat:time_offset().
+
+time_offset_post_18() ->
+       erlang:time_offset().
+
+time_offset_pre_18() ->
+    %% Erlang system time and Erlang monotonic
+    %% time are always aligned
+    0.
+
+time_offset(Unit) ->
+    code_version:update(?MODULE),
+    time_compat:time_offset(Unit).
+
+time_offset_post_18(Unit) ->
+    erlang:time_offset(Unit).
+
+time_offset_pre_18(Unit) ->
+    _ = integer_time_unit(Unit),
+    %% Erlang system time and Erlang monotonic
+    %% time are always aligned
+    0.
+
+convert_time_unit(Time, FromUnit, ToUnit) ->
+    code_version:update(?MODULE),
+    time_compat:convert_time_unit(Time, FromUnit, ToUnit).
+
+convert_time_unit_post_18(Time, FromUnit, ToUnit) ->
+    try
+        erlang:convert_time_unit(Time, FromUnit, ToUnit)
+    catch
+        error:Error ->
+           erlang:error(Error, [Time, FromUnit, ToUnit])
+    end.
+
+convert_time_unit_pre_18(Time, FromUnit, ToUnit) ->
+    try
+        convert_time_unit_fallback(Time, FromUnit, ToUnit)
+    catch
+               _:_ ->
+                   erlang:error(badarg, [Time, FromUnit, ToUnit])
+    end.
+
+timestamp() ->
+    code_version:update(?MODULE),
+    time_compat:timestamp().
+
+timestamp_post_18() ->
+       erlang:timestamp().
+
+timestamp_pre_18() ->
+    erlang:now().
+
+unique_integer() ->
+    code_version:update(?MODULE),
+    time_compat:unique_integer().
+
+unique_integer_post_18() ->
+       erlang:unique_integer().
+
+unique_integer_pre_18() ->
+    {MS, S, US} = erlang:now(),
+    (MS*1000000+S)*1000000+US.
+
+unique_integer(Modifiers) ->
+    code_version:update(?MODULE),
+    time_compat:unique_integer(Modifiers).
+
+unique_integer_post_18(Modifiers) ->
+    erlang:unique_integer(Modifiers).
+
+unique_integer_pre_18(Modifiers) ->
+    case is_valid_modifier_list(Modifiers) of
+               true ->
+                   %% now() converted to an integer
+                   %% fullfill the requirements of
+                   %% all modifiers: unique, positive,
+                   %% and monotonic...
+                   {MS, S, US} = erlang:now(),
+                   (MS*1000000+S)*1000000+US;
+               false ->
+                   erlang:error(badarg, [Modifiers])
+    end.
+
+monitor(Type, Item) ->
+    try
+       erlang:monitor(Type, Item)
+    catch
+       error:Error ->
+           case {Error, Type, Item} of
+               {badarg, time_offset, clock_service} ->
+                   %% Time offset is final and will never change.
+                   %% Return a dummy reference, there will never
+                   %% be any need for 'CHANGE' messages...
+                   make_ref();
+               _ ->
+                   erlang:error(Error, [Type, Item])
+           end
+    end.
+
+system_info(Item) ->
+    try
+       erlang:system_info(Item)
+    catch
+       error:badarg ->
+           case Item of
+               time_correction ->
+                   case erlang:system_info(tolerant_timeofday) of
+                       enabled -> true;
+                       disabled -> false
+                   end;
+               time_warp_mode ->
+                   no_time_warp;
+               time_offset ->
+                   final;
+               NotSupArg when NotSupArg == os_monotonic_time_source;
+                              NotSupArg == os_system_time_source;
+                              NotSupArg == start_time;
+                              NotSupArg == end_time ->
+                   %% Cannot emulate this...
+                   erlang:error(notsup, [NotSupArg]);
+               _ ->
+                   erlang:error(badarg, [Item])
+           end;
+       error:Error ->
+           erlang:error(Error, [Item])
+    end.
+
+system_flag(Flag, Value) ->
+    try
+       erlang:system_flag(Flag, Value)
+    catch
+       error:Error ->
+           case {Error, Flag, Value} of
+               {badarg, time_offset, finalize} ->
+                   %% Time offset is final
+                   final;
+               _ ->
+                   erlang:error(Error, [Flag, Value])
+           end
+    end.
+
+%%
+%% Internal functions
+%%
+
+integer_time_unit(native) -> 1000*1000;
+integer_time_unit(nano_seconds) -> 1000*1000*1000;
+integer_time_unit(micro_seconds) -> 1000*1000;
+integer_time_unit(milli_seconds) -> 1000;
+integer_time_unit(seconds) -> 1;
+integer_time_unit(I) when is_integer(I), I > 0 -> I;
+integer_time_unit(BadRes) -> erlang:error(badarg, [BadRes]).
+
+erlang_system_time_fallback() ->
+    {MS, S, US} = erlang:now(),
+    (MS*1000000+S)*1000000+US.
+
+os_system_time_fallback() ->
+    {MS, S, US} = os:timestamp(),
+    (MS*1000000+S)*1000000+US.
+
+convert_time_unit_fallback(Time, FromUnit, ToUnit) ->
+    FU = integer_time_unit(FromUnit),
+    TU = integer_time_unit(ToUnit),
+    case Time < 0 of
+       true -> TU*Time - (FU - 1);
+       false -> TU*Time
+    end div FU.
+
+is_valid_modifier_list([positive|Ms]) ->
+    is_valid_modifier_list(Ms);
+is_valid_modifier_list([monotonic|Ms]) ->
+    is_valid_modifier_list(Ms);
+is_valid_modifier_list([]) ->
+    true;
+is_valid_modifier_list(_) ->
+    false.
diff --git a/deps/rabbitmq_amqp1_0/CODE_OF_CONDUCT.md b/deps/rabbitmq_amqp1_0/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_amqp1_0/CONTRIBUTING.md b/deps/rabbitmq_amqp1_0/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_amqp1_0/Makefile b/deps/rabbitmq_amqp1_0/Makefile
new file mode 100644 (file)
index 0000000..e320af4
--- /dev/null
@@ -0,0 +1,57 @@
+PROJECT = rabbitmq_amqp1_0
+
+DEPS = rabbit_common rabbit amqp_client
+TEST_DEPS = rabbitmq_ct_helpers
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+EXTRA_SOURCES += include/rabbit_amqp1_0_framing.hrl \
+                src/rabbit_amqp1_0_framing0.erl
+
+.DEFAULT_GOAL = all
+$(PROJECT).d:: $(EXTRA_SOURCES)
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
+
+# --------------------------------------------------------------------
+# Framing sources generation.
+# --------------------------------------------------------------------
+
+PYTHON       ?= python
+CODEGEN       = $(CURDIR)/codegen.py
+CODEGEN_DIR  ?= $(DEPS_DIR)/rabbitmq_codegen
+CODEGEN_AMQP  = $(CODEGEN_DIR)/amqp_codegen.py
+CODEGEN_SPECS = spec/messaging.xml spec/security.xml spec/transport.xml \
+               spec/transactions.xml
+
+include/rabbit_amqp1_0_framing.hrl:: $(CODEGEN) $(CODEGEN_AMQP) \
+    $(CODEGEN_SPECS)
+       $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \
+         $(PYTHON) $(CODEGEN) hrl $(CODEGEN_SPECS) > $@
+
+src/rabbit_amqp1_0_framing0.erl:: $(CODEGEN) $(CODEGEN_AMQP) \
+    $(CODEGEN_SPECS)
+       $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \
+         $(PYTHON) $(CODEGEN) erl $(CODEGEN_SPECS) > $@
+
+clean:: clean-extra-sources
+
+clean-extra-sources:
+       $(gen_verbose) rm -f $(EXTRA_SOURCES)
+
+distclean:: distclean-dotnet-tests distclean-java-tests
+
+distclean-dotnet-tests:
+       $(gen_verbose) cd test/system_SUITE_data/dotnet-tests && \
+               rm -rf bin obj && \
+               rm -f project.lock.json TestResult.xml
+
+distclean-java-tests:
+       $(gen_verbose) cd test/system_SUITE_data/java-tests && mvn clean
similarity index 72%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/codegen.py
rename to deps/rabbitmq_amqp1_0/codegen.py
index 145cfe5bbcbfd15b780db298e323ba6f3d146c7a..7dbcfef0d1f281bde761f22ef04dcd70720ef69d 100755 (executable)
@@ -1,4 +1,7 @@
 #!/usr/bin/env python
+
+from __future__ import print_function
+
 import sys
 import os
 import re
@@ -37,50 +40,50 @@ class AMQPDefines:
                         dom.getElementsByTagName('choice')]
 
 def print_erl(types):
-    print """-module(rabbit_amqp1_0_framing0).
+    print("""-module(rabbit_amqp1_0_framing0).
 -export([record_for/1, fields/1, encode/1, symbol_for/1, number_for/1]).
--include("rabbit_amqp1_0.hrl")."""
+-include("rabbit_amqp1_0.hrl").""")
     for t in types:
-        print """record_for({symbol, <<"%s">>}) ->
-    #'v1_0.%s'{};""" % (t.desc, t.name)
+        print("""record_for({symbol, <<"%s">>}) ->
+    #'v1_0.%s'{};""" % (t.desc, t.name))
         if t.code:
-            print """record_for({_, %d}) ->
-    #'v1_0.%s'{};""" % (t.number, t.name)
-            print "%% %s\n" % t.code
+            print("""record_for({_, %d}) ->
+    #'v1_0.%s'{};""" % (t.number, t.name))
+            print("%% %s\n" % t.code)
 
-    print """record_for(Other) -> exit({unknown, Other}).
+    print("""record_for(Other) -> exit({unknown, Other}).
 
-"""
+""")
     for t in types:
-        print """fields(#'v1_0.%s'{}) -> record_info(fields, 'v1_0.%s');""" % (t.name, t.name)
-    print """fields(_Other) -> unknown.
+        print("""fields(#'v1_0.%s'{}) -> record_info(fields, 'v1_0.%s');""" % (t.name, t.name))
+    print("""fields(_Other) -> unknown.
 
-"""
+""")
     for t in types:
-        print """encode(Frame = #'v1_0.%s'{}) ->
-    rabbit_amqp1_0_framing:encode_described('%s', %s, Frame);""" % (t.name, t.source, t.number)
-    print """encode(undefined) -> null;
+        print("""encode(Frame = #'v1_0.%s'{}) ->
+    rabbit_amqp1_0_framing:encode_described('%s', %s, Frame);""" % (t.name, t.source, t.number))
+    print("""encode(undefined) -> null;
 encode(Other) -> Other.
 
-"""
+""")
     for t in types:
-        print """symbol_for(#'v1_0.%s'{}) ->
-    {symbol, <<"%s">>};""" % (t.name, t.desc)
-    print """symbol_for(Other) -> exit({unknown, Other}).
+        print("""symbol_for(#'v1_0.%s'{}) ->
+    {symbol, <<"%s">>};""" % (t.name, t.desc))
+    print("""symbol_for(Other) -> exit({unknown, Other}).
 
-"""
+""")
     for t in types:
-        print """number_for(#'v1_0.%s'{}) ->
-    {ulong, %s};""" % (t.name, t.number)
-    print """number_for(Other) -> exit({unknown, Other})."""
+        print("""number_for(#'v1_0.%s'{}) ->
+    {ulong, %s};""" % (t.name, t.number))
+    print("""number_for(Other) -> exit({unknown, Other}).""")
 
 def print_hrl(types, defines):
     for t in types:
-        print """-record('v1_0.%s', {%s}).""" % (t.name, ", ".join(t.fields))
+        print("""-record('v1_0.%s', {%s}).""" % (t.name, ", ".join(t.fields)))
         print_define(t.define(), 'symbol')
     for d in defines:
         if len(d.options) > 0:
-            print """ %% %s""" % (d.name)
+            print(""" %% %s""" % (d.name))
             for opt in d.options:
                 print_define(opt, d.source)
 
@@ -90,7 +93,7 @@ def print_define(opt, source):
         quoted = '<<"%s">>' % value
     else:
         quoted = value
-    print """-define(V_1_0_%s, {%s, %s}).""" % (name, source, quoted)
+    print("""-define(V_1_0_%s, {%s, %s}).""" % (name, source, quoted))
 
 def want_type(el):
     descriptors = el.getElementsByTagName('descriptor')
diff --git a/deps/rabbitmq_amqp1_0/erlang.mk b/deps/rabbitmq_amqp1_0/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards 'in' and 'between' for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab protocol implemented in Erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print($(1)): expand to a printf recipe fragment describing package $(1)
+# from the pkg_$(1)_* index above.  The "Pkg name" line is suppressed when the
+# index key equals the app name (core_eq).  The trailing blank line inside the
+# define is intentional: it separates consecutive entries when pkg_print is
+# expanded repeatedly by $(foreach) in the `search` target.  No comments may be
+# added inside the define — its body becomes recipe text.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+
+# `make search q=STRING` prints the packages whose name or description
+# contains STRING (case-insensitive via core_lc); plain `make search` dumps
+# every registered package.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Rebar-built dependencies are fetched into the same directory tree.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name/dep_repo/dep_commit($(1)): resolve a dependency spec either from an
+# explicit per-project `dep_<name> = fetch repo commit` variable or, failing
+# that, from the built-in pkg_<name>_* index.  dep_repo also rewrites legacy
+# git:// GitHub URLs to https:// (GitHub dropped the git protocol).
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# All first-level application dirs under APPS_DIR (the dir itself excluded),
+# and all dependency dirs implied by BUILD_DEPS/DEPS minus IGNORE_DEPS.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Ensure the Erlang VM can find apps and deps via ERL_LIBS, without
+# duplicating entries that are already present.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# apps: build every application under APPS_DIR exactly once per top-level
+# invocation.  apps.log records which apps have already been built so that
+# recursive $(MAKE) calls (IS_APP=1) do not rebuild or loop; the log is reset
+# only by the outermost invocation (neither IS_APP nor IS_DEP set).
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# deps: fetch (via the dep_target rules below) and build each dependency once,
+# using deps.log for the same run-once bookkeeping as apps.log above.  A dep
+# without any Makefile is a hard error (exit 2).  SKIP_DEPS turns the target
+# into a no-op.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch($(1)): decide how to adapt a freshly fetched dependency to the
+# erlang.mk build.  Decision tree: already erlang.mk-based -> regenerate
+# .app.src and replace its erlang.mk; Makefile that includes *.mk or mentions
+# rebar -> full autopatch (dep_autopatch2); plain Makefile -> only fix the
+# .app file; no Makefile and no src/ -> stub noop Makefile; otherwise -> full
+# autopatch.  (Comments cannot go inside the define — it expands into a
+# recipe.)
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2($(1)): normalize .app.src (evaluating any .app.src.script
+# first), then either convert a rebar project (fetching a pinned rebar binary)
+# or generate a minimal erlang.mk Makefile for it.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop($(1)): give a source-less dependency a Makefile whose only
+# target does nothing, so the deps loop can still `make -C` into it.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen($(1)): minimal erlang.mk Makefile for a plain-src dep.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: one-time clone and build of rebar at a pinned
+# commit, cached under $(ERLANG_MK_TMP)/rebar; its modules are loaded by the
+# dep_autopatch_rebar.erl script below.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar($(1)): stash the dep's own Makefile as Makefile.orig.mk,
+# run the Erlang conversion script below, and drop any prebuilt .app so it is
+# regenerated by erlang.mk.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# dep_autopatch_rebar.erl($(1)): Erlang script (expanded by make, hence all the
+# $$ escaping) that translates a rebar project into an erlang.mk Makefile.  It
+# reads rebar.config (and evaluates rebar.config.script with CONFIG/SCRIPT
+# bindings, capturing any env vars the script sets), then appends to the dep's
+# Makefile: ERLC_OPTS derived from erl_opts, DEPS/dep_* lines derived from the
+# rebar deps list, COMPILE_FIRST from erl_first_files, pre-deps/pre-app hooks
+# rewritten to $(MAKE) invocations, and a generated c_src/Makefile.erlang.mk
+# for port_specs/port_env C code.  Finally it runs any rebar compile plugins
+# the project declares.  Comments cannot be placed inside the define; the body
+# must stay byte-identical as the escaping is extremely order- and
+# character-sensitive.
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl($(1)): rewrite the dep's existing ebin/<app>.app so its
+# `modules` list matches the .erl files actually present under src/.
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+# dep_autopatch_appsrc_script.erl($(1)): evaluate src/<app>.app.src.script and
+# materialize its result as a static src/<app>.app.src file.
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+# dep_autopatch_appsrc.erl($(1)): normalize the app resource file — take
+# src/<app>.app.src if present, else fall back to ebin/<app>.app; empty the
+# modules list (erlang.mk regenerates it), replace `{vsn, git}` with a literal
+# "git", ensure a `registered` entry exists, then write the result to
+# src/<app>.app.src and remove the fallback source if one was used.
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+# dep_fetch_<method>($(1)): one macro per supported fetch method (git,
+# git-submodule, hg, svn, cp, hex, legacy).  Each expands to shell commands
+# that populate $(DEPS_DIR)/<dep_name>.
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# NOTE(review): this downloads from the legacy s3.hex.pm tarball endpoint,
+# which hex.pm has since migrated away from — confirm it still resolves before
+# relying on hex fetches from this vendored copy.
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# dep_fetch($(1)): select the fetch method name for a dependency — from its
+# dep_<name> spec if defined (falling back to `legacy` inside a recursive dep
+# build, else `fail` for unknown methods), otherwise from the package index.
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+# dep_target($1): instantiated once per BUILD_DEPS/DEPS entry via the
+# $(foreach)+$(eval) below.  The generated rule: refuses deps that clash with
+# an in-tree app, fetches via the resolved dep_fetch_* macro, runs
+# autoreconf/configure for autotools projects, and (unless listed in
+# NO_AUTOPATCH) applies dep_autopatch — with special-case handling for
+# RabbitMQ's amqp_client/rabbit patched builds.
+# NOTE(review): the configure.ac/autoreconf step uses $(1) as the directory
+# while the ./configure step uses $(DEP_NAME); these differ when a dep is
+# registered under another package name — upstream behavior, left as-is.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+# Generate one fetch rule per declared dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every in-tree app (outermost invocation only),
+# and remove the whole DEPS_DIR on distclean unless SKIP_DEPS is set.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin($(1)=plugin makefile path, $(2)=owning dep): include the
+# plugin makefile if present, and make it order-only dependent on its dep
+# being fetched.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# Entries containing "/" are explicit paths ("dep/path/to/file.mk"); bare
+# names default to "<dep>/plugins.mk".
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL plugin: compiles Django-style templates (*.dtl under DTL_PATH)
+# into <name>$(DTL_SUFFIX).beam modules in ebin/.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# When DTL_FULL_PATH is set the module name encodes the template's path
+# relative to DTL_PATH ("/" mapped to "_"); otherwise just the basename.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet executed at build time; $(1) is the space-separated list of
+# .dtl files. A compile error raises a case_clause/badmatch, aborting the VM.
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# $? restricts compilation to out-of-date templates only.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin: compiles src/*.proto via protobuffs into .beam.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Three-step pipeline: generate .erl/.hrl from .proto, compile the generated
+# .erl into ebin/, then remove the intermediate sources from ebin/.
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+# Erlang snippet; $(1) is the space-separated list of .proto files.
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; the commented-out ones are opt-in extras.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# Convention: <tool>_verbose_0 prints a terse one-liner (V=0),
+# <tool>_verbose_2 traces the shell commands (V=2), and unset V or V=1
+# falls through to the (empty) default, echoing the raw recipe.
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# ebin/test is a marker left by test-build; if present, the previous build
+# used TEST_ERLC_OPTS, so force a clean rebuild for a normal app build.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# .app resource template. Library variant (no application callback module);
+# $(1) = version id, $(2) = module list. Do not add comments inside the
+# defines: their bodies are written verbatim into ebin/$(PROJECT).app.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+# Full application variant: registers the supervisor and sets {mod, ...}.
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# +noobj emits only .erl/.hrl/.asn1db; the generated sources are then moved
+# into src/ and include/ to join the normal compilation flow.
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Two passes: compile .mib to .bin, then derive .hrl headers from the .bin.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang snippet that writes the $(PROJECT).d dependency makefile.
+# It parses every source's attributes: -behaviour / parse_transform /
+# -import create module-to-module edges (and feed a DAG used to topsort
+# COMPILE_FIRST), while -include / -include_lib record header deps.
+# $(1) is the output file name. Kept byte-identical: the embedded Erlang
+# is order-sensitive and expanded verbatim by $(call erlang,...).
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+# Regenerate $(PROJECT).d unless NO_MAKEDEP is set and the file exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# -Werror is stripped when building as a dependency so third-party code
+# does not fail the build on warnings.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Compile out-of-date sources, then generate ebin/$(PROJECT).app either from
+# the app_file template (no .app.src) or by patching src/$(PROJECT).app.src.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+# NOTE(review): '\s' in the grep pattern is a GNU grep extension — verify
+# portability if this must run with BSD grep.
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# With SKIP_DEPS set, doc-deps becomes a no-op.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+# With SKIP_DEPS set, rel-deps becomes a no-op.
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases: no src/ at all (tests only), first test build (no ebin/test
+# marker — clean first so everything is compiled with TEST_ERLC_OPTS), and
+# incremental test build (marker present).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Generates a rebar.config equivalent to this Makefile's deps/erl_opts so
+# the project can also be consumed by rebar-based builds.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# NOTE(review): only options starting with '+' are converted; other flags
+# (e.g. -D...) are dropped from the generated erl_opts — presumably
+# intentional, confirm against upstream erlang.mk before changing.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# hex deps get {name, "vsn"}; everything else a {git, Repo, Commit} tuple.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Exported through the environment so the recipe can emit it verbatim.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# AsciiDoc plugin: builds the user guide (PDF + chunked HTML) and the
+# manual pages, and installs the gzipped man pages.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# BUG FIX: the id(1) calls were swapped — `install -g` takes a *group* and
+# `-o` an *owner*, but the original passed `id -u` (uid) to -g and
+# `id -g` (gid) to -o. Install as the current user/group correctly.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+# Generalized: remove the man directory of every configured section rather
+# than hard-coding man3/man7 (identical result for the default "3 7").
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf $(foreach s,$(MAN_SECTIONS),doc/man$(s)/)
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Bootstrap plugin: project/app/module skeleton generation.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates. Each define is rendered verbatim by render_template,
+# with $p = project name and $n = module name; do not add comments inside
+# the define bodies, they would end up in the generated files.
+
+# src/<p>.app.src for a full application (legacy layout).
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+# src/<p>.app.src for a library application (no callback module).
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile for an application living under APPS_DIR; includes the top-level
+# erlang.mk via a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# Application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+# relx configuration plus rel/sys.config and rel/vm.args skeletons.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates. Rendered with $n = new module name by the `new` target;
+# comments must stay outside the define bodies.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy 1.x HTTP handler skeleton.
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Write template $(1) to file $(2), escaping newlines, %, and quotes for
+# printf, and replacing tabs with $(WS).
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the indentation written into generated files: SP spaces when SP is
+# set (the $(subst a,,...) trick turns a word list into that many spaces),
+# otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Generate a full OTP application skeleton in the current directory.
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Like bootstrap but for a library: no _app/_sup modules.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config and the rel/ runtime configuration files.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application named $(in) under APPS_DIR.
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library named $(in) under APPS_DIR.
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Render template $(t) as module $(n); with in=APP, delegate to that app's
+# own Makefile so the file lands in the right src/.
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# Templates are discovered by naming convention: any tpl_* variable.
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+# Per-platform compiler selection and default flags. All assignments use ?=
+# (except the MSYS2 CC override) so callers can pre-set their own flags.
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+# Position-independent code is required for shared objects everywhere but
+# Windows (MSYS2).
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+# NOTE(review): $(?F) is "prerequisites newer than the target", not the source
+# file of the pattern rule; $(<F) would be the conventional choice here for
+# echoing the file being compiled — confirm against upstream before changing.
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three mutually exclusive cases:
+#   1. no c_src directory       -> nothing to do;
+#   2. c_src/Makefile exists    -> delegate the build to it;
+#   3. otherwise                -> compile the discovered sources ourselves.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link step; -shared is added only when C_SRC_TYPE is "shared".
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# $(C_SRC_ENV) caches ERTS/erl_interface paths queried from the running Erlang
+# VM so they are computed once; it is removed by distclean and re-included on
+# the next run.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# bs_c_nif / bs_erl_nif are file templates rendered verbatim by the new-nif
+# target ($n is substituted with the requested module name at render time).
+# Their bodies are written into generated files, so no comments are added
+# inside them here.
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# new-nif: scaffold a C NIF stub ($(C_SRC_DIR)/$n.c) and its Erlang wrapper
+# (src/$n.erl). Refuses to overwrite existing files; with in=APP set it
+# recurses into that application's directory instead.
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# CI plugin: uses kerl to build/install each Erlang/OTP version listed in
+# CI_OTP, then runs the test suite against every one of them.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+# With CI_OTP empty, 'ci' is a no-op; otherwise one ci-<version> target is
+# generated per listed version via $(eval $(call ...)).
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# ci-<version>: run the tests with that OTP's bin/ first in PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# Build/install rule for each OTP version, only generated when it is not
+# already installed under CI_INSTALL_DIR.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# common_test plugin: discovers *_SUITE.erl files under TEST_DIR and provides
+# 'ct' plus one ct-<suite> target per suite.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+# With no suites found, 'ct' only recurses into apps (unless already in one).
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+# Recurse into each application directory with IS_APP=1 to avoid re-recursion.
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=GROUP or t=GROUP:CASE narrows a single-suite run to a group and case.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Dialyzer plugin: builds a per-project PLT and analyzes the sources.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Erlang snippet that keeps only -D/-I/-pa options (and their arguments) from
+# the plain command-line arguments, so ERLC_OPTS can be fed to dialyzer.
+# The \\$$ sequences survive make and shell escaping to become $ in Erlang
+# (character literals like $-).
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# 'dialyze' depends on the PLT only when it does not exist yet, so an existing
+# PLT is not rebuilt on every run. The recipe below applies to both branches.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# EDoc plugin: generates API documentation when doc/overview.edoc exists.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+# Regenerates from scratch each time (distclean-edoc runs first).
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Escript plugin: packs the project's beams (plus static files) into a single
+# self-executing escript archive.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+# The quoted, comma-separated values below are spliced directly into Erlang
+# list literals in ESCRIPT_RAW; keep the quoting style when overriding.
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+# help:: fragment describing the escript plugin.
+# Fix: the last printf argument previously ended with a stray trailing
+# backslash, which continued the recipe line onto the following blank line.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive"
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+# EUnit plugin: runs EUnit over ebin modules and TEST_DIR modules, with
+# optional cover data collection when COVER is set.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+# $1 is either a list of module atoms or a fun reference; exit code 1 means
+# cover compilation failed, 2 means a test failed.
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+# t=module runs one module; t=module:fun runs a single 0-arity test fun.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+# *_tests modules are excluded: EUnit already picks them up implicitly when
+# running the corresponding non-suffixed module.
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Relx plugin: downloads the relx release tool and builds/runs OTP releases
+# described by relx.config.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+# Honor a user-supplied "-o DIR" in RELX_OPTS; otherwise append our default.
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+# 'run' is a no-op without a relx.config.
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+# Extracts the release name from relx.config to locate the boot script.
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+# Shell plugin: starts an Erlang shell with project/app/dep ebin dirs on the
+# code path, after building any SHELL_DEPS.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+# Generate fetch rules for each shell-only dependency.
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Triq plugin: property-based testing; only active when triq is listed in
+# DEPS or TEST_DEPS.
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+# $(1) selects the mode (all/module/function); $(2) is the module or fun call;
+# $(3) is the module list in 'all' mode. Undefined properties are reported but
+# exit 0.
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+# t=module checks one module; t=module:fun checks a single property fun.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Xref plugin: downloads the xref_runner escript (xrefr) and runs
+# cross-reference analysis over the compiled project.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+# When XREF_CONFIG is set, pass it to xrefr as its configuration file.
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+# Fix: the help text used a single $, so make expanded the (undefined)
+# variable $X and printed "REF_CONFIG"; $$ escapes the dollar sign so the
+# literal variable name is shown.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $$XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Cover plugin: collects coverage data from eunit/ct runs and renders an
+# HTML report under COVER_REPORT_DIR.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+# Generate a common_test cover spec including every ebin module.
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+# Imports every coverdata file, writes one per-module HTML page plus an
+# index.html with per-module and total percentages.
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+# The list files are .PHONY so they are regenerated on every invocation.
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+# With SKIP_DEPS set, just truncate the list files to empty.
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+# The top-level invocation (neither IS_APP nor IS_DEP set) initializes the
+# shared temp list, recurses into apps and erlang.mk-based deps (the temp list
+# doubles as a visited-set to break dependency cycles), then sorts the result
+# into the final list file.
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_amqp1_0/rabbitmq-components.mk b/deps/rabbitmq_amqp1_0/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 79%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_generator.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_generator.erl
index 2277cb37d5ff019241790b4cc6dc9a2b26c2d518..cff8495244da602d2d770286276a1df8d221eae8 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_binary_generator).
 
 -include("rabbit_amqp1_0.hrl").
 
--ifdef(use_specs).
--spec(generate/1 :: (tuple()) -> iolist()).
--spec(build_frame/2 :: (int(), iolist()) -> iolist()).
--endif.
+-spec generate(tuple()) -> iolist().
+-spec build_frame(integer(), iolist()) -> iolist().
 
 -define(AMQP_FRAME_TYPE, 0).
 -define(DOFF, 2).
@@ -88,9 +86,14 @@ generate({list, List}) ->
     Count = length(List),
     Compound = lists:map(fun generate/1, List),
     S = iolist_size(Compound),
-    %% S < 256 -> Count < 256
-    if S > 255 -> [<<16#d0, (S + 4):32/unsigned, Count:32/unsigned>>, Compound];
-       true    -> [<<16#c0, (S + 1):8/unsigned, Count:8/unsigned>>,   Compound]
+    %% If the list contains less than (256 - 1) elements and if the
+    %% encoded size (including the encoding of "Count", thus S + 1
+    %% in the test) is less than 256 bytes, we use the short form.
+    %% Otherwise, we use the large form.
+    if Count >= (256 - 1) orelse (S + 1) >= 256 ->
+            [<<16#d0, (S + 4):32/unsigned, Count:32/unsigned>>, Compound];
+        true ->
+            [<<16#c0, (S + 1):8/unsigned,  Count:8/unsigned>>,  Compound]
     end;
 
 generate({map, ListOfPairs}) ->
@@ -100,8 +103,11 @@ generate({map, ListOfPairs}) ->
                                   (generate(Val))]
                          end, ListOfPairs),
     S = iolist_size(Compound),
-    if S > 255 -> [<<16#d1,(S + 4):32,Count:32>>, Compound];
-       true    -> [<<16#c1,(S + 1):8,Count:8>>,   Compound]
+    %% See generate({list, ...}) for an explanation of this test.
+    if Count >= (256 - 1) orelse (S + 1) >= 256 ->
+            [<<16#d1, (S + 4):32, Count:32>>, Compound];
+        true ->
+            [<<16#c1, (S + 1):8,  Count:8>>,  Compound]
     end;
 
 generate({array, Type, List}) ->
@@ -109,9 +115,11 @@ generate({array, Type, List}) ->
     Body = iolist_to_binary(
              [constructor(Type), [generate(Type, I) || I <- List]]),
     S = size(Body),
-    %% S < 256 -> Count < 256
-    if S > 255 -> [<<16#f0, (S + 4):32/unsigned, Count:32/unsigned>>, Body];
-       true    -> [<<16#e0, (S + 1):8/unsigned, Count:8/unsigned>>,   Body]
+    %% See generate({list, ...}) for an explanation of this test.
+    if Count >= (256 - 1) orelse (S + 1) >= 256 ->
+            [<<16#f0, (S + 4):32/unsigned, Count:32/unsigned>>, Body];
+        true ->
+            [<<16#e0, (S + 1):8/unsigned,  Count:8/unsigned>>,  Body]
     end;
 
 generate({as_is, TypeCode, Bin}) ->
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_parser.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_parser.erl
index 677ebaa5597443cf959713edd81640d5a823daa2..5a9f49f31158cd76a9aaf63d58a0cc5d9b9a0aa6 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_binary_parser).
@@ -20,9 +20,7 @@
 
 -include("rabbit_amqp1_0.hrl").
 
--ifdef(use_specs).
--spec(parse/1 :: (binary()) -> tuple()).
--endif.
+-spec parse(binary()) -> tuple().
 
 parse_all(ValueBin) when is_binary(ValueBin) ->
     lists:reverse(parse_all([], parse(ValueBin))).
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_channel.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl
index 1a968034684a1844a6d515a7c3e1213d9f942bdc..2e5a4b2095fc0e153f59876078d29ee0d2a7374f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_channel).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_framing.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_framing.erl
index a4b1b609dd3abebc55d7c1b150709bdbcf6e1da1..0ff6b433e6f78f19521d299317e98ed155973351 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_framing).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_incoming_link.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl
index ab5d33118affce4b32bec89df711a7f62a163821..46748b1483895565f3debb198cf45e1cb002e389 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_incoming_link).
@@ -193,6 +193,8 @@ ensure_target(Target = #'v1_0.target'{address       = Address,
                                       timeout       = _Timeout},
               Link = #incoming_link{ route_state = RouteState }, DCh) ->
     DeclareParams = [{durable, rabbit_amqp1_0_link_util:durable(Durable)},
+                     {exclusive, false},
+                     {auto_delete, false},
                      {check_exchange, true},
                      {nowait, false}],
     case Dynamic of
similarity index 83%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_link_util.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl
index 2c34fd287f903c3f4a81ae7c1a075849d794b7ff..85e003de6f7b3f635aa796baa45fb01c2597b82d 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_link_util).
@@ -39,8 +39,12 @@ outcomes(Source) ->
                           _         -> DO
                       end,
                 Os1 = case Os of
-                          undefined -> ?OUTCOMES;
-                          _         -> Os
+                          undefined    -> ?OUTCOMES;
+                          {list, Syms} -> Syms;
+                          Bad1         -> rabbit_amqp1_0_util:protocol_error(
+                                            ?V_1_0_AMQP_ERROR_NOT_IMPLEMENTED,
+                                            "Outcomes not supported: ~p",
+                                            [Bad1])
                       end,
                 {DO1, Os1};
             _ ->
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_message.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl
index c48e9c5b5001ca20e5439b02df6f7eb8b3e43361..19d5fc089a3d98d87fb14af272c8939ccb9bcafe 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_message).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_outgoing_link.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl
index c418ba4bc1527bd851fdc7cf1fc009e52fe96d82..f0366c8e8078cb3b88abba8c674b6f0aa1e9e2b2 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_outgoing_link).
@@ -157,6 +157,8 @@ ensure_source(Source = #'v1_0.source'{address       = Address,
                                       timeout       = _Timeout},
               Link = #outgoing_link{ route_state = RouteState }, DCh) ->
     DeclareParams = [{durable, rabbit_amqp1_0_link_util:durable(Durable)},
+                     {exclusive, false},
+                     {auto_delete, false},
                      {check_exchange, true},
                      {nowait, false}],
     case Dynamic of
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_reader.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl
index 06664ff6d90207889a14b59111acfd02ee8d142b..dd460101ca8696820b188640fc864add58169d7c 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_reader).
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl
index 3f507be4e447712bf3615b39d6aadc9227ee8388..5a3ae025aceaf2dac1dfc9e3037fee92af389f4f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_session).
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_process.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl
index 2f9be46971a0ab12a9718f54d06316298b1251fd..68cc49c3387eba3212538d6af9edfb4dc7919b00 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_session_process).
similarity index 85%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl
index bdc8fa928fbefe38e076a980a81c81c34cf1022d..ee09d76c62be6492fe56e616ac606c9b4095c555 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_session_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
 -export_type([start_link_args/0]).
 
--type(start_link_args() ::
+-type start_link_args() ::
         {rabbit_types:protocol(), rabbit_net:socket(),
          rabbit_channel:channel_number(), non_neg_integer(), pid(),
-         rabbit_access_control:username(), rabbit_types:vhost(), pid()}).
-
--spec(start_link/1 :: (start_link_args()) -> {'ok', pid(), pid()}).
-
--endif.
+         rabbit_access_control:username(), rabbit_types:vhost(), pid()}.
 
+-spec start_link(start_link_args()) -> {'ok', pid(), pid()}.
 
 %%----------------------------------------------------------------------------
 start_link({rabbit_amqp1_0_framing, Sock, Channel, FrameMax, ReaderPid,
@@ -50,14 +45,14 @@ start_link({rabbit_amqp1_0_framing, Sock, Channel, FrameMax, ReaderPid,
           {writer, {rabbit_amqp1_0_writer, start_link,
                     [Sock, Channel, FrameMax, rabbit_amqp1_0_framing,
                      ReaderPid]},
-           intrinsic, ?MAX_WAIT, worker, [rabbit_amqp1_0_writer]}),
+           intrinsic, ?WORKER_WAIT, worker, [rabbit_amqp1_0_writer]}),
     {ok, ChannelPid} =
         supervisor2:start_child(
           SupPid,
           {channel, {rabbit_amqp1_0_session_process, start_link,
                      [{Channel, ReaderPid, WriterPid, Username, VHost, FrameMax,
                        adapter_info(Sock), Collector}]},
-           intrinsic, ?MAX_WAIT, worker, [rabbit_amqp1_0_session_process]}),
+           intrinsic, ?WORKER_WAIT, worker, [rabbit_amqp1_0_session_process]}),
     {ok, SupPid, ChannelPid}.
 
 %%----------------------------------------------------------------------------
similarity index 82%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup_sup.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl
index dca73120383619fe161ccb2a57406965e8bd97c1..4fff8b47a2a275fcacb2c96aedc4353a79c5108d 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_session_sup_sup).
 
 %%----------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(start_session/2 :: (pid(), rabbit_amqp1_0_session_sup:start_link_args()) ->
-                              {'ok', pid(), pid()}).
-
--endif.
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+-spec start_session(pid(), rabbit_amqp1_0_session_sup:start_link_args()) ->
+                              {'ok', pid(), pid()}.
 
 %%----------------------------------------------------------------------------
 
similarity index 81%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_util.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl
index 4a79502f00ad064aca4cc847afeb46a73add7405..7c6786f8aa0ea2317e301fe2aa8a9d278b9bf1a8 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_util).
 -export([protocol_error/3]).
 -export([serial_add/2, serial_compare/2, serial_diff/2]).
 
--ifdef(use_specs).
-
 -export_type([serial_number/0]).
--type(serial_number() :: non_neg_integer()).
--type(serial_compare_result() :: 'equal' | 'less' | 'greater').
-
--spec(serial_add/2 :: (serial_number(), non_neg_integer()) ->
-             serial_number()).
--spec(serial_compare/2 :: (serial_number(), serial_number()) ->
-             serial_compare_result()).
--spec(serial_diff/2 :: (serial_number(), serial_number()) ->
-             integer()).
-
--endif.
+-type serial_number() :: non_neg_integer().
+-type serial_compare_result() :: 'equal' | 'less' | 'greater'.
 
+-spec serial_add(serial_number(), non_neg_integer()) ->
+             serial_number().
+-spec serial_compare(serial_number(), serial_number()) ->
+             serial_compare_result().
+-spec serial_diff(serial_number(), serial_number()) ->
+             integer().
 
 protocol_error(Condition, Msg, Args) ->
     exit(#'v1_0.error'{
similarity index 93%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_writer.erl
rename to deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl
index 399e4e4b7a736b3fa3a66f19aed277ef75af82e9..03b02fb9a4707a6deb6347b9e7c9d515ab7bdb7c 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_amqp1_0_writer).
 
 %%---------------------------------------------------------------------------
 
--ifdef(use_specs).
-
--spec(start/5 ::
+-spec start
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid())
-        -> rabbit_types:ok(pid())).
--spec(start_link/5 ::
+        -> rabbit_types:ok(pid()).
+-spec start_link
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid())
-        -> rabbit_types:ok(pid())).
--spec(start/6 ::
+        -> rabbit_types:ok(pid()).
+-spec start
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid(), boolean())
-        -> rabbit_types:ok(pid())).
--spec(start_link/6 ::
+        -> rabbit_types:ok(pid()).
+-spec start_link
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          non_neg_integer(), rabbit_types:protocol(), pid(), boolean())
-        -> rabbit_types:ok(pid())).
--spec(send_command/2 ::
-        (pid(), rabbit_framing:amqp_method_record()) -> 'ok').
--spec(send_command/3 ::
+        -> rabbit_types:ok(pid()).
+-spec send_command
+        (pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec send_command
         (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content())
-        -> 'ok').
--spec(send_command_sync/2 ::
-        (pid(), rabbit_framing:amqp_method_record()) -> 'ok').
--spec(send_command_sync/3 ::
+        -> 'ok'.
+-spec send_command_sync
+        (pid(), rabbit_framing:amqp_method_record()) -> 'ok'.
+-spec send_command_sync
         (pid(), rabbit_framing:amqp_method_record(), rabbit_types:content())
-        -> 'ok').
--spec(send_command_and_notify/4 ::
+        -> 'ok'.
+-spec send_command_and_notify
         (pid(), pid(), pid(), rabbit_framing:amqp_method_record())
-        -> 'ok').
--spec(send_command_and_notify/5 ::
+        -> 'ok'.
+-spec send_command_and_notify
         (pid(), pid(), pid(), rabbit_framing:amqp_method_record(),
          rabbit_types:content())
-        -> 'ok').
--spec(internal_send_command/4 ::
+        -> 'ok'.
+-spec internal_send_command
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          rabbit_framing:amqp_method_record(), rabbit_types:protocol())
-        -> 'ok').
--spec(internal_send_command/6 ::
+        -> 'ok'.
+-spec internal_send_command
         (rabbit_net:socket(), rabbit_channel:channel_number(),
          rabbit_framing:amqp_method_record(), rabbit_types:content(),
          non_neg_integer(), rabbit_types:protocol())
-        -> 'ok').
-
--endif.
+        -> 'ok'.
 
 %%---------------------------------------------------------------------------
 
similarity index 71%
rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbitmq_amqp1_0.app.src
rename to deps/rabbitmq_amqp1_0/src/rabbitmq_amqp1_0.app.src
index da6909756782b7dfb023a9f1c3fc68fa4a8bff43..da7914ec320b0bc44d621c83b7302b46e4a5e79d 100644 (file)
@@ -1,9 +1,9 @@
 {application, rabbitmq_amqp1_0,
  [{description, "AMQP 1.0 support for RabbitMQ"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {env, [{default_user, "guest"},
          {default_vhost, <<"/">>},
          {protocol_strict_mode, false}]},
-  {applications, [kernel, stdlib, rabbit, amqp_client]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit, amqp_client]}]}.
diff --git a/deps/rabbitmq_auth_backend_ldap/CODE_OF_CONDUCT.md b/deps/rabbitmq_auth_backend_ldap/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_auth_backend_ldap/CONTRIBUTING.md b/deps/rabbitmq_auth_backend_ldap/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_auth_backend_ldap/Makefile b/deps/rabbitmq_auth_backend_ldap/Makefile
new file mode 100644 (file)
index 0000000..c33e998
--- /dev/null
@@ -0,0 +1,16 @@
+PROJECT = rabbitmq_auth_backend_ldap
+
+DEPS = rabbit_common rabbit
+TEST_DEPS = ct_helper rabbitmq_ct_helpers amqp_client
+dep_ct_helper = git https://github.com/extend/ct_helper.git master
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_auth_backend_ldap/README-authorisation.md b/deps/rabbitmq_auth_backend_ldap/README-authorisation.md
new file mode 100644 (file)
index 0000000..303825f
--- /dev/null
@@ -0,0 +1 @@
+See [RabbitMQ LDAP authentication backend docs](http://www.rabbitmq.com/ldap.html).
diff --git a/deps/rabbitmq_auth_backend_ldap/README-tests.md b/deps/rabbitmq_auth_backend_ldap/README-tests.md
new file mode 100644 (file)
index 0000000..5196276
--- /dev/null
@@ -0,0 +1,16 @@
+# Running LDAP Backend Tests
+
+If you have [Vagrant](https://www.vagrantup.com) installed you
+can simply `vagrant up` from the root of the project directory.
+This will start a vagrant box with OpenLDAP running, accessible
+on local port 3890.
+Alternatively run OpenLDAP locally on port 3890 and use
+`example/setup.sh` to create the appropriate ldap databases.
+
+IMPORTANT: this will wipe out your local OpenLDAP installation!
+The setup script currently needs to be executed between test suite runs,
+too.
+
+The test setup will seed the LDAP database with the required objects.
+
+Run `make test` to run the complete test suite.
diff --git a/deps/rabbitmq_auth_backend_ldap/README.md b/deps/rabbitmq_auth_backend_ldap/README.md
new file mode 100644 (file)
index 0000000..2cadcd8
--- /dev/null
@@ -0,0 +1,33 @@
+# RabbitMQ LDAP Authentication Backend
+
+This plugin provides [authentication and authorisation backends](http://rabbitmq.com/access-control.html)
+for RabbitMQ that use LDAP.
+
+## Installation
+
+This plugin ships with reasonably recent RabbitMQ versions
+(e.g. `3.3.0` or later). Enable it with
+
+    rabbitmq-plugins enable rabbitmq_auth_backend_ldap
+
+## Documentation
+
+[See LDAP guide](http://www.rabbitmq.com/ldap.html) on rabbitmq.com.
+
+
+## Building from Source
+
+See [Plugin Development guide](http://www.rabbitmq.com/plugin-development.html).
+
+TL;DR: running
+
+    make dist
+
+will build the plugin and put build artifacts under the `./plugins` directory.
+
+
+## Copyright and License
+
+(c) Pivotal Software Inc, 2007-2016
+
+Released under the MPL, the same license as RabbitMQ.
diff --git a/deps/rabbitmq_auth_backend_ldap/Vagrantfile b/deps/rabbitmq_auth_backend_ldap/Vagrantfile
new file mode 100644 (file)
index 0000000..543ff50
--- /dev/null
@@ -0,0 +1,76 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+# All Vagrant configuration is done below. The "2" in Vagrant.configure
+# configures the configuration version (we support older styles for
+# backwards compatibility). Please don't change it unless you know what
+# you're doing.
+Vagrant.configure(2) do |config|
+  # The most common configuration options are documented and commented below.
+  # For a complete reference, please see the online documentation at
+  # https://docs.vagrantup.com.
+
+  # Every Vagrant development environment requires a box. You can search for
+  # boxes at https://atlas.hashicorp.com/search.
+  config.vm.box = "ubuntu/trusty64"
+
+  # Disable automatic box update checking. If you disable this, then
+  # boxes will only be checked for updates when the user runs
+  # `vagrant box outdated`. This is not recommended.
+  # config.vm.box_check_update = false
+
+  # Create a forwarded port mapping which allows access to a specific port
+  # within the machine from a port on the host machine. In the example below,
+  # accessing "localhost:8080" will access port 80 on the guest machine.
+  config.vm.network "forwarded_port", guest: 389, host: 3890
+
+  # Create a private network, which allows host-only access to the machine
+  # using a specific IP.
+  # config.vm.network "private_network", ip: "192.168.33.10"
+
+  # Create a public network, which generally matches a bridged network.
+  # Bridged networks make the machine appear as another physical device on
+  # your network.
+  # config.vm.network "public_network"
+
+  # Share an additional folder to the guest VM. The first argument is
+  # the path on the host to the actual folder. The second argument is
+  # the path on the guest to mount the folder. And the optional third
+  # argument is a set of non-required options.
+  # config.vm.synced_folder "../data", "/vagrant_data"
+
+  # Provider-specific configuration so you can fine-tune various
+  # backing providers for Vagrant. These expose provider-specific options.
+  # Example for VirtualBox:
+  #
+  # config.vm.provider "virtualbox" do |vb|
+  #   # Display the VirtualBox GUI when booting the machine
+  #   vb.gui = true
+  #
+  #   # Customize the amount of memory on the VM:
+  #   vb.memory = "1024"
+  # end
+  #
+  # View the documentation for the provider you are using for more
+  # information on available options.
+
+  # Define a Vagrant Push strategy for pushing to Atlas. Other push strategies
+  # such as FTP and Heroku are also available. See the documentation at
+  # https://docs.vagrantup.com/v2/push/atlas.html for more information.
+  # config.push.define "atlas" do |push|
+  #   push.app = "YOUR_ATLAS_USERNAME/YOUR_APPLICATION_NAME"
+  # end
+
+  # Enable provisioning with a shell script. Additional provisioners such as
+  # Puppet, Chef, Ansible, Salt, and Docker are also available. Please see the
+  # documentation for more information about their specific syntax and use.
+  config.vm.provision "shell", inline: "sudo apt-get -y update"
+  config.vm.provision "file", source: "example", destination: "~"
+  config.vm.provision "shell", inline: "/bin/sh /home/vagrant/example/setup.sh"
+  # config.vm.provision "shell", inline: <<-SHELL
+  #   sudo apt-get update
+  #   sudo apt-get --yes install slapd ldap-utils
+  #   sleep(1)
+
+  # SHELL
+end
diff --git a/deps/rabbitmq_auth_backend_ldap/erlang.mk b/deps/rabbitmq_auth_backend_ldap/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards  and  for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http//ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+# Package index (erlang.mk catalogue): each stanza registers one fetchable
+# dependency via pkg_<name>_{name,description,homepage,fetch,repo,commit}.
+# These values are consumed by dep_name/dep_repo/dep_commit and the
+# dep_fetch_* helpers defined later in this file.
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+# Dropped the stray ".git" suffix: every other pkg_*_repo entry in this
+# catalogue uses the bare GitHub URL, and git clone accepts either form.
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_commit = master
+
+# Final package-index stanzas (same pkg_<name>_* scheme as the entries above).
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print: emit one package's metadata as "Label: value" lines via printf.
+# The "Pkg name" line is printed only when the catalogue key $(1) differs
+# from the registered app name (core_eq comparison); the trailing "" plus
+# the blank line before endef separate consecutive entries in the output.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# search: with q=<term> on the command line, print every package whose name
+# or description contains the term (case-insensitive via core_lc + findstring);
+# without q, dump the entire catalogue.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies named here are excluded from ALL_DEPS_DIRS below.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+# APPS_DIR/DEPS_DIR are exported so recursive $(MAKE) invocations agree on
+# where applications and dependencies live.
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Point rebar-built dependencies at the same deps directory.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Resolve a dependency's name, repo URL and commit: an explicit dep_<name>
+# variable wins, then the package index (pkg_<name>_*), then the literal
+# name. dep_repo also rewrites git:// GitHub URLs to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# All first-level app directories, and the deps dir of every BUILD_DEPS/DEPS
+# entry not listed in IGNORE_DEPS.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Put APPS_DIR and DEPS_DIR on ERL_LIBS exactly once (skipped when either is
+# already present in the colon-separated list).
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# V=0 prints a terse " DEP <name>" progress line; V=2 traces the fetch
+# commands with set -x; any other V leaves the recipe output untouched.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# apps: build every application under APPS_DIR. apps.log records the apps
+# already visited during this run so each is built at most once; the log is
+# reset only by the top-level invocation (IS_APP and IS_DEP both empty).
+# When invoked from inside an app (IS_APP set) the target is a no-op,
+# which stops the recursion.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# deps: fetch/build every dependency directory, after apps. deps.log plays
+# the same visited-set role as apps.log above and is likewise reset only at
+# the top level. A dependency with no GNUmakefile/makefile/Makefile aborts
+# the build with exit code 2. Setting SKIP_DEPS turns the whole target into
+# a no-op.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+#
+# dep_autopatch: decide how to adapt a freshly fetched dependency so it
+# builds under erlang.mk. Decision tree:
+#   - dep ships its own erlang.mk  -> normalize the .app.src and replace
+#     that erlang.mk with an include of ours;
+#   - dep has a Makefile           -> if it includes a ../*.mk file or
+#     mentions rebar (in the Makefile or any non-erlang.mk *.mk file),
+#     run the full conversion (dep_autopatch2); otherwise just regenerate
+#     the app file's module list;
+#   - no Makefile                  -> stub it out entirely when there is no
+#     src/ directory, else run the full conversion.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2: full conversion path. Materialize <app>.app.src.script if
+# present, normalize the .app.src, then: if any rebar artifact exists
+# (rebar binary, rebar.config or rebar.config.script), fetch the pinned
+# rebar build and translate the rebar config into a Makefile; otherwise
+# generate a minimal erlang.mk Makefile for the dep.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop: replace the dependency's build with a Makefile whose
+# only target ("noop:") does nothing, effectively skipping it.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# The dep's own erlang.mk is replaced with a one-line include of ours,
+# computed as a path relative to the dep's directory. Setting
+# NO_AUTOPATCH_ERLANG_MK turns this into a no-op (":").
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen: write a minimal two-line Makefile for deps that have
+# sources but no usable build: debug_info compilation plus an include of the
+# top-level erlang.mk.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: clone and build a rebar pinned to commit
+# 791db716... under $(ERLANG_MK_TMP)/rebar, once; subsequent calls see the
+# directory and skip the clone/build.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar: preserve the dep's original Makefile as
+# Makefile.orig.mk (the generated pre-* hooks below invoke it), run the
+# rebar-config-to-Makefile converter, and drop any stale ebin/<app>.app so
+# it gets regenerated from the normalized .app.src.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# dep_autopatch_rebar.erl: Erlang script (expanded by $(call erlang,...))
+# that translates a dependency's rebar.config / rebar.config.script into an
+# erlang.mk-compatible Makefile, appended piece by piece via Write/1:
+#   - erl_opts        -> ERLC_OPTS lines ({d,...}, {i,...}, platform_define,
+#                        parse_transform);
+#   - deps            -> DEPS += / dep_<name> lines (hex, git, branch/ref/tag
+#                        sources);
+#   - erl_first_files -> COMPILE_FIRST;
+#   - pre_hooks       -> pre-deps::/pre-app:: targets, with "make"/"gmake"
+#                        invocations rewritten to $(MAKE) (PatchHook);
+#   - port_specs/port_env -> a generated c_src/Makefile.erlang.mk with
+#                        ERL_CFLAGS/ERL_LDFLAGS and per-output compile rules
+#                        (darwin gets -flat_namespace and no -shared);
+#   - plugins         -> built, loaded (compiled from plugin_dir if needed)
+#                        and run for the preprocess/pre_compile/compile steps.
+# Note the heavy $$ / backslash escaping: the body must survive both Make
+# expansion and shell quoting before reaching the Erlang parser. Do not edit
+# the interior without re-checking all three layers.
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl: Erlang script that regenerates the {modules, ...}
+# entry of ebin/<app>.app from the *.erl files found (recursively) under the
+# dep's src/ directory; a no-op when the .app file does not exist.
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluate <app>.app.src.script (with empty
+# bindings) and overwrite <app>.app.src with the resulting static term.
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalize the dep's app resource file into
+# src/<app>.app.src. Input is the existing .app.src, falling back to
+# ebin/<app>.app when absent. Normalizations: modules list emptied (it is
+# regenerated at build time), {vsn, git} replaced by the literal "git", and
+# a {registered, []} entry added when missing. When input and output paths
+# differ, the input file is deleted afterwards.
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# dep_fetch_hex.erl: download package $(1) at version $(2) from the hex.pm
+# S3 tarball mirror over HTTPS, unpack the outer tar in memory, and extract
+# the inner "contents.tar.gz" into $(DEPS_DIR)/<name>.  Run via erl; no
+# comment lines may be added inside the define body (Erlang expression).
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# Shell wrapper: runs dep_fetch_hex.erl with the version taken from the
+# second word of the dep_<name> configuration variable.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback when no fetch method matches: report the bad dependency and fail
+# the recipe (78 = EX_CONFIG-style "configuration error" exit code).
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Legacy format: dep_<name> = <repo-url> [<ref>]; warns, clones the URL and
+# checks out the second word, defaulting to master when absent.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# dep_fetch: resolve the fetch method name for dependency $(1).
+# Priority: first word of dep_<name> when a matching dep_fetch_<word>
+# define exists; otherwise 'legacy' when building as a dependency (IS_DEP)
+# or 'fail'.  Without a dep_<name> variable, fall back to the Erlang.mk
+# package index (pkg_<name>_fetch) or 'fail'.
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+# dep_target: $(eval)-ed once per dependency (see the foreach below); emits
+# the rule that materializes $(DEPS_DIR)/<dep>: refuse to clash with an app
+# of the same name in APPS_DIR, fetch using the method from dep_fetch, run
+# autoreconf/configure when autotools inputs are present (configure failures
+# are tolerated via the leading '-'), then autopatch unless the dep is in
+# NO_AUTOPATCH — with special-case cloning of rabbitmq-codegen/-server for
+# the legacy amqp_client/rabbit patch modes.
+# NOTE(review): the DEP_STR $(if $(filter-out $1,$(DEP_NAME)),$1,...) arms
+# look swapped — the annotated "$1 ($(DEP_NAME))" form is produced when the
+# two names are EQUAL; confirm against upstream erlang.mk.
+# NOTE(review): the autoreconf step addresses $(DEPS_DIR)/$(1) while the
+# configure step uses $(DEPS_DIR)/$(DEP_NAME); verify these always agree.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+# Instantiate one fetch/prepare rule per build-time and run-time dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every local application in ALL_APPS_DIRS,
+# but only from the top-level invocation (IS_APP guards against recursing
+# again from inside an app's own sub-make).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+# distclean removes the whole dependency tree unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin: include makefile $(1) from DEPS_DIR once dependency $(2)
+# has been fetched (the empty-recipe rule orders the include after fetch).
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# Each DEP_PLUGINS entry is either "dep/path/to/file.mk" (explicit file) or
+# a bare dep name, which implies "<dep>/plugins.mk".
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH, the module name encodes the template's relative path
+# (slashes become underscores); otherwise just the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# erlydtl_compile.erl: compile each .dtl file in $(1) to a <name>$(DTL_SUFFIX)
+# beam in ebin/ via erlydtl:compile/3.  Erlang body — no comments inside.
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# Compile only the templates newer than the target ($? holds them).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# compile_proto: generate Erlang from .proto via protobuffs, compile the
+# generated .erl into ebin/, then drop the intermediate sources.
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+# Erlang body — no comments inside the define.
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# Each <tool>_verbose_<V> pair follows the same pattern: V=0 prints a short
+# status line, V=2 traces the recipe with 'set -x', anything else is silent
+# passthrough (the variable expands empty).
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# When ebin/test exists the last build was a test build; force a clean
+# first so the app is rebuilt without TEST_ERLC_OPTS.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file: template for a generated ebin/<project>.app.  $(1) is the git
+# describe string (only emitted when built as a dependency), $(2) the module
+# list.  Two variants: library (no application callback module present) and
+# full application (adds 'registered' names and the {mod, ...} entry).
+# Template bodies are rendered verbatim — no comments inside the defines.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+# app-build is a thin alias: the real work hangs off ebin/<project>.app.
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# compile_asn1: run erlc on the .asn1 inputs, then sort the generated
+# artifacts into src/ (.erl) and include/ (.hrl, .asn1db).
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+# Regenerate only for .asn1 files newer than the depend file ($?).
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile changed MIBs ($?) to priv/mibs/*.bin, then generate the .hrl
+# headers from the resulting .bin files into include/.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+# Leex (.xrl) and Yecc (.yrl) sources: their generated .erl files are added
+# to ERL_FILES so they compile with the rest of the project.
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+# Regenerate parsers/lexers for changed .xrl/.yrl inputs ($?).
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl: scan every src/*.erl for inter-module dependencies
+# (behaviours, parse_transforms, -include/-include_lib headers, -import)
+# and write makefile fragments to $(1) (the $(PROJECT).d depend file),
+# plus a COMPILE_FIRST list derived from a topological sort of the module
+# dependency graph.  Erlang body — no comment lines may be added inside.
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+# Regenerate the depend file unless NO_MAKEDEP is set and one already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# The stamp file trick: when the stamp already exists and a makefile is
+# newer, touch every source (and the depend file, -c = only if it exists)
+# so the normal rules see them as outdated.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# compile_erl: run erlc on the given sources (COMPILE_FIRST_PATHS ahead of
+# $(1), ERLC_EXCLUDE_PATHS filtered out).  -Werror is dropped when building
+# as a dependency so downstream warnings don't fail the build.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Main build rule: compile changed sources, then (re)generate the .app file
+# either from the app_file template (no .app.src) or by rewriting the
+# 'modules' and '{id, "git"}' entries of src/<project>.app.src with sed.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove all build outputs including files generated from .xrl/.yrl/.asn1
+# and .mib sources (their derived .erl/.hrl/.asn1db land in src/ and include/).
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+# Instantiate fetch rules for documentation-only dependencies.
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# doc-deps is a no-op when SKIP_DEPS is set; otherwise fetch and build each.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+# Instantiate fetch rules for release-only dependencies.
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+# rel-deps is a no-op when SKIP_DEPS is set; otherwise fetch and build each.
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+# Instantiate fetch rules for test-only dependencies.
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+# Compile the test suites in TEST_DIR with the test compiler options.
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# test-build: three variants — no src/ at all (tests only); src/ present but
+# last build was non-test (clean first, then mark with ebin/test); src/
+# present and already a test build (incremental rebuild).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Normalize an ERLC_OPTS string so each option is a single word.
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Convert one erlc '+option' into its bare rebar erl_opts form; -Werror and
+# non-'+' options are dropped.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Rendered rebar.config: hex deps become {name, "version"}, everything else
+# a {name, ".*", {git, Repo, Commit}} tuple.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export the rendered config through the environment so the multi-line value
+# survives into the shell recipe below.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the user guide (PDF + chunked HTML) only when its source exists;
+# otherwise the target is an empty no-op.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build man pages from the AsciiDoc manual sources, then gather and gzip
+# them under doc/man<section>/ for every configured MAN_SECTIONS entry.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# Install the gzipped man pages under $(MAN_INSTALL_PATH)/man<section>/.
+# Fix: install's -g expects a GROUP and -o an OWNER, but the original
+# passed `id -u` (the uid) to -g and `id -g` (the gid) to -o, i.e. the two
+# command substitutions were swapped.  Swap them back so the files are
+# installed owned by the current user and group.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove every generated documentation artifact (HTML, PDF, man pages).
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# Append the bootstrap section to the global 'make help' output.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+# Each bs_* define below is a whole-file template rendered verbatim by
+# render_template ($p is the project name set by the bootstrap targets),
+# so no comment lines may be added inside the define bodies.
+
+# .app.src skeleton for a full OTP application (has a callback module).
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+# .app.src skeleton for a library application (no callback module).
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile skeleton for an app living under APPS_DIR (includes the shared
+# top-level erlang.mk via a relative path).
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# Application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+# relx release configuration skeleton.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+# Empty sys.config skeleton.
+define bs_sys_config
+[
+].
+endef
+
+# vm.args skeleton: distributed node name, cookie, heartbeat.
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+# Each tpl_* define is an Erlang module template rendered verbatim by the
+# 'new' targets ($(n) is the module name); no comments inside the bodies.
+
+# Empty OTP supervisor.
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# Minimal gen_server with all callbacks stubbed.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+# Bare module with an empty export list.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy HTTP handler replying 200 to every request.
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Minimal gen_fsm with a single state_name state and stubbed callbacks.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+# Cowboy loop handler with a 5s timeout, hibernating between messages.
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Cowboy REST handler serving a static HTML body.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy websocket handler echoing text/binary frames back to the client.
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Ranch protocol skeleton: accept the connection then loop forever.
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 71%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/global.ldif
rename to deps/rabbitmq_auth_backend_ldap/example/global.ldif
index 431a1f1c3e44d91d7bdc32142695381fce9650b5..373d9d9951b443f21fdd08fe63259280a2ab92c5 100644 (file)
@@ -9,19 +9,19 @@ dn: olcDatabase=bdb,cn=config
 objectClass: olcDatabaseConfig
 objectClass: olcBdbConfig
 olcDatabase: bdb
-# Domain name (e.g. example.com)
-olcSuffix: dc=example,dc=com
+# Domain name (e.g. rabbitmq.com)
+olcSuffix: dc=rabbitmq,dc=com
 # Location on system where database is stored
 olcDbDirectory: /var/lib/ldap
 # Manager of the database
-olcRootDN: cn=admin,dc=example,dc=com
+olcRootDN: cn=admin,dc=rabbitmq,dc=com
 olcRootPW: admin
 olcAccess: to attrs=userPassword
   by self write
   by anonymous auth
-  by dn.base="cn=admin,dc=example,dc=com" write
+  by dn.base="cn=admin,dc=rabbitmq,dc=com" write
   by * none
 olcAccess: to *
   by self write
-  by dn.base="cn=admin,dc=example,dc=com" write
+  by dn.base="cn=admin,dc=rabbitmq,dc=com" write
   by * read
diff --git a/deps/rabbitmq_auth_backend_ldap/example/memberof_init.ldif b/deps/rabbitmq_auth_backend_ldap/example/memberof_init.ldif
new file mode 100644 (file)
index 0000000..6301e93
--- /dev/null
@@ -0,0 +1,17 @@
+dn: cn=module,cn=config
+cn: module
+objectClass: olcModuleList
+olcModuleLoad: memberof
+olcModulePath: /usr/lib/ldap
+
+dn: olcOverlay={0}memberof,olcDatabase={1}bdb,cn=config
+objectClass: olcConfig
+objectClass: olcMemberOf
+objectClass: olcOverlayConfig
+objectClass: top
+olcOverlay: memberof
+olcMemberOfDangling: ignore
+olcMemberOfRefInt: TRUE
+olcMemberOfGroupOC: groupOfNames
+olcMemberOfMemberAD: member
+olcMemberOfMemberOfAD: memberOf
diff --git a/deps/rabbitmq_auth_backend_ldap/example/refint_1.ldif b/deps/rabbitmq_auth_backend_ldap/example/refint_1.ldif
new file mode 100644 (file)
index 0000000..420f454
--- /dev/null
@@ -0,0 +1,3 @@
+dn: cn=module{1},cn=config
+add: olcmoduleload
+olcmoduleload: refint
\ No newline at end of file
diff --git a/deps/rabbitmq_auth_backend_ldap/example/refint_2.ldif b/deps/rabbitmq_auth_backend_ldap/example/refint_2.ldif
new file mode 100644 (file)
index 0000000..0955a1a
--- /dev/null
@@ -0,0 +1,7 @@
+dn: olcOverlay={1}refint,olcDatabase={1}bdb,cn=config
+objectClass: olcConfig
+objectClass: olcOverlayConfig
+objectClass: olcRefintConfig
+objectClass: top
+olcOverlay: {1}refint
+olcRefintAttribute: memberof member manager owner
diff --git a/deps/rabbitmq_auth_backend_ldap/example/seed.sh b/deps/rabbitmq_auth_backend_ldap/example/seed.sh
new file mode 100755 (executable)
index 0000000..9082bed
--- /dev/null
@@ -0,0 +1,8 @@
+#!/bin/sh -e
+
+DIR=$(dirname $0)
+
+sudo ldapadd -Y EXTERNAL -H ldapi:/// -f ${DIR}/global.ldif
+sudo ldapadd -Q -Y EXTERNAL -H ldapi:/// -f ${DIR}/memberof_init.ldif
+sudo ldapmodify -Q -Y EXTERNAL -H ldapi:/// -f ${DIR}/refint_1.ldif
+sudo ldapadd -Q -Y EXTERNAL -H ldapi:/// -f ${DIR}/refint_2.ldif
diff --git a/deps/rabbitmq_auth_backend_ldap/example/setup.sh b/deps/rabbitmq_auth_backend_ldap/example/setup.sh
new file mode 100755 (executable)
index 0000000..8194efe
--- /dev/null
@@ -0,0 +1,17 @@
+#!/bin/sh -e
+export DEBIAN_FRONTEND=noninteractive
+sudo apt-get --yes purge slapd
+sudo rm -rf /var/lib/ldap
+echo -e " \
+slapd    slapd/internal/generated_adminpw    password   openstack
+slapd    slapd/password2    password    openstack
+slapd    slapd/internal/adminpw    password openstack
+slapd    slapd/password1    password    openstack
+slapd    slapd/backend    select    BDB
+" | sudo debconf-set-selections
+sudo apt-get --yes install slapd ldap-utils
+sleep 1
+
+DIR=$(dirname $0)
+
+$DIR/seed.sh
diff --git a/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk b/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 54%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap.erl
rename to deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl
index 7a7e91f74bcf9506aec73533416d0d5d35e535f9..7915caf90284b3d1afeb2ea2d013a27886b3c06c 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_backend_ldap).
@@ -30,6 +30,7 @@
 -define(L(F, A),  log("LDAP "         ++ F, A)).
 -define(L1(F, A), log("    LDAP "     ++ F, A)).
 -define(L2(F, A), log("        LDAP " ++ F, A)).
+-define(SCRUBBED_CREDENTIAL,  "xxxx").
 
 -import(rabbit_misc, [pget/2]).
 
@@ -46,24 +47,27 @@ user_login_authentication(Username, []) ->
        [Username, log_result(R)]),
     R;
 
-user_login_authentication(Username, [{password, <<>>}]) ->
-    %% Password "" is special in LDAP, see
-    %% https://tools.ietf.org/html/rfc4513#section-5.1.2
-    ?L("CHECK: unauthenticated login for ~s", [Username]),
-    ?L("DECISION: unauthenticated login for ~s: denied", [Username]),
-    {refused, "user '~s' - unauthenticated bind not allowed", [Username]};
-
-user_login_authentication(User, [{password, PW}]) ->
-    ?L("CHECK: login for ~s", [User]),
-    R = case dn_lookup_when() of
-            prebind -> UserDN = username_to_dn_prebind(User),
-                       with_ldap({ok, {UserDN, PW}},
-                                 fun(L) -> do_login(User, UserDN,  PW, L) end);
-            _       -> with_ldap({ok, {fill_user_dn_pattern(User), PW}},
-                                 fun(L) -> do_login(User, unknown, PW, L) end)
-        end,
-    ?L("DECISION: login for ~s: ~p", [User, log_result(R)]),
-    R;
+user_login_authentication(Username, AuthProps) when is_list(AuthProps) ->
+    case pget(password, AuthProps) of
+        undefined -> user_login_authentication(Username, []);
+        <<>> ->
+            %% Password "" is special in LDAP, see
+            %% https://tools.ietf.org/html/rfc4513#section-5.1.2
+            ?L("CHECK: unauthenticated login for ~s", [Username]),
+            ?L("DECISION: unauthenticated login for ~s: denied", [Username]),
+            {refused, "user '~s' - unauthenticated bind not allowed", [Username]};
+        PW ->
+            ?L("CHECK: login for ~s", [Username]),
+            R = case dn_lookup_when() of
+                    prebind -> UserDN = username_to_dn_prebind(Username),
+                               with_ldap({ok, {UserDN, PW}},
+                                         login_fun(Username, UserDN, PW, AuthProps));
+                    _       -> with_ldap({ok, {fill_user_dn_pattern(Username), PW}},
+                                         login_fun(Username, unknown, PW, AuthProps))
+                end,
+            ?L("DECISION: login for ~s: ~p", [Username, log_result(R)]),
+            R
+    end;
 
 user_login_authentication(Username, AuthProps) ->
     exit({unknown_auth_props, Username, AuthProps}).
@@ -142,44 +146,66 @@ evaluate0({in_group, DNPattern, Desc}, Args,
     ?L1("evaluated in_group for \"~s\": ~p", [DN, R]),
     R;
 
+evaluate0({in_group_nested, DNPattern}, Args, User, LDAP) ->
+       evaluate({in_group_nested, DNPattern, "member", subtree},
+             Args, User, LDAP);
+evaluate0({in_group_nested, DNPattern, Desc}, Args, User, LDAP) ->
+    evaluate({in_group_nested, DNPattern, Desc, subtree},
+             Args, User, LDAP);
+evaluate0({in_group_nested, DNPattern, Desc, Scope}, Args,
+          #auth_user{impl = #impl{user_dn = UserDN}}, LDAP) ->
+    GroupsBase = case env(group_lookup_base) of
+        none -> env(dn_lookup_base);
+        B    -> B
+    end,
+    GroupDN = fill(DNPattern, Args),
+    EldapScope =
+        case Scope of
+            subtree      -> eldap:wholeSubtree();
+            singlelevel  -> eldap:singleLevel();
+            single_level -> eldap:singleLevel();
+            onelevel     -> eldap:singleLevel();
+            one_level    -> eldap:singleLevel()
+        end,
+    search_nested_group(LDAP, Desc, GroupsBase, EldapScope, UserDN, GroupDN, []);
+
 evaluate0({'not', SubQuery}, Args, User, LDAP) ->
     R = evaluate(SubQuery, Args, User, LDAP),
     ?L1("negated result to ~s", [R]),
     not R;
 
 evaluate0({'and', Queries}, Args, User, LDAP) when is_list(Queries) ->
-    R = lists:foldl(fun (Q,  true)  -> evaluate(Q, Args, User, LDAP);
-                        (_Q, false) -> false
+    R = lists:foldl(fun (Q,  true)    -> evaluate(Q, Args, User, LDAP);
+                        % Treat any non-true result as false
+                        (_Q, _Result) -> false
                     end, true, Queries),
     ?L1("'and' result: ~s", [R]),
     R;
 
 evaluate0({'or', Queries}, Args, User, LDAP) when is_list(Queries) ->
-    R = lists:foldl(fun (_Q, true)  -> true;
-                        (Q,  false) -> evaluate(Q, Args, User, LDAP)
+    R = lists:foldl(fun (_Q, true)    -> true;
+                        % Treat any non-true result as false
+                        (Q,  _Result) -> evaluate(Q, Args, User, LDAP)
                     end, false, Queries),
     ?L1("'or' result: ~s", [R]),
     R;
 
 evaluate0({equals, StringQuery1, StringQuery2}, Args, User, LDAP) ->
     safe_eval(fun (String1, String2) ->
-                      R = String1 =:= String2,
+                      R  = if String1 =:= String2 -> true;
+                              true -> is_multi_attr_member(String1, String2)
+                           end,
                       ?L1("evaluated equals \"~s\", \"~s\": ~s",
-                          [String1, String2, R]),
+                          [format_multi_attr(String1),
+                           format_multi_attr(String2), R]),
                       R
               end,
               evaluate(StringQuery1, Args, User, LDAP),
               evaluate(StringQuery2, Args, User, LDAP));
 
 evaluate0({match, StringQuery, REQuery}, Args, User, LDAP) ->
-    safe_eval(fun (String, RE) ->
-                      R = case re:run(String, RE) of
-                              {match, _} -> true;
-                              nomatch    -> false
-                          end,
-                      ?L1("evaluated match \"~s\" against RE \"~s\": ~s",
-                          [String, RE, R]),
-                      R
+    safe_eval(fun (String1, String2) ->
+                      do_match(String1, String2)
               end,
               evaluate(StringQuery, Args, User, LDAP),
               evaluate(REQuery, Args, User, LDAP));
@@ -196,16 +222,79 @@ evaluate0({attribute, DNPattern, AttributeName}, Args, _User, LDAP) ->
     DN = fill(DNPattern, Args),
     R = attribute(DN, AttributeName, LDAP),
     ?L1("evaluated attribute \"~s\" for \"~s\": ~p",
-        [AttributeName, DN, R]),
+        [AttributeName, DN, format_multi_attr(R)]),
     R;
 
 evaluate0(Q, Args, _User, _LDAP) ->
     {error, {unrecognised_query, Q, Args}}.
 
+search_groups(LDAP, Desc, GroupsBase, Scope, DN) ->
+    Filter = eldap:equalityMatch(Desc, DN),
+    case eldap:search(LDAP,
+                      [{base, GroupsBase},
+                       {filter, Filter},
+                       {attributes, ["dn"]},
+                       {scope, Scope}]) of
+        {error, _} = E ->
+            ?L("error searching for parent groups for \"~s\": ~p", [DN, E]),
+            [];
+        {ok, #eldap_search_result{entries = []}} ->
+            [];
+        {ok, #eldap_search_result{entries = Entries}} ->
+            [ON || #eldap_entry{object_name = ON} <- Entries]
+    end.
+
+search_nested_group(LDAP, Desc, GroupsBase, Scope, CurrentDN, TargetDN, Path) ->
+    case lists:member(CurrentDN, Path) of
+        true  ->
+            ?L("recursive cycle on DN ~s while searching for group ~s",
+               [CurrentDN, TargetDN]),
+            false;
+        false ->
+            GroupDNs = search_groups(LDAP, Desc, GroupsBase, Scope, CurrentDN),
+            case lists:member(TargetDN, GroupDNs) of
+                true  ->
+                    true;
+                false ->
+                    NextPath = [CurrentDN | Path],
+                    lists:any(fun(DN) ->
+                        search_nested_group(LDAP, Desc, GroupsBase, Scope,
+                                            DN, TargetDN, NextPath)
+                    end,
+                    GroupDNs)
+            end
+    end.
+
 safe_eval(_F, {error, _}, _)          -> false;
 safe_eval(_F, _,          {error, _}) -> false;
 safe_eval(F,  V1,         V2)         -> F(V1, V2).
 
+do_match(S1, S2) ->
+    case re:run(S1, S2) of
+        {match, _} -> log_match(S1, S2, R = true),
+                      R;
+        nomatch    ->
+            %% Do match bidirectionally, if intial RE consists of
+            %% multi attributes, else log match and return result.
+            case S2 of
+                S when length(S) > 1 ->
+                    R = case re:run(S2, S1) of
+                            {match, _} -> true;
+                            nomatch    -> false
+                        end,
+                    log_match(S2, S1, R),
+                    R;
+                _ ->
+                    log_match(S1, S2, R = false),
+                    R
+            end
+    end.
+
+log_match(String, RE, Result) ->
+    ?L1("evaluated match \"~s\" against RE \"~s\": ~s",
+        [format_multi_attr(String),
+         format_multi_attr(RE), Result]).
+
 object_exists(DN, Filter, LDAP) ->
     case eldap:search(LDAP,
                       [{base, DN},
@@ -223,11 +312,8 @@ attribute(DN, AttributeName, LDAP) ->
                       [{base, DN},
                        {filter, eldap:present("objectClass")},
                        {attributes, [AttributeName]}]) of
-        {ok, #eldap_search_result{entries = [#eldap_entry{attributes = A}]}} ->
-            case pget(AttributeName, A) of
-                [Attr] -> Attr;
-                _      -> {error, not_found}
-            end;
+        {ok, #eldap_search_result{entries = E = [#eldap_entry{}|_]}} ->
+            get_attributes(AttributeName, E);
         {ok, #eldap_search_result{entries = _}} ->
             {error, not_found};
         {error, _} = E ->
@@ -246,8 +332,10 @@ with_ldap(_Creds, _Fun, undefined) ->
 
 with_ldap({error, _} = E, _Fun, _State) ->
     E;
-%% TODO - ATM we create and destroy a new LDAP connection on every
-%% call. This could almost certainly be more efficient.
+
+%% TODO - while we now pool LDAP connections we don't make any attempt
+%% to avoid rebinding if the connection is already bound as the user
+%% of interest, so this could still be more efficient.
 with_ldap({ok, Creds}, Fun, Servers) ->
     Opts0 = [{port, env(port)}],
     Opts1 = case env(log) of
@@ -256,7 +344,15 @@ with_ldap({ok, Creds}, Fun, Servers) ->
                     rabbit_log:info(
                       "    LDAP connecting to servers: ~p~n", [Servers]),
                     [{log, fun(1, S, A) -> rabbit_log:warning(Pre ++ S, A);
-                              (2, S, A) -> rabbit_log:info   (Pre ++ S, A)
+                              (2, S, A) ->
+                                   rabbit_log:info(Pre ++ S, scrub_creds(A, []))
+                           end} | Opts0];
+                network_unsafe ->
+                    Pre = "    LDAP network traffic: ",
+                    rabbit_log:info(
+                      "    LDAP connecting to servers: ~p~n", [Servers]),
+                    [{log, fun(1, S, A) -> rabbit_log:warning(Pre ++ S, A);
+                              (2, S, A) -> rabbit_log:info(   Pre ++ S, A)
                            end} | Opts0];
                 _ ->
                     Opts0
@@ -266,33 +362,100 @@ with_ldap({ok, Creds}, Fun, Servers) ->
                infinity -> Opts1;
                MS       -> [{timeout, MS} | Opts1]
            end,
-    case eldap_open(Servers, Opts) of
+    worker_pool:submit(
+      ldap_pool,
+      fun () ->
+              case with_login(Creds, Servers, Opts, Fun) of
+                  {error, {gen_tcp_error, closed}} ->
+                      %% retry with new connection
+                      rabbit_log:warning("TCP connection to a LDAP server is already closed.~n"),
+                      purge_conn(Creds == anon, Servers, Opts),
+                      rabbit_log:warning("LDAP will retry with a new connection.~n"),
+                      with_login(Creds, Servers, Opts, Fun);
+                  Result -> Result
+              end
+      end, reuse).
+
+with_login(Creds, Servers, Opts, Fun) ->
+    case get_or_create_conn(Creds == anon, Servers, Opts) of
         {ok, LDAP} ->
-            try Creds of
+            case Creds of
                 anon ->
                     ?L1("anonymous bind", []),
                     Fun(LDAP);
                 {UserDN, Password} ->
                     case eldap:simple_bind(LDAP, UserDN, Password) of
                         ok ->
-                            ?L1("bind succeeded: ~s", [UserDN]),
+                            ?L1("bind succeeded: ~s",
+                                [scrub_dn(UserDN, env(log))]),
                             Fun(LDAP);
                         {error, invalidCredentials} ->
                             ?L1("bind returned \"invalid credentials\": ~s",
-                                [UserDN]),
+                                [scrub_dn(UserDN, env(log))]),
                             {refused, UserDN, []};
                         {error, E} ->
-                            ?L1("bind error: ~s ~p", [UserDN, E]),
+                            ?L1("bind error: ~s ~p",
+                                [scrub_dn(UserDN, env(log)), E]),
                             {error, E}
                     end
-            after
-                eldap:close(LDAP)
             end;
         Error ->
             ?L1("connect error: ~p", [Error]),
             Error
     end.
 
+%% Gets either the anonymous or bound (authenticated) connection
+get_or_create_conn(IsAnon, Servers, Opts) ->
+    Conns = case get(ldap_conns) of
+                undefined -> dict:new();
+                Dict      -> Dict
+            end,
+    Key = {IsAnon, Servers, Opts},
+    case dict:find(Key, Conns) of
+        {ok, Conn} -> Conn;
+        error      ->
+            case eldap_open(Servers, Opts) of
+                {ok, _} = Conn -> put(ldap_conns, dict:store(Key, Conn, Conns)), Conn;
+                Error -> Error
+            end
+    end.
+
+%% Get attribute(s) from eldap entry
+get_attributes(_AttrName, []) -> {error, not_found};
+get_attributes(AttrName, [#eldap_entry{attributes = A}|Rem]) ->
+    case pget(AttrName, A) of
+        [Attr|[]]                    -> Attr;
+        Attrs when length(Attrs) > 1 -> Attrs;
+        _                            -> get_attributes(AttrName, Rem)
+    end;
+get_attributes(AttrName, [_|Rem])    -> get_attributes(AttrName, Rem).
+
+%% Format multiple attribute values for logging
+format_multi_attr(Attrs) ->
+    format_multi_attr(io_lib:printable_list(Attrs), Attrs).
+
+format_multi_attr(true, Attrs)                     -> Attrs;
+format_multi_attr(_,    Attrs) when is_list(Attrs) -> string:join(Attrs, "; ");
+format_multi_attr(_,    Error)                     -> Error.
+
+
+%% In case of multiple attributes, check for equality bi-directionally
+is_multi_attr_member(Str1, Str2) ->
+    lists:member(Str1, Str2) orelse lists:member(Str2, Str1).
+
+purge_conn(IsAnon, Servers, Opts) ->
+    Conns = get(ldap_conns),
+    Key = {IsAnon, Servers, Opts},
+    {_, {_, Conn}} = dict:find(Key, Conns),
+    rabbit_log:warning("LDAP Purging an already closed LDAP server connection~n"),
+    % We cannot close the connection with eldap:close/1 because as of OTP-13327
+    % eldap will try to do_unbind first and will fail with a `{gen_tcp_error, closed}`.
+    % Since we know that the connection is already closed, we just
+    % kill its process.
+    unlink(Conn),
+    exit(Conn, closed),
+    put(ldap_conns, dict:erase(Key, Conns)).
+
 eldap_open(Servers, Opts) ->
     case eldap:open(Servers, ssl_conf() ++ Opts) of
         {ok, LDAP} ->
@@ -334,7 +497,17 @@ env(F) ->
     {ok, V} = application:get_env(rabbitmq_auth_backend_ldap, F),
     V.
 
+login_fun(User, UserDN, Password, AuthProps) ->
+    fun(L) -> case pget(vhost, AuthProps) of
+                  undefined -> do_login(User, UserDN, Password, L);
+                  VHost     -> do_login(User, UserDN, Password, VHost, L)
+              end
+    end.
+
 do_login(Username, PrebindUserDN, Password, LDAP) ->
+    do_login(Username, PrebindUserDN, Password, <<>>, LDAP).
+
+do_login(Username, PrebindUserDN, Password, VHost, LDAP) ->
     UserDN = case PrebindUserDN of
                  unknown -> username_to_dn(Username, LDAP, dn_lookup_when());
                  _       -> PrebindUserDN
@@ -342,30 +515,31 @@ do_login(Username, PrebindUserDN, Password, LDAP) ->
     User = #auth_user{username     = Username,
                       impl         = #impl{user_dn  = UserDN,
                                            password = Password}},
-    DTQ = fun (LDAPn) -> do_tag_queries(Username, UserDN, User, LDAPn) end,
+    DTQ = fun (LDAPn) -> do_tag_queries(Username, UserDN, User, VHost, LDAPn) end,
     TagRes = case env(other_bind) of
                  as_user -> DTQ(LDAP);
                  _       -> with_ldap(creds(User), DTQ)
              end,
     case TagRes of
-        {ok, L} -> case [E || {_, E = {error, _}} <- L] of
-                       []      -> Tags = [Tag || {Tag, true} <- L],
-                                  {ok, User#auth_user{tags = Tags}};
-                       [E | _] -> E
-                   end;
+        {ok, L} -> {ok, User#auth_user{tags = [Tag || {Tag, true} <- L]}};
         E       -> E
     end.
 
-do_tag_queries(Username, UserDN, User, LDAP) ->
+do_tag_queries(Username, UserDN, User, VHost, LDAP) ->
     {ok, [begin
               ?L1("CHECK: does ~s have tag ~s?", [Username, Tag]),
               R = evaluate(Q, [{username, Username},
-                               {user_dn,  UserDN}], User, LDAP),
+                               {user_dn,  UserDN} | vhost_if_defined(VHost)],
+                           User, LDAP),
               ?L1("DECISION: does ~s have tag ~s? ~p",
                   [Username, Tag, R]),
               {Tag, R}
           end || {Tag, Q} <- env(tag_queries)]}.
 
+vhost_if_defined([])    -> [];
+vhost_if_defined(<<>>)  -> [];
+vhost_if_defined(VHost) -> [{vhost, VHost}].
+
 dn_lookup_when() -> case {env(dn_lookup_attribute), env(dn_lookup_bind)} of
                         {none, _}       -> never;
                         {_,    as_user} -> postbind;
@@ -409,6 +583,58 @@ creds(#auth_user{impl = #impl{user_dn = UserDN, password = PW}}, as_user) ->
 creds(_, Creds) ->
     {ok, Creds}.
 
+%% Scrub credentials
+scrub_creds([], Acc)      -> lists:reverse(Acc);
+scrub_creds([H|Rem], Acc) ->
+    scrub_creds(Rem, [scrub_payload_creds(H)|Acc]).
+
+%% Scrub credentials from specific payloads
+scrub_payload_creds({'BindRequest', N, DN, {simple, _PWD}}) ->
+  {'BindRequest', N, scrub_dn(DN), {simple, ?SCRUBBED_CREDENTIAL}};
+scrub_payload_creds(Any) -> Any.
+
+scrub_dn(DN) -> scrub_dn(DN, network).
+
+scrub_dn(DN, network_unsafe) -> DN;
+scrub_dn(DN, false)          -> DN;
+scrub_dn(DN, _) ->
+    case is_dn(DN) of
+        true -> scrub_rdn(string:tokens(DN, ","), []);
+        _    ->
+            %% We aren't fully certain its a DN, & don't know what sensitive
+            %% info could be contained, thus just scrub the entire credential
+            ?SCRUBBED_CREDENTIAL
+    end.
+
+scrub_rdn([], Acc) ->
+    string:join(lists:reverse(Acc), ",");
+scrub_rdn([DN|Rem], Acc) ->
+    DN0 = case catch string:tokens(DN, "=") of
+              L = [RDN, _] -> case string:to_lower(RDN) of
+                                  "cn"  -> [RDN, ?SCRUBBED_CREDENTIAL];
+                                  "dc"  -> [RDN, ?SCRUBBED_CREDENTIAL];
+                                  "ou"  -> [RDN, ?SCRUBBED_CREDENTIAL];
+                                  "uid" -> [RDN, ?SCRUBBED_CREDENTIAL];
+                                  _     -> L
+                              end;
+              _Any ->
+                  %% There's no RDN, log "xxxx=xxxx"
+                  [?SCRUBBED_CREDENTIAL, ?SCRUBBED_CREDENTIAL]
+          end,
+  scrub_rdn(Rem, [string:join(DN0, "=")|Acc]).
+
+is_dn(S) when is_list(S) ->
+    case catch string:tokens(to_list(S), "=") of
+        L when length(L) > 1 -> true;
+        _                    -> false
+    end;
+is_dn(_S) -> false.
+
+to_list(S) when is_list(S)   -> S;
+to_list(S) when is_binary(S) -> binary_to_list(S);
+to_list(S) when is_atom(S)   -> atom_to_list(S);
+to_list(S)                   -> {error, {badarg, S}}.
+
 log(Fmt,  Args) -> case env(log) of
                        false -> ok;
                        _     -> rabbit_log:info(Fmt ++ "~n", Args)
similarity index 78%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_app.erl
rename to deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl
index e0cd7aa13dbc59ae64534172e8150de94095cbe5..e96a3772f7101181db81b6cf6af557736c899ed3 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_backend_ldap_app).
 
 %% Dummy supervisor to get this application behaviour working
 -behaviour(supervisor).
--export([init/1]).
+-export([create_ldap_pool/0, init/1]).
+
+-rabbit_boot_step({ldap_pool,
+                   [{description, "LDAP pool"},
+                    {mfa, {?MODULE, create_ldap_pool, []}}, 
+                    {requires, kernel_ready}]}).
+
+create_ldap_pool() ->
+    {ok, PoolSize} = application:get_env(rabbitmq_auth_backend_ldap, pool_size),
+    rabbit_sup:start_supervisor_child(ldap_pool_sup, worker_pool_sup, [PoolSize, ldap_pool]).
 
 start(_Type, _StartArgs) ->
     {ok, Backends} = application:get_env(rabbit, auth_backends),
@@ -51,3 +60,4 @@ configured(M,  [_    |T]) -> configured(M, T).
 %%----------------------------------------------------------------------------
 
 init([]) -> {ok, {{one_for_one, 3, 10}, []}}.
+
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_util.erl
rename to deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl
index 47c3d660ca9391f74e83039ed644d391c7fc39b6..86216ac73af2cb3943b29a196c7cd0a947b03cb8 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_backend_ldap_util).
similarity index 76%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbitmq_auth_backend_ldap.app.src
rename to deps/rabbitmq_auth_backend_ldap/src/rabbitmq_auth_backend_ldap.app.src
index b2139d66d4b7663d485d38518c38ebbcf9856f2e..0277d8440e724330dc003e479427017df38217c7 100644 (file)
@@ -1,7 +1,7 @@
 %% -*- erlang -*-
 {application, rabbitmq_auth_backend_ldap,
  [{description, "RabbitMQ LDAP Authentication Backend"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {mod, {rabbit_auth_backend_ldap_app, []}},
@@ -9,6 +9,7 @@
           {user_dn_pattern,       "${username}"},
           {dn_lookup_attribute,   none},
           {dn_lookup_base,        none},
+          {group_lookup_base,     none},
           {dn_lookup_bind,        as_user},
           {other_bind,            as_user},
           {vhost_access_query,    {constant, true}},
@@ -17,7 +18,8 @@
           {use_ssl,               false},
           {use_starttls,          false},
           {ssl_options,           []},
-          {port,                  389},
+          {port,                  3890},
           {timeout,               infinity},
-          {log,                   false} ] },
-  {applications, [kernel, stdlib, eldap, rabbit]}]}.
+          {log,                   false},
+          {pool_size,             64} ] },
+  {applications, [kernel, stdlib, eldap, rabbit_common, rabbit]}]}.
diff --git a/deps/rabbitmq_auth_mechanism_ssl/CODE_OF_CONDUCT.md b/deps/rabbitmq_auth_mechanism_ssl/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_auth_mechanism_ssl/CONTRIBUTING.md b/deps/rabbitmq_auth_mechanism_ssl/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_auth_mechanism_ssl/Makefile b/deps/rabbitmq_auth_mechanism_ssl/Makefile
new file mode 100644 (file)
index 0000000..c10616c
--- /dev/null
@@ -0,0 +1,14 @@
+PROJECT = rabbitmq_auth_mechanism_ssl
+
+DEPS = rabbit_common rabbit
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
similarity index 75%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/README
rename to deps/rabbitmq_auth_mechanism_ssl/README.md
index 4eb4f14bc27502eecc6192cc3c1428fb41eecb3f..8d659ec64e86423dab25b42422c471caf33903d5 100644 (file)
@@ -1,3 +1,5 @@
+# x509 (TLS/SSL) certificate Authentication Mechanism for RabbitMQ
+
 Authenticates the user, obtaining the username from the client's
 SSL certificate. The user's password is not checked.
 
@@ -5,15 +7,19 @@ In order to use this mechanism the client must connect over SSL, and
 present a client certificate.
 
 The mechanism must also be enabled in RabbitMQ's configuration file -
-see http://www.rabbitmq.com/authentication.html for more details, or
+see [TLS Authentication guide](http://www.rabbitmq.com/authentication.html) for more details, or
 in short, ensure that the 'rabbit' section of your configuration
 contains:
 
+``` erlang
 {auth_mechanisms, ['PLAIN', 'AMQPLAIN', 'EXTERNAL']}
+```
 
 to allow this mechanism in addition to the defaults, or:
 
+``` erlang
 {auth_mechanisms, ['EXTERNAL']}
+```
 
 to allow only this mechanism.
 
@@ -27,18 +33,30 @@ produced by OpenSSL's "-nameopt RFC2253" option.
 
 You can obtain this string form from a certificate with a command like:
 
-$ openssl x509 -in path/to/cert.pem -nameopt RFC2253 -subject -noout
+```
+openssl x509 -in path/to/cert.pem -nameopt RFC2253 -subject -noout
+```
 
 or from an existing amqps connection with commands like:
 
-$ rabbitmqctl list_connections peer_cert_subject
+```
+rabbitmqctl list_connections peer_cert_subject
+```
 
 To use the Common Name instead, ensure that the 'rabbit' section of
 your configuration contains:
 
+```
 {ssl_cert_login_from, common_name}
+```
 
 Note that the authenticated user will then be looked up in the
 configured authentication / authorisation backend(s) - this will be
 the mnesia-based user database by default, but could include other
 backends if so configured.
+
+## Copyright & License
+
+(c) Pivotal Software Inc., 2007 â€” 2015.
+
+Released under the same license as RabbitMQ.
diff --git a/deps/rabbitmq_auth_mechanism_ssl/erlang.mk b/deps/rabbitmq_auth_mechanism_ssl/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards `in` and `beetween` for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl: Erlang snippet (expanded by make, executed via the
+# 'erlang' helper defined elsewhere in this file) that rewrites a fetched
+# dependency's ebin/$1.app so its {modules, ...} entry lists every .erl
+# module found recursively under the dep's src/ directory. $1/$(1) is the
+# dependency name. No-op when the .app file does not exist.
+# NOTE: no Erlang %-comments may be added inside this define — the snippet's
+# newlines are collapsed when it is passed to erl.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluates a dependency's dynamic
+# src/$1.app.src.script (rebar-style generated app.src) with empty bindings
+# and writes the resulting term back as a plain, static src/$1.app.src.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalizes a dependency's .app.src for the
+# Erlang.mk build: empties {modules, ...} (repopulated at build time), turns
+# {vsn, git} into the literal string "git", and adds {registered, []} when
+# missing. When src/$1.app.src is absent it uses ebin/$1.app as input and
+# deletes that input after writing the normalized src/$1.app.src.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Dependency fetch methods. Each dep_fetch_<method> define expands to shell
+# commands that place the dependency in $(DEPS_DIR)/<name>. The dep_repo,
+# dep_name and dep_commit helpers are defined earlier in this file.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Plain local copy; dep_repo here is a filesystem path rather than a URL.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Erlang snippet: downloads $(1)-$(2).tar from the hex.pm S3 mirror over
+# HTTPS and extracts the inner contents.tar.gz into the dep directory.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback used when no fetch method matches; exit code 78 (cf. EX_CONFIG).
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolves which fetch method to use for dependency $(1): first word of
+# dep_<name> when it names a known dep_fetch_* method; 'legacy' for old-style
+# deps when building as a dependency (IS_DEP); the packaged fetch method for
+# entries in $(PACKAGES); 'fail' otherwise.
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# dep_target: instantiated (via $(eval)) once per dependency; generates the
+# rule that fetches the dep into $(DEPS_DIR), runs autoreconf/configure when
+# autotools files are present, and autopatches it unless listed in
+# NO_AUTOPATCH. Refuses to proceed if a same-named application already exists
+# under $(APPS_DIR) (exit 17). The amqp_client/rabbit branches additionally
+# clone rabbitmq-codegen / rabbitmq-server when the RABBITMQ_*_PATCH
+# variables are set — RabbitMQ-specific build support.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate a fetch/build rule for every build-time and runtime dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into each application in $(ALL_APPS_DIRS), but only
+# from the top-level invocation (IS_APP guards against infinite recursion).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean removes the whole dependency tree unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin: -includes a plugin makefile from $(DEPS_DIR)/$(1) and makes
+# it depend on its owning dependency $(2) being fetched first.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# Entries with a '/' are explicit plugin paths (dep inferred from the first
+# path component); bare entries load <dep>/plugins.mk.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL plugin: compiles Django-style *.dtl templates under $(DTL_PATH)
+# into ebin/<name>$(DTL_SUFFIX).beam modules.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH, module names encode the template's relative path
+# ('/' replaced by '_'); otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet that invokes erlydtl:compile/3 for each changed template
+# passed in $(1) (space-separated file list).
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# $? limits compilation to templates newer than the .app file.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin: generates Erlang sources from src/*.proto using
+# protobuffs_compile, then compiles and removes the intermediate .erl files.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Erlang snippet: emits generated sources to <dir>/ebin and headers to
+# <dir>/include for each .proto file in $(1).
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default erlc flags; -Werror is stripped later when building as a dependency.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# Convention: <tool>_verbose_0 prints a terse one-liner (V=0),
+# <tool>_verbose_2 traces commands with 'set -x' (V=2), and the unsuffixed
+# variable selects by the current $(V).
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+# Core application build: only active when the project has a src/ directory.
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# ebin/test marks a previous test build; if present, force a clean rebuild so
+# TEST-flavored beams do not leak into a normal build.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file: template for a generated ebin/$(PROJECT).app. $(1) is the git
+# describe string, $(2) the module list. The library variant (no
+# src/$(PROJECT_MOD).erl) omits {mod, ...} and registered processes.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# Compile ASN.1 specs; generated .erl goes to src/, headers to include/.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl: scans every .erl in the project for -behaviour, parse_transform,
+# -include/-include_lib and -import attributes, and writes $(1) (the
+# $(PROJECT).d file) containing per-file dependency rules plus a COMPILE_FIRST
+# ordering derived from a topological sort of the module digraph.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Regenerate the depend file unless NO_MAKEDEP is set and it already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+	$(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+		touch -c $(PROJECT).d; \
+	fi
+	@touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+	$(verbose) mkdir -p ebin/
+
+# Compile Erlang sources; -Werror is dropped when built as a dependency so a
+# dep's warnings do not fail the parent build.
+define compile_erl
+	$(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+		-pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Main build rule: compiles changed sources, then either generates
+# ebin/$(PROJECT).app from the app_file template or rewrites the existing
+# src/$(PROJECT).app.src, filling in {modules, []} and a git-describe id.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+	$(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+	$(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+	$(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+	$(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+		> ebin/$(PROJECT).app
+else
+	$(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+		echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+		exit 1; \
+	fi
+	$(appsrc_verbose) cat src/$(PROJECT).app.src \
+		| sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+		| sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+		> ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+	$(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+		$(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+		$(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Documentation-only dependencies: fetched and built on demand by doc targets.
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# SKIP_DEPS turns doc-deps into a no-op.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Release-only dependencies (e.g. relx); mirrors the doc-deps section above.
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+# Test builds get debug_info + warnings (no -Werror) and define the TEST macro.
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+# Compile test sources in place under $(TEST_DIR).
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+	$(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+		$(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# test-build: three flavors. No src/: tests only. With src/ and no previous
+# test build (no ebin/test marker): clean first so app beams get TEST opts,
+# then mark with ebin/test. Marker already present: incremental rebuild.
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+	$(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# rebar compatibility: generates a rebar.config mirroring this project's
+# DEPS and ERLC_OPTS so the project can also be built by rebar.
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Keeps only +flag style options (dropping -Werror and other -opts) and
+# strips the leading '+' for use inside the erl_opts list.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+	$(if $(findstring +,$1),\
+		$(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Hex deps become {Name, "Version"} entries; everything else becomes a
+# {Name, ".*", {git, Repo, Commit}} entry.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+	$(if $(filter hex,$(call dep_fetch,$d)),\
+		{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+		{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export the rendered config through the environment so the multi-line value
+# survives into the shell recipe below.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+	$(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# AsciiDoc plugin: builds the user guide (PDF + chunked HTML) from
+# doc/src/guide/book.asciidoc and man pages from doc/src/manual/*.asciidoc
+# using a2x, and installs the gzipped man pages.
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+	a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+	a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+	for f in doc/src/manual/*.asciidoc ; do \
+		a2x -v -f manpage $$f ; \
+	done
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p doc/man$$s/ ; \
+		mv doc/src/manual/*.$$s doc/man$$s/ ; \
+		gzip doc/man$$s/*.$$s ; \
+	done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+		install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+	done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+	$(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Bootstrap targets:" \
+		"  bootstrap          Generate a skeleton of an OTP application" \
+		"  bootstrap-lib      Generate a skeleton of an OTP library" \
+		"  bootstrap-rel      Generate the files needed to build a release" \
+		"  new-app in=NAME    Create a new local OTP application NAME" \
+		"  new-lib in=NAME    Create a new local OTP library NAME" \
+		"  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+		"  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+		"  list-templates     List available templates"
+
+# Bootstrap templates. $p is the project name, set by the bootstrap targets
+# (defined elsewhere in this file).
+
+# src/<name>.app.src for a full OTP application (has an application callback).
+define bs_appsrc
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]},
+	{mod, {$p_app, []}},
+	{env, []}
+]}.
+endef
+
+# src/<name>.app.src for a library application (no {mod, ...} entry).
+define bs_appsrc_lib
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile template for an app living under $(APPS_DIR) of an umbrella
+# project; includes the shared erlang.mk via a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# Application behaviour callback module template.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+	$p_sup:start_link().
+
+stop(_State) ->
+	ok.
+endef
+
+# relx configuration template for bootstrap-rel.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+# NOTE(review): module templates rendered by the 'new' target; $(n) is the
+# module name passed as n=NAME. Variables named tpl_* are also how
+# list-templates discovers the available templates, so keep the prefix.
+
+# supervisor behaviour skeleton (empty child list, one_for_one).
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# gen_server behaviour skeleton with all callbacks stubbed.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+# Bare module template.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy 1.x plain HTTP handler template.
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# gen_fsm behaviour skeleton with a single state_name state.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+# Cowboy 1.x loop handler template (5s timeout, hibernates between messages).
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Cowboy 1.x REST handler template serving text/html.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy 1.x websocket handler template (echoes text/binary frames).
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Ranch protocol template (spawned acceptor that loops forever).
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render template variable $(1) into file $(2). The nested subst calls
+# escape single quotes, double '%' for printf, replace tabs with $(WS) and
+# turn newlines into literal \n so the whole template survives one printf.
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace written in place of template tabs: with SP=N set,
+# N spaces (built by word-listing N copies of "a" and deleting the letters);
+# otherwise a literal tab is kept.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Create a skeleton OTP application in the current directory. Refuses to run
+# if src/ already exists. LEGACY additionally generates a .app.src file.
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Same as bootstrap but for a library application: no app/sup modules.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config and the rel/ directory with sys.config and vm.args.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application named in=APP under $(APPS_DIR).
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library application named in=APP under $(APPS_DIR).
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Generate src/NAME.erl from template tpl_$(t). With in=APP, re-invoke make
+# inside that application's directory (in= cleared to avoid recursion).
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# Template names are derived from the tpl_* variables defined above.
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+# C_SRC_TYPE selects the link mode below: "shared" builds a NIF-style .so,
+# anything else an executable. C_SRC_ENV caches Erlang include/lib paths.
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+# Per-platform compiler defaults. Note msys2 assigns CC unconditionally
+# (and exports it); the other platforms use ?= so the environment wins.
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+# ERTS/erl_interface paths come from $(C_SRC_ENV), generated further below.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three cases: no c_src/ directory -> nothing to do; c_src/Makefile exists
+# -> delegate the whole build to it; otherwise use the built-in C/C++ rules.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+# Built-in rules: collect sources (unless SOURCES is preset) and compile
+# each to an object file next to it.
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link step; -shared is only passed when C_SRC_TYPE is "shared".
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# Ask the local Erlang runtime for its ERTS and erl_interface paths and
+# cache them in env.mk, which is then included on subsequent runs.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# NOTE(review): file templates for 'new-nif'; $n is the NIF name. As with
+# the other templates, do not put makefile comments inside define bodies.
+
+# C side of a skeleton NIF: load/upgrade/unload housekeeping plus a single
+# hello/1 function that tags an atom argument or returns {error, badarg}.
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+# Erlang side of the skeleton NIF: locates priv/ (also when running from a
+# build tree) and loads the shared object on module load.
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Generate a C NIF stub and its Erlang wrapper; with in=APP, delegate to
+# that application's Makefile (in= cleared to avoid recursion).
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+# With CI_OTP empty, 'ci' is a no-op; otherwise one ci-VERSION target per
+# listed OTP version is generated below via $(eval).
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# ci-$(1): run the full test suite with the installed OTP $(1) on PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# Build and install OTP version $(1) with kerl, unless already installed.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+# CT_SUITES defaults to every *_SUITE.erl found under $(TEST_DIR).
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+# Base ct_run invocation; suites and options are appended per target.
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+# With no suites, 'ct' only recurses into apps (unless already in one).
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+# One apps-ct-DIR target per application directory; IS_APP=1 stops the
+# child make from recursing into apps again.
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=GROUP or t=GROUP:CASE narrows a single-suite run (ct-SUITE targets).
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+# ct-SUITE: run one suite, honouring CT_EXTRA.
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Erlang snippet run via 'erl -eval': keeps only the -D/-I/-pa options (and
+# their arguments) from the plain arguments, so dialyzer sees the relevant
+# subset of ERLC_OPTS. \\$$ escapes a literal $ character for make+shell.
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# The recipe after the conditional is shared by both 'dialyze' variants;
+# only the prerequisite (rebuild the PLT if missing) differs.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+# Only hook edoc into 'docs' when an overview file is present.
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+# Regenerates from scratch each time (distclean-edoc first).
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+# NOTE(review): the trailing backslash after the last help line is a stray
+# shell line-continuation into the end of the recipe; it is harmless.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+# Erlang one-liner (quoted per line, joined below) that zips static files,
+# collects beams + sys.config, and emits an executable escript archive.
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+# Join the quoted fragments above into one erl -eval argument.
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+# Erlang snippet: optionally cover-compile ebin, run eunit on $1 (a module
+# list or a fun), export coverdata when COVER is set. Exit codes: 1 = cover
+# compile failure, 2 = test failure.
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+# t=module runs one module; t=module:fun runs a single 0-arity test fun;
+# otherwise run every ebin module plus non-duplicate *_tests test modules.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+# Keep RELX_OUTPUT_DIR in sync with a user-supplied '-o DIR' in RELX_OPTS,
+# or append our own -o if none was given.
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+# Only hook into 'rel' for the top-level project with a relx.config.
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+# Download the relx binary on first use.
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+# Extract the release name from relx.config so 'run' can find the script.
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+# Generate fetch targets for SHELL_DEPS via the shared dep_target template.
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Only active when triq is listed as a (test) dependency.
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+# Erlang snippet: $(1) selects the mode (all/module/function), $(2) the
+# module or fun, $(3) the module list for 'all'. Exits 0 on success.
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+# t=module checks one module, t=module:fun one property, otherwise all
+# compiled modules in ebin/ are checked.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+# With COVER set and the ct plugin loaded, generate a cover spec covering
+# every module in ebin/ and pass it to ct_run.
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+# After the test targets run, build the HTML report (unless disabled by an
+# empty COVER_REPORT_DIR).
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk b/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl.erl
rename to deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl
index 47cfcab6e9f7ab87cd65c7a2f731b7e97fc59cc4..b7dac3fc635704abe6e144dfcda32ac83d69b290 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl_app.erl
rename to deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl
index 7f6eff95419ce796f0589749cdfe13a44f890935..159ccccb71dd54051a032e8ea67fbb1958b47769 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_auth_mechanism_ssl_app).
similarity index 76%
rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbitmq_auth_mechanism_ssl.app.src
rename to deps/rabbitmq_auth_mechanism_ssl/src/rabbitmq_auth_mechanism_ssl.app.src
index 400b3b04990dd0c13d9fce660236d9c7fd813e13..d40920c9789577c8fe33315a3bdabd2fbd230bc1 100644 (file)
@@ -1,9 +1,9 @@
 %% -*- erlang -*-
 {application, rabbitmq_auth_mechanism_ssl,
  [{description, "RabbitMQ SSL authentication (SASL EXTERNAL)"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {mod, {rabbit_auth_mechanism_ssl_app, []}},
   {env, [{name_from, distinguished_name}] },
-  {applications, [kernel, stdlib]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit]}]}.
diff --git a/deps/rabbitmq_codegen/CODE_OF_CONDUCT.md b/deps/rabbitmq_codegen/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_codegen/CONTRIBUTING.md b/deps/rabbitmq_codegen/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
similarity index 64%
rename from rabbitmq-server/codegen/Makefile
rename to deps/rabbitmq_codegen/Makefile
index 8e028c8db9970339d072e52d02bc2214b0943eed..7b27ec776eaf26d5b655df2ce2613940b1ec8133 100644 (file)
@@ -1,5 +1,5 @@
 all:
-       @echo "Please select a target from the Makefile."
+       @:
 
 clean:
        rm -f *.pyc
similarity index 99%
rename from rabbitmq-server/codegen/amqp-rabbitmq-0.8.json
rename to deps/rabbitmq_codegen/amqp-rabbitmq-0.8.json
index 35f88566f7b61aaaef156105a9a66f9b1bc40ffd..09504151f6db6ff20683983a9aae30bd8737a1da 100644 (file)
@@ -4,7 +4,7 @@
     "minor-version": 0,
     "port": 5672,
     "copyright": [
-        "Copyright (C) 2008-2013 GoPivotal, Inc.\n",
+        "Copyright (C) 2008-2016 Pivotal Software, Inc.\n",
         "\n",
         "Permission is hereby granted, free of charge, to any person\n",
         "obtaining a copy of this file (the \"Software\"), to deal in the\n",
similarity index 99%
rename from rabbitmq-server/codegen/amqp-rabbitmq-0.9.1.json
rename to deps/rabbitmq_codegen/amqp-rabbitmq-0.9.1.json
index 0c3ee2af92bc85150907c24a35d5ef3d9b0e175e..2f4b10a6556db43a0b86e1a18cb7a9fc96a18e3c 100644 (file)
@@ -5,7 +5,7 @@
     "revision": 1,
     "port": 5672,
     "copyright": [
-        "Copyright (C) 2008-2013 GoPivotal, Inc.\n",
+        "Copyright (C) 2008-2016 Pivotal Software, Inc.\n",
         "\n",
         "Permission is hereby granted, free of charge, to any person\n",
         "obtaining a copy of this file (the \"Software\"), to deal in the\n",
similarity index 92%
rename from rabbitmq-server/codegen/amqp_codegen.py
rename to deps/rabbitmq_codegen/amqp_codegen.py
index 2623a5d426c94418fc51cc7c27800cc653623e6b..e4de0e0a7390d7bcc1afddb4f839c92819a5c303 100644 (file)
@@ -15,6 +15,7 @@
 ##
 
 from __future__ import nested_scopes
+import errno
 import re
 import sys
 import os
@@ -23,7 +24,7 @@ from optparse import OptionParser
 try:
     try:
         import simplejson as json
-    except ImportError, e:
+    except ImportError as e:
         if sys.hexversion >= 0x20600f0:
             import json
         else:
@@ -63,13 +64,13 @@ def extension_info_merger(key, acc, new, ignore_conflicts):
 def domains_merger(key, acc, new, ignore_conflicts):
     merged = dict((k, v) for [k, v] in acc)
     for [k, v] in new:
-        if merged.has_key(k):
+        if k in merged:
             if not ignore_conflicts:
                 raise AmqpSpecFileMergeConflict(key, acc, new)
         else:
             merged[k] = v
 
-    return [[k, v] for (k, v) in merged.iteritems()]
+    return [[k, v] for (k, v) in merged.items()]
 
 def merge_dict_lists_by(dict_key, acc, new, ignore_conflicts):
     acc_index = set(v[dict_key] for v in acc)
@@ -123,12 +124,12 @@ def merge_load_specs(filenames, ignore_conflicts):
     docs = [json.load(handle) for handle in handles]
     spec = {}
     for doc in docs:
-        for (key, value) in doc.iteritems():
+        for (key, value) in doc.items():
             (merger, default_value) = mergers.get(key, (default_spec_value_merger, None))
             spec[key] = merger(key, spec.get(key, default_value), value, ignore_conflicts)
     for handle in handles: handle.close()
     return spec
-        
+
 class AmqpSpec:
     # Slight wart: use a class member rather than change the ctor signature
     # to avoid breaking everyone else's code.
@@ -139,7 +140,7 @@ class AmqpSpec:
 
         self.major = self.spec['major-version']
         self.minor = self.spec['minor-version']
-        self.revision = self.spec.has_key('revision') and self.spec['revision'] or 0
+        self.revision = 'revision' in self.spec and self.spec['revision'] or 0
         self.port =  self.spec['port']
 
         self.domains = {}
@@ -149,7 +150,7 @@ class AmqpSpec:
 
         self.constants = []
         for d in self.spec['constants']:
-            if d.has_key('class'):
+            if 'class' in d:
                 klass = d['class']
             else:
                 klass = ''
@@ -158,10 +159,10 @@ class AmqpSpec:
         self.classes = []
         for element in self.spec['classes']:
             self.classes.append(AmqpClass(self, element))
-        
+
     def allClasses(self):
         return self.classes
-    
+
     def allMethods(self):
         return [m for c in self.classes for m in c.allMethods()]
 
@@ -172,7 +173,7 @@ class AmqpEntity:
     def __init__(self, element):
         self.element = element
         self.name = element['name']
-    
+
 class AmqpClass(AmqpEntity):
     def __init__(self, spec, element):
         AmqpEntity.__init__(self, element)
@@ -190,12 +191,12 @@ class AmqpClass(AmqpEntity):
                 break
 
         self.fields = []
-        if self.element.has_key('properties'):
+        if 'properties' in self.element:
             index = 0
             for e in self.element['properties']:
                 self.fields.append(AmqpField(self, e, index))
                 index = index + 1
-            
+
     def allMethods(self):
         return self.methods
 
@@ -207,11 +208,11 @@ class AmqpMethod(AmqpEntity):
         AmqpEntity.__init__(self, element)
         self.klass = klass
         self.index = int(self.element['id'])
-        if self.element.has_key('synchronous'):
+        if 'synchronous' in self.element:
             self.isSynchronous = self.element['synchronous']
         else:
             self.isSynchronous = False
-        if self.element.has_key('content'):
+        if 'content' in self.element:
             self.hasContent = self.element['content']
         else:
             self.hasContent = False
@@ -221,7 +222,7 @@ class AmqpMethod(AmqpEntity):
         for argument in element['arguments']:
             self.arguments.append(AmqpField(self, argument, index))
             index = index + 1
-        
+
     def __repr__(self):
         return 'AmqpMethod("' + self.klass.name + "." + self.name + '" ' + repr(self.arguments) + ')'
 
@@ -231,12 +232,12 @@ class AmqpField(AmqpEntity):
         self.method = method
         self.index = index
 
-        if self.element.has_key('type'):
+        if 'type' in self.element:
             self.domain = self.element['type']
         else:
             self.domain = self.element['domain']
-            
-        if self.element.has_key('default-value'):
+
+        if 'default-value' in self.element:
             self.defaultvalue = self.element['default-value']
         else:
             self.defaultvalue = None
@@ -253,8 +254,18 @@ def do_main_dict(funcDict):
         print >> sys.stderr , "  %s <function> <path_to_amqp_spec.json>... <path_to_output_file>" % (sys.argv[0])
         print >> sys.stderr , " where <function> is one of %s" % ", ".join([k for k in funcDict.keys()])
 
+    def mkdir_p(path):
+        try:
+            os.makedirs(path)
+        except OSError as exc:  # Python >2.5
+            if exc.errno == errno.EEXIST and os.path.isdir(path):
+                pass
+            else:
+                raise
+
     def execute(fn, amqp_specs, out_file):
         stdout = sys.stdout
+        mkdir_p(os.path.dirname(out_file))
         f = open(out_file, 'w')
         success = False
         try:
@@ -279,7 +290,7 @@ def do_main_dict(funcDict):
         sources = args[1:-1]
         dest = args[-1]
         AmqpSpec.ignore_conflicts = options.ignore_conflicts
-        if funcDict.has_key(function):
+        if function in funcDict:
             execute(funcDict[function], sources, dest)
         else:
             usage()
similarity index 97%
rename from rabbitmq-server/codegen/credit_extension.json
rename to deps/rabbitmq_codegen/credit_extension.json
index b74391feb3a6c97be76b5ca5c433bc27ecad0658..dd4805c22ebb27d24a9bf8ae689693e3679f212d 100644 (file)
@@ -9,7 +9,7 @@
             "hence you are strongly discouraged from building clients ",
             "which use it."],
         "copyright": [
-            "Copyright (C) 2008-2013 GoPivotal, Inc.\n",
+            "Copyright (C) 2008-2016 Pivotal Software, Inc.\n",
             "\n",
             "Permission is hereby granted, free of charge, to any person\n",
             "obtaining a copy of this file (the \"Software\"), to deal in the\n",
diff --git a/deps/rabbitmq_codegen/license_info b/deps/rabbitmq_codegen/license_info
new file mode 100644 (file)
index 0000000..a703cbd
--- /dev/null
@@ -0,0 +1,4 @@
+The files amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are
+"Copyright (C) 2008-2016 Pivotal Software, Inc." and are covered by the MIT
+license.
+
diff --git a/deps/rabbitmq_consistent_hash_exchange/CODE_OF_CONDUCT.md b/deps/rabbitmq_consistent_hash_exchange/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_consistent_hash_exchange/CONTRIBUTING.md b/deps/rabbitmq_consistent_hash_exchange/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE-MPL-RabbitMQ
rename to deps/rabbitmq_consistent_hash_exchange/LICENSE-MPL-RabbitMQ
index 99428fe44a5810561cdcf929b51d917bc73ff83a..9faaa4ee19902e3e01170b548a3cfce13b59e880 100644 (file)
@@ -447,7 +447,7 @@ EXHIBIT A -Mozilla Public License.
      The Original Code is RabbitMQ Consistent Hash Exchange.
 
      The Initial Developer of the Original Code is GoPivotal, Inc.
-     Copyright (c) 2011-2014 GoPivotal, Inc.  All rights reserved.''
+     Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.''
 
      [NOTE: The text of this Exhibit A may differ slightly from the text of
      the notices in the Source Code files of the Original Code. You should
diff --git a/deps/rabbitmq_consistent_hash_exchange/Makefile b/deps/rabbitmq_consistent_hash_exchange/Makefile
new file mode 100644 (file)
index 0000000..b9c1f08
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_consistent_hash_exchange
+
+DEPS = rabbit_common rabbit
+TEST_DEPS = rabbitmq_ct_helpers amqp_client
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_consistent_hash_exchange/README.md b/deps/rabbitmq_consistent_hash_exchange/README.md
new file mode 100644 (file)
index 0000000..e3e82ce
--- /dev/null
@@ -0,0 +1,199 @@
+# RabbitMQ Consistent Hash Exchange Type
+
+## What it Does
+
+This plugin adds a consistent-hash exchange type to RabbitMQ.
+
+In various scenarios, you may wish to ensure that messages sent to an
+exchange are consistently and equally distributed across a number of
+different queues based on the routing key of the message, a nominated
+header  (see "Routing on a header" below), or a message property (see
+"Routing on a message property" below). You could arrange for this to
+occur yourself by using a  direct  or topic exchange, binding queues
+to that exchange and then publishing messages to that exchange that
+match the various binding keys.
+
+However, arranging things this way can be problematic:
+
+1. It is difficult to ensure that all queues bound to the exchange
+will receive a (roughly) equal number of messages without baking in to
+the publishers quite a lot of knowledge about the number of queues and
+their bindings.
+
+2. If the number of queues changes, it is not easy to ensure that the
+new topology still distributes messages between the different queues
+evenly.
+
+[Consistent Hashing](http://en.wikipedia.org/wiki/Consistent_hashing)
+is a hashing technique whereby each bucket appears at multiple points
+throughout the hash space, and the bucket selected is the nearest
+higher (or lower, it doesn't matter, provided it's consistent) bucket
+to the computed hash (and the hash space wraps around). The effect of
+this is that when a new bucket is added or an existing bucket removed,
+only a very few hashes change which bucket they are routed to.
+
+## How It Works
+
+In the case of Consistent Hashing as an exchange type, the hash is
+calculated from the hash of the routing key of each message
+received. Thus messages that have the same routing key will have the
+same hash computed, and thus will be routed to the same queue,
+assuming no bindings have changed.
+
+When you bind a queue to a consistent-hash exchange, the binding key
+is a number-as-a-string which indicates the number of points in the
+hash space at which you wish the queue to appear. The actual points
+are generated randomly.
+
+The hashing distributes *routing keys* among queues, not *messages*
+among queues; all messages with the same routing key will go the
+same queue.  So, if you wish for queue A to receive twice as many
+routing keys routed to it than are routed to queue B, then you bind
+the queue A with a binding key of twice the number (as a string --
+binding keys are always strings) of the binding key of the binding
+to queue B.  Note this is only the case if your routing keys are
+evenly distributed in the hash space.  If, for example, only two
+distinct routing keys are used on all the messages, there's a chance
+both keys will route (consistently!) to the same queue, even though
+other queues have higher values in their binding key.  With a larger
+set of routing keys used, the statistical distribution of routing
+keys approaches the ratios of the binding keys.
+
+Each message gets delivered to at most one queue. Normally, each
+message gets delivered to exactly one queue, but there is a race
+between the determination of which queue to send a message to, and the
+deletion/death of that queue that does permit the possibility of the
+message being sent to a queue which then disappears before the message
+is processed. Hence in general, at most one queue.
+
+The exchange type is "x-consistent-hash".
+
+## Supported RabbitMQ Versions
+
+This plugin supports RabbitMQ 3.3.x and later versions.
+
+
+## Examples
+
+### Erlang
+
+Here is an example using the Erlang client:
+
+```erlang
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+test() ->
+    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
+    {ok, Chan} = amqp_connection:open_channel(Conn),
+    Queues = [<<"q0">>, <<"q1">>, <<"q2">>, <<"q3">>],
+    amqp_channel:call(Chan,
+                  #'exchange.declare' {
+                    exchange = <<"e">>, type = <<"x-consistent-hash">>
+                  }),
+    [amqp_channel:call(Chan, #'queue.declare' { queue = Q }) || Q <- Queues],
+    [amqp_channel:call(Chan, #'queue.bind' { queue = Q,
+                                             exchange = <<"e">>,
+                                             routing_key = <<"10">> })
+        || Q <- [<<"q0">>, <<"q1">>]],
+    [amqp_channel:call(Chan, #'queue.bind' { queue = Q,
+                                             exchange = <<"e">>,
+                                             routing_key = <<"20">> })
+        || Q <- [<<"q2">>, <<"q3">>]],
+    Msg = #amqp_msg { props = #'P_basic'{}, payload = <<>> },
+    [amqp_channel:call(Chan,
+                   #'basic.publish'{
+                     exchange = <<"e">>,
+                     routing_key = list_to_binary(
+                                     integer_to_list(
+                                       random:uniform(1000000)))
+                   }, Msg) || _ <- lists:seq(1,100000)],
+amqp_connection:close(Conn),
+ok.
+```
+
+As you can see, the queues `q0` and `q1` get bound each with 10 points
+in the hash space to the exchange `e` which means they'll each get
+roughly the same number of routing keys. The queues `q2` and `q3`
+however, get 20 points each which means they'll each get roughly the
+same number of routing keys too, but that will be approximately twice
+as many as `q0` and `q1`. We then publish 100,000 messages to our
+exchange with random routing keys, the queues will get their share of
+messages roughly equal to the binding keys ratios. After this has
+completed, running `rabbitmqctl list_queues` should show that the
+messages have been distributed approximately as desired.
+
+Note the `routing_key`s in the bindings are numbers-as-strings. This
+is because AMQP specifies the routing_key must be a string.
+
+The more points in the hash space each binding has, the closer the
+actual distribution will be to the desired distribution (as indicated
+by the ratio of points by binding). However, large numbers of points
+(many thousands) will substantially decrease performance of the
+exchange type.
+
+Equally, it is important to ensure that the messages being published
+to the exchange have a range of different `routing_key`s: if a very
+small set of routing keys are being used then there's a possibility of
+messages not being evenly distributed between the various queues. If
+the routing key is a pseudo-random session ID or such, then good
+results should follow.
+
+## Routing on a header
+
+Under most circumstances the routing key is a good choice for something to
+hash. However, in some cases you need to use the routing key for some other
+purpose (for example with more complex routing involving exchange to
+exchange bindings). In this case you can configure the consistent hash
+exchange to route based on a named header instead. To do this, declare the
+exchange with a string argument called "hash-header" naming the header to
+be used. For example using the Erlang client as above:
+
+```erlang
+    amqp_channel:call(
+      Chan, #'exchange.declare' {
+              exchange  = <<"e">>,
+              type      = <<"x-consistent-hash">>,
+              arguments = [{<<"hash-header">>, longstr, <<"hash-me">>}]
+            }).
+```
+
+If you specify "hash-header" and then publish messages without the named
+header, they will all get routed to the same (arbitrarily-chosen) queue.
+
+## Routing on a message property
+
+In addition to a value in the header property, you can also route on the
+``message_id``, ``correlation_id``, or ``timestamp`` message property. To do so,
+declare the exchange with a string argument called "hash-property" naming the
+property to be used. For example using the Erlang client as above:
+
+```erlang
+    amqp_channel:call(
+      Chan, #'exchange.declare' {
+              exchange  = <<"e">>,
+              type      = <<"x-consistent-hash">>,
+              arguments = [{<<"hash-property">>, longstr, <<"message_id">>}]
+            }).
+```
+
+Note that you can not declare an exchange that routes on both "hash-header" and
+"hash-property". If you specify "hash-property" and then publish messages without
+a value in the named property, they will all get routed to the same
+(arbitrarily-chosen) queue.
+
+## Getting Help
+
+Any comments or feedback welcome, to the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+## Continuous Integration
+
+[![Build Status](https://travis-ci.org/rabbitmq/rabbitmq-consistent-hash-exchange.svg?branch=master)](https://travis-ci.org/rabbitmq/rabbitmq-consistent-hash-exchange)
+
+## Copyright and License
+
+(c) 2013-2015 Pivotal Software Inc.
+
+Released under the Mozilla Public License 1.1, same as RabbitMQ.
+See [LICENSE](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange/blob/master/LICENSE) for
+details.
diff --git a/deps/rabbitmq_consistent_hash_exchange/erlang.mk b/deps/rabbitmq_consistent_hash_exchange/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simply writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards 'and'/'or' for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = A embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print: print a human-readable summary of package $(1) from the
+# pkg_* index above (app name, description, homepage, fetch method,
+# repository, commit). The "Pkg name" line is emitted only when the
+# index key differs from the package's app name. The trailing blank
+# line inside the define separates entries in `make search` output.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# search: with q=<term>, list every package whose name or description
+# contains the term (case-insensitive, via core_lc); without q, dump
+# the whole package index.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies listed here are excluded from the build entirely.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Make rebar-based builds share our dependency directory.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name/dep_repo/dep_commit: resolve dependency $(1)'s effective
+# name, repository URL and commit. A user-supplied dep_* variable
+# ("fetch-method repo commit") takes precedence over the built-in
+# pkg_* index; git:// GitHub URLs are rewritten to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# All immediate subdirectories of APPS_DIR, and one DEPS_DIR entry per
+# non-ignored build/runtime dependency (after name resolution).
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Ensure apps and deps are on the Erlang code path (ERL_LIBS), without
+# duplicating entries that are already there.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity: V=0 prints a short " DEP" tag, V=2 traces shell commands.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# apps: build every application under APPS_DIR. apps.log records apps
+# already built during this run so each is built at most once; IS_APP=1
+# (set on the recursive $(MAKE) below) suppresses further recursion.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# deps: build every fetched dependency exactly once per run (deps.log
+# is the recursion guard, mirroring apps.log above). SKIP_DEPS
+# non-empty turns this target into a no-op; a dependency directory
+# without any flavor of Makefile is a hard error (exit 2).
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch: choose how to adapt dependency $(1) to erlang.mk.
+# - dep already uses erlang.mk: refresh its .app.src and erlang.mk;
+# - dep has a Makefile that includes ../*.mk or mentions rebar
+#   (in the Makefile itself or any other *.mk file): full conversion
+#   via dep_autopatch2;
+# - dep has no src/ directory: stub it out with a noop Makefile;
+# - otherwise: only regenerate the .app file.
+# (No comments inside the define: its body is spliced into a shell
+# command sequence when expanded.)
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2: regenerate $(1)'s .app.src (evaluating any
+# .app.src.script first) and replace its build: rebar-based deps get
+# a pinned rebar fetched and their config translated to erlang.mk
+# (dep_autopatch_rebar); anything else gets a generated Makefile.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop: give $(1) a Makefile whose default target does
+# nothing, effectively excluding it from the build.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# When NO_AUTOPATCH_ERLANG_MK is empty (the default), replace the
+# dep's own erlang.mk with a one-line include of ours (via a relative
+# path); otherwise leave the dep's copy untouched (the ':' no-op).
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen: write a minimal Makefile for $(1) that delegates
+# the whole build to the top-level erlang.mk.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: clone and build a pinned rebar revision
+# into $(ERLANG_MK_TMP)/rebar; skipped when that checkout already
+# exists, so rebar is fetched at most once per project.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar: keep the dep's original Makefile around as
+# Makefile.orig.mk (some rebar hooks still invoke it), translate its
+# rebar config into an erlang.mk Makefile (dep_autopatch_rebar.erl),
+# and delete any prebuilt ebin/$(1).app so it is regenerated.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+# .app.src skeleton for a regular OTP application (written only when
+# LEGACY is set); $p is substituted with the project name by
+# render_template. No comments may be added inside the define: the body
+# is written verbatim into the generated file.
+define bs_appsrc
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]},
+	{mod, {$p_app, []}},
+	{env, []}
+]}.
+endef
+
+# Same skeleton for a library application: no {mod, ...} callback and
+# no env, since a library is never started.
+define bs_appsrc_lib
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+# Two variants of the generated top-level Makefile: when SP is set the
+# generated project records the same SP so its own templates use spaces.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile skeleton for an application living under $(APPS_DIR); it
+# includes the top-level erlang.mk through a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+	$p_sup:start_link().
+
+stop(_State) ->
+	ok.
+endef
+
+# relx release configuration skeleton.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+# Empty sys.config skeleton.
+define bs_sys_config
+[
+].
+endef
+
+# vm.args skeleton: long node name, cookie and heartbeat monitoring.
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+# Each tpl_* variable below is an Erlang module skeleton rendered with
+# $(n) substituted for the requested module name. Comments must not be
+# added inside the defines: their bodies are emitted verbatim.
+
+# one_for_one supervisor skeleton with an empty child list.
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+	supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+	Procs = [],
+	{ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# Minimal gen_server skeleton with all callbacks stubbed out.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+	{ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+	{reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+	{noreply, State}.
+
+handle_info(_Info, State) ->
+	{noreply, State}.
+
+terminate(_Reason, _State) ->
+	ok.
+
+code_change(_OldVsn, State, _Extra) ->
+	{ok, State}.
+endef
+
+# Bare module skeleton.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy 1.x HTTP handler skeleton (replies 200 to every request).
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+	{ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+	{ok, Req2} = cowboy_req:reply(200, Req),
+	{ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+# gen_fsm skeleton with a single state_name state and stubbed callbacks.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+	{ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+	{next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+	{next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+	{reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+	{reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+	{next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+	ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+	{ok, StateName, StateData}.
+endef
+
+# Cowboy 1.x loop handler skeleton (5s timeout, hibernates between
+# messages).
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+	{loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+	{loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+# Cowboy 1.x REST handler skeleton serving a static HTML body.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+	{upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+	{[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+	{<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy 1.x websocket handler skeleton that echoes text/binary frames.
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+	{upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+	Req2 = cowboy_req:compact(Req),
+	{ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+	{reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+	{reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+	{ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+	{ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+# ranch protocol skeleton; note the generated loop/1 busy-loops and is
+# meant to be replaced by real receive logic.
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+	socket :: inet:socket(),
+	transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+	Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+	{ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+	ok = ranch:accept_ack(Ref),
+	loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+	loop(State).
+endef
+
+# Plugin-specific targets.
+
+# render_template: expand the template variable named by $(1) and write
+# it to file $(2). The nested subst calls escape newlines, '%' (printf
+# format), single quotes (shell quoting) and replace hard tabs with the
+# configured whitespace $(WS).
+define render_template
+	$(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the indentation written into generated files: $(SP) spaces when
+# SP is set (max 20), otherwise a hard tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Generate a full OTP application skeleton in the current directory:
+# Makefile, src/, application callback and top supervisor. Refuses to
+# run if src/ already exists. $(eval p/n) sets the variables consumed
+# by the bs_*/tpl_* templates above.
+bootstrap:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(eval n := $(PROJECT)_sup)
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+	$(call render_template,bs_app,src/$(PROJECT)_app.erl)
+	$(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Same as bootstrap but for a library: no application callback module
+# and no supervisor are generated.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config plus rel/sys.config and rel/vm.args; refuses to
+# overwrite existing release files.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+	$(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+	$(error Error: rel/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_relx_config,relx.config)
+	$(verbose) mkdir rel/
+	$(call render_template,bs_sys_config,rel/sys.config)
+	$(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application skeleton under $(APPS_DIR)/$in.
+new-app:
+ifndef in
+	$(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(eval n := $(in)_sup)
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+	$(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+	$(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library skeleton under $(APPS_DIR)/$in (no app callback).
+new-lib:
+ifndef in
+	$(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Render a single module from template $t as src/$n.erl; with in=APP
+# the request is forwarded to that application's own Makefile.
+new:
+ifeq ($(wildcard src/)$(in),)
+	$(error Error: src/ directory does not exist)
+endif
+ifndef t
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+	$(error Unknown template)
+endif
+ifndef n
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+	$(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# List every template by scanning .VARIABLES for the tpl_ prefix.
+list-templates:
+	$(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+# C_SRC_ENV caches the Erlang include/lib paths discovered at build
+# time; C_SRC_OUTPUT is the produced NIF/port binary (extension added
+# below based on C_SRC_TYPE and platform).
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+	CC = /mingw64/bin/gcc
+	export CC
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+	LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+	CC ?= gcc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+# Position-independent code is required for shared objects everywhere
+# except Windows.
+ifneq ($(PLATFORM),msys2)
+	CFLAGS += -fPIC
+	CXXFLAGS += -fPIC
+endif
+
+# Header/library paths come from $(C_SRC_ENV), generated further below.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three cases: no c_src/ at all (nothing to do), c_src/ with its own
+# Makefile (delegate), or bare sources (compile them ourselves).
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+	$(MAKE) -C $(C_SRC_DIR)
+
+clean::
+	$(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link all objects; -shared is only passed for C_SRC_TYPE=shared.
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+	$(verbose) mkdir -p priv/
+	$(link_verbose) $(CC) $(OBJECTS) \
+		$(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+		-o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+	$(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+	$(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# Ask the Erlang VM for its erts/erl_interface paths and cache them in
+# $(C_SRC_ENV), which is then -included so the CFLAGS above resolve.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+	$(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+		io_lib:format( \
+			\"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+			\"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+			\"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+			[code:root_dir(), erlang:system_info(version), \
+			code:lib_dir(erl_interface, include), \
+			code:lib_dir(erl_interface, lib)])), \
+		halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+	$(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# C NIF skeleton: load/upgrade/unload lifecycle with a load counter and
+# an example hello/1 NIF. Rendered verbatim; do not add comments inside.
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+	/* Initialize private data. */
+	*priv_data = NULL;
+
+	loads++;
+
+	return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+	/* Convert the private data to the new version. */
+	*priv_data = *old_priv_data;
+
+	loads++;
+
+	return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+	if (loads == 1) {
+		/* Destroy the private data. */
+	}
+
+	loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+	if (enif_is_atom(env, argv[0])) {
+		return enif_make_tuple2(env,
+			enif_make_atom(env, "hello"),
+			argv[0]);
+	}
+
+	return enif_make_tuple2(env,
+		enif_make_atom(env, "error"),
+		enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+	{"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+# Matching Erlang stub: loads the shared object from priv/ on module
+# load, falling back to a path relative to the beam when priv_dir fails.
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+	PrivDir = case code:priv_dir(?MODULE) of
+		{error, _} ->
+			AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+			filename:join(AppPath, "priv");
+		Path ->
+			Path
+	end,
+	erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+	erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Render both NIF skeletons for module $n; forwards to the app's own
+# Makefile when in=APP is given.
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+	$(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+	$(error Error: src/$n.erl already exists)
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+	$(verbose) mkdir -p $(C_SRC_DIR) src/
+	$(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+	$(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+# kerl is downloaded on demand and used to build/install each Erlang
+# version listed in CI_OTP under CI_INSTALL_DIR.
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+# With no CI_OTP configured, ci is a no-op; otherwise one ci-<version>
+# target per entry is generated via $(eval).
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# ci-<version>: run the test suite with that Erlang first in PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+	$(ci_verbose) \
+		PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+		CI_OTP_RELEASE="$(1)" \
+		CT_OPTS="-label $(1)" \
+		$(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# $(CI_INSTALL_DIR)/<version>: build & install that OTP via kerl, only
+# when not already present.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+	$(KERL) build git $(OTP_GIT) $(1) $(1)
+	$(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+	$(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+	$(verbose) chmod +x $(KERL)
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Continuous Integration targets:" \
+		"  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+		"" \
+		"The CI_OTP variable must be defined with the Erlang versions" \
+		"that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+	$(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+# CT_SUITES defaults to every *_SUITE.erl found under $(TEST_DIR).
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+	CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+	CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Common_test targets:" \
+		"  ct          Run all the common_test suites for this project" \
+		"" \
+		"All your common_test suites have their associated targets." \
+		"A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+	-no_auto_compile \
+	-noinput \
+	-pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+	-dir $(TEST_DIR) \
+	-logdir $(CURDIR)/logs
+
+# ct runs every configured suite; per-app suites run via apps-ct unless
+# we are already inside an app (IS_APP=1).
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+	$(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=GROUP or t=GROUP:CASE narrows a ct-<suite> run to a group and case.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+# One ct-<suite> target per suite.
+define ct_suite_target
+ct-$(1): test-build
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+	$(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Dialyzer targets:" \
+		"  plt         Build a PLT file for this project" \
+		"  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Keep only the -D/-I/-pa options (and their arguments) from the plain
+# command-line arguments, so ERLC_OPTS can be forwarded to dialyzer.
+define filter_opts.erl
+	Opts = init:get_plain_arguments(),
+	{Filtered, _} = lists:foldl(fun
+		(O,                         {Os, true}) -> {[O|Os], false};
+		(O = "-D",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-I",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-pa",                 {Os, _})    -> {[O|Os], true};
+		(_,                         Acc)        -> Acc
+	end, {[], false}, Opts),
+	io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+	halt().
+endef
+
+$(DIALYZER_PLT): deps app
+	$(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+	$(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# dialyze only depends on the PLT when it doesn't exist yet; the recipe
+# below belongs to whichever dialyze rule was selected.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+	$(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+# Only hook edoc into docs when the project has an overview page.
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+	$(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+	$(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+# Globs below are spliced as Erlang list elements into ESCRIPT_RAW, so
+# they must stay quoted Erlang strings.
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+	-sasl errlog_type error \
+	-escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+# BUGFIX: the last printf argument ended with a stray " \" which made
+# the following blank line part of the recipe (and would silently
+# swallow any line added after it); the dangling continuation is gone.
+help::
+	$(verbose) printf "%s\n" "" \
+		"Escript targets:" \
+		"  escript     Build an executable escript archive"
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"EUnit targets:" \
+		"  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+# Erlang driver: optionally cover-compile ebin, run the given test
+# spec ($1), export coverdata when COVER is set; exit codes 1/2 signal
+# cover-compile and test failures respectively.
+define eunit.erl
+	case "$(COVER)" of
+		"" -> ok;
+		_ ->
+			case cover:compile_beam_directory("ebin") of
+				{error, _} -> halt(1);
+				_ -> ok
+			end
+	end,
+	case eunit:test($1, [$(EUNIT_OPTS)]) of
+		ok -> ok;
+		error -> halt(2)
+	end,
+	case "$(COVER)" of
+		"" -> ok;
+		_ ->
+			cover:export("eunit.coverdata")
+	end,
+	halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+# With t=module run that module's tests; with t=module:fun run the
+# single generator; otherwise run every module, excluding *_tests
+# modules that eunit already picks up implicitly.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+	$(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+	$(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+	$(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+	$(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+	$(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+# Honour a user-supplied "-o DIR" in RELX_OPTS; otherwise append ours.
+ifeq ($(firstword $(RELX_OPTS)),-o)
+	RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+	RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+# Only the top-level project (not a fetched dep) with a relx.config
+# builds a release.
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+	$(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+	$(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+	$(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+	$(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+	$(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+# Extract the release name from relx.config so we can locate the
+# generated start script.
+define get_relx_release.erl
+	{ok, Config} = file:consult("$(RELX_CONFIG)"),
+	{release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+	io:format("~s", [Name]),
+	halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+	$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Relx targets:" \
+		"  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Shell targets:" \
+		"  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+# Generate fetch targets for SHELL_DEPS, build them, then start erl
+# with every ebin directory on the code path.
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+	$(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# The triq plugin is only active when triq is listed as a dependency.
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+# Erlang driver: check all properties, one module, or one property
+# function depending on $(1); exits 0 on success, 1 on failure.
+define triq_check.erl
+	code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+	try
+		case $(1) of
+			all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+			module -> triq:check($(2));
+			function -> triq:check($(2))
+		end
+	of
+		true -> halt(0);
+		_ -> halt(1)
+	catch error:undef ->
+		io:format("Undefined property or module~n"),
+		halt(0)
+	end.
+endef
+
+# t=module checks one module; t=module:fun checks one property;
+# otherwise every beam in ebin/ is checked.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+	$(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+	$(verbose) echo Testing $(t)/0
+	$(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+	$(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+# Pass "-c FILE" to xrefr only when a config file is configured.
+ifeq ($(XREF_CONFIG),)
+	XREFR_ARGS :=
+else
+	XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+# BUGFIX: in a recipe "$XREF_CONFIG" is parsed by make as the variable
+# $X (almost always empty) followed by the literal text "REF_CONFIG",
+# so the help printed "using REF_CONFIG as config file". Use the full
+# $(XREF_CONFIG) reference instead.
+help::
+	$(verbose) printf "%s\n" "" \
+		"Xref targets:" \
+		"  xref        Run Xrefr using $(XREF_CONFIG) as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+# xrefr is downloaded on demand, like kerl and relx above.
+$(XREFR):
+	$(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+	$(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+	$(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+	$(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+# When COVER is set, generate a ct cover spec covering every module in
+# ebin/ and make ct_run use it.
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+	$(verbose) echo Cover mods: $(COVER_MODS)
+	$(gen_verbose) printf "%s\n" \
+		'{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+		'{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+	$(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Cover targets:" \
+		"  cover-report  Generate a HTML coverage report from previously collected" \
+		"                cover data." \
+		"  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+		"" \
+		"If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+		"target tests additionally generates a HTML coverage report from the combined" \
+		"coverdata files from each of these testing tools. HTML reports can be disabled" \
+		"by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+	$(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+	$(gen_verbose) $(ERL) -eval ' \
+		$(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+		cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+	$(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk b/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 73%
rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbit_exchange_type_consistent_hash.erl
rename to deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl
index 36b4cf4518b4b2bd7b34b6c683de1cbb63cc257b..4113f0157cf399197de089fcc3bf5f870710a7c1 100644 (file)
 %% The Original Code is RabbitMQ Consistent Hash Exchange.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2011-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_exchange_type_consistent_hash).
 -include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
 
 -behaviour(rabbit_exchange_type).
 
@@ -46,6 +47,7 @@
 
 -define(TABLE, ?MODULE).
 -define(PHASH2_RANGE, 134217728). %% 2^27
+-define(PROPERTIES, [<<"correlation_id">>, <<"message_id">>, <<"timestamp">>]).
 
 description() ->
     [{description, <<"Consistent Hashing Exchange">>}].
@@ -67,8 +69,7 @@ route(#exchange { name      = Name,
     %% end up as relatively deep data structures which cost a lot to
     %% continually copy to the process heap. Consequently, such
     %% approaches have not been found to be much faster, if at all.
-    HashOn = rabbit_misc:table_lookup(Args, <<"hash-header">>),
-    H = erlang:phash2(hash(HashOn, Msg), ?PHASH2_RANGE),
+    H = erlang:phash2(hash(hash_on(Args), Msg), ?PHASH2_RANGE),
     case ets:select(?TABLE, [{#bucket { source_number = {Name, '$2'},
                                         destination   = '$1',
                                         _             = '_' },
@@ -84,7 +85,25 @@ route(#exchange { name      = Name,
             Destinations
     end.
 
-validate(_X) -> ok.
+validate(#exchange { arguments = Args }) ->
+    case hash_args(Args) of
+        {undefined, undefined} ->
+            ok;
+        {undefined, {_Type, Value}} ->
+            case lists:member(Value, ?PROPERTIES) of
+                true  -> ok;
+                false ->
+                    rabbit_misc:protocol_error(precondition_failed,
+                                               "Unsupported property: ~s",
+                                               [Value])
+            end;
+        {_, undefined} ->
+            ok;
+        {_, _} ->
+            rabbit_misc:protocol_error(precondition_failed,
+                                       "hash-header and hash-property are mutually exclusive",
+                                       [])
+    end.
 
 validate_binding(_X, #binding { key = K }) ->
     try
@@ -160,7 +179,7 @@ init() ->
 find_numbers(_Source, 0, Acc) ->
     Acc;
 find_numbers(Source, N, Acc) ->
-    Number = random:uniform(?PHASH2_RANGE) - 1,
+    Number = rand_compat:uniform(?PHASH2_RANGE) - 1,
     case mnesia:read(?TABLE, {Source, Number}, write) of
         []  -> find_numbers(Source, N-1, [Number | Acc]);
         [_] -> find_numbers(Source, N, Acc)
@@ -168,9 +187,43 @@ find_numbers(Source, N, Acc) ->
 
 hash(undefined, #basic_message { routing_keys = Routes }) ->
     Routes;
-hash({longstr, Header}, #basic_message { content = Content }) ->
+hash({header, Header}, #basic_message { content = Content }) ->
     Headers = rabbit_basic:extract_headers(Content),
     case Headers of
         undefined -> undefined;
         _         -> rabbit_misc:table_lookup(Headers, Header)
+    end;
+hash({property, Property}, #basic_message { content = Content }) ->
+    #content{properties = #'P_basic'{ correlation_id = CorrId,
+                                      message_id     = MsgId,
+                                      timestamp      = Timestamp }} =
+        rabbit_binary_parser:ensure_content_decoded(Content),
+    case Property of
+        <<"correlation_id">> -> CorrId;
+        <<"message_id">>     -> MsgId;
+        <<"timestamp">>      ->
+            case Timestamp of
+                undefined -> undefined;
+                _         -> integer_to_binary(Timestamp)
+            end
+    end.
+
+hash_args(Args) ->
+    Header =
+        case rabbit_misc:table_lookup(Args, <<"hash-header">>) of
+            undefined     -> undefined;
+            {longstr, V1} -> {header, V1}
+        end,
+    Property =
+        case rabbit_misc:table_lookup(Args, <<"hash-property">>) of
+            undefined     -> undefined;
+            {longstr, V2} -> {property, V2}
+        end,
+    {Header, Property}.
+
+hash_on(Args) ->
+    case hash_args(Args) of
+        {undefined, undefined} -> undefined;
+        {Header, undefined}    -> Header;
+        {undefined, Property}  -> Property
     end.
similarity index 65%
rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbitmq_consistent_hash_exchange.app.src
rename to deps/rabbitmq_consistent_hash_exchange/src/rabbitmq_consistent_hash_exchange.app.src
index ab7aab494631201a996ffbbd8633420bbc6a622e..4d9618c3e2bdb393d2cc1b4e212b483cb4e587e4 100644 (file)
@@ -1,7 +1,7 @@
 {application, rabbitmq_consistent_hash_exchange,
  [{description, "Consistent Hash Exchange Type"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {env, []},
-  {applications, [rabbit]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit]}]}.
diff --git a/deps/rabbitmq_event_exchange/CODE_OF_CONDUCT.md b/deps/rabbitmq_event_exchange/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_event_exchange/CONTRIBUTING.md b/deps/rabbitmq_event_exchange/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_event_exchange/LICENSE b/deps/rabbitmq_event_exchange/LICENSE
new file mode 100644 (file)
index 0000000..d810614
--- /dev/null
@@ -0,0 +1,5 @@
+This package, the RabbitMQ Event Exchange is licensed under
+the MPL. For the MPL, please see LICENSE-MPL-RabbitMQ.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbitmq_event_exchange/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_event_exchange/LICENSE-MPL-RabbitMQ
new file mode 100644 (file)
index 0000000..9faaa4e
--- /dev/null
@@ -0,0 +1,455 @@
+                          MOZILLA PUBLIC LICENSE
+                                Version 1.1
+
+                              ---------------
+
+1. Definitions.
+
+     1.0.1. "Commercial Use" means distribution or otherwise making the
+     Covered Code available to a third party.
+
+     1.1. "Contributor" means each entity that creates or contributes to
+     the creation of Modifications.
+
+     1.2. "Contributor Version" means the combination of the Original
+     Code, prior Modifications used by a Contributor, and the Modifications
+     made by that particular Contributor.
+
+     1.3. "Covered Code" means the Original Code or Modifications or the
+     combination of the Original Code and Modifications, in each case
+     including portions thereof.
+
+     1.4. "Electronic Distribution Mechanism" means a mechanism generally
+     accepted in the software development community for the electronic
+     transfer of data.
+
+     1.5. "Executable" means Covered Code in any form other than Source
+     Code.
+
+     1.6. "Initial Developer" means the individual or entity identified
+     as the Initial Developer in the Source Code notice required by Exhibit
+     A.
+
+     1.7. "Larger Work" means a work which combines Covered Code or
+     portions thereof with code not governed by the terms of this License.
+
+     1.8. "License" means this document.
+
+     1.8.1. "Licensable" means having the right to grant, to the maximum
+     extent possible, whether at the time of the initial grant or
+     subsequently acquired, any and all of the rights conveyed herein.
+
+     1.9. "Modifications" means any addition to or deletion from the
+     substance or structure of either the Original Code or any previous
+     Modifications. When Covered Code is released as a series of files, a
+     Modification is:
+          A. Any addition to or deletion from the contents of a file
+          containing Original Code or previous Modifications.
+
+          B. Any new file that contains any part of the Original Code or
+          previous Modifications.
+
+     1.10. "Original Code" means Source Code of computer software code
+     which is described in the Source Code notice required by Exhibit A as
+     Original Code, and which, at the time of its release under this
+     License is not already Covered Code governed by this License.
+
+     1.10.1. "Patent Claims" means any patent claim(s), now owned or
+     hereafter acquired, including without limitation,  method, process,
+     and apparatus claims, in any patent Licensable by grantor.
+
+     1.11. "Source Code" means the preferred form of the Covered Code for
+     making modifications to it, including all modules it contains, plus
+     any associated interface definition files, scripts used to control
+     compilation and installation of an Executable, or source code
+     differential comparisons against either the Original Code or another
+     well known, available Covered Code of the Contributor's choice. The
+     Source Code can be in a compressed or archival form, provided the
+     appropriate decompression or de-archiving software is widely available
+     for no charge.
+
+     1.12. "You" (or "Your")  means an individual or a legal entity
+     exercising rights under, and complying with all of the terms of, this
+     License or a future version of this License issued under Section 6.1.
+     For legal entities, "You" includes any entity which controls, is
+     controlled by, or is under common control with You. For purposes of
+     this definition, "control" means (a) the power, direct or indirect,
+     to cause the direction or management of such entity, whether by
+     contract or otherwise, or (b) ownership of more than fifty percent
+     (50%) of the outstanding shares or beneficial ownership of such
+     entity.
+
+2. Source Code License.
+
+     2.1. The Initial Developer Grant.
+     The Initial Developer hereby grants You a world-wide, royalty-free,
+     non-exclusive license, subject to third party intellectual property
+     claims:
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Initial Developer to use, reproduce,
+          modify, display, perform, sublicense and distribute the Original
+          Code (or portions thereof) with or without Modifications, and/or
+          as part of a Larger Work; and
+
+          (b) under Patents Claims infringed by the making, using or
+          selling of Original Code, to make, have made, use, practice,
+          sell, and offer for sale, and/or otherwise dispose of the
+          Original Code (or portions thereof).
+
+          (c) the licenses granted in this Section 2.1(a) and (b) are
+          effective on the date Initial Developer first distributes
+          Original Code under the terms of this License.
+
+          (d) Notwithstanding Section 2.1(b) above, no patent license is
+          granted: 1) for code that You delete from the Original Code; 2)
+          separate from the Original Code;  or 3) for infringements caused
+          by: i) the modification of the Original Code or ii) the
+          combination of the Original Code with other software or devices.
+
+     2.2. Contributor Grant.
+     Subject to third party intellectual property claims, each Contributor
+     hereby grants You a world-wide, royalty-free, non-exclusive license
+
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Contributor, to use, reproduce, modify,
+          display, perform, sublicense and distribute the Modifications
+          created by such Contributor (or portions thereof) either on an
+          unmodified basis, with other Modifications, as Covered Code
+          and/or as part of a Larger Work; and
+
+          (b) under Patent Claims infringed by the making, using, or
+          selling of  Modifications made by that Contributor either alone
+          and/or in combination with its Contributor Version (or portions
+          of such combination), to make, use, sell, offer for sale, have
+          made, and/or otherwise dispose of: 1) Modifications made by that
+          Contributor (or portions thereof); and 2) the combination of
+          Modifications made by that Contributor with its Contributor
+          Version (or portions of such combination).
+
+          (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
+          effective on the date Contributor first makes Commercial Use of
+          the Covered Code.
+
+          (d)    Notwithstanding Section 2.2(b) above, no patent license is
+          granted: 1) for any code that Contributor has deleted from the
+          Contributor Version; 2)  separate from the Contributor Version;
+          3)  for infringements caused by: i) third party modifications of
+          Contributor Version or ii)  the combination of Modifications made
+          by that Contributor with other software  (except as part of the
+          Contributor Version) or other devices; or 4) under Patent Claims
+          infringed by Covered Code in the absence of Modifications made by
+          that Contributor.
+
+3. Distribution Obligations.
+
+     3.1. Application of License.
+     The Modifications which You create or to which You contribute are
+     governed by the terms of this License, including without limitation
+     Section 2.2. The Source Code version of Covered Code may be
+     distributed only under the terms of this License or a future version
+     of this License released under Section 6.1, and You must include a
+     copy of this License with every copy of the Source Code You
+     distribute. You may not offer or impose any terms on any Source Code
+     version that alters or restricts the applicable version of this
+     License or the recipients' rights hereunder. However, You may include
+     an additional document offering the additional rights described in
+     Section 3.5.
+
+     3.2. Availability of Source Code.
+     Any Modification which You create or to which You contribute must be
+     made available in Source Code form under the terms of this License
+     either on the same media as an Executable version or via an accepted
+     Electronic Distribution Mechanism to anyone to whom you made an
+     Executable version available; and if made available via Electronic
+     Distribution Mechanism, must remain available for at least twelve (12)
+     months after the date it initially became available, or at least six
+     (6) months after a subsequent version of that particular Modification
+     has been made available to such recipients. You are responsible for
+     ensuring that the Source Code version remains available even if the
+     Electronic Distribution Mechanism is maintained by a third party.
+
+     3.3. Description of Modifications.
+     You must cause all Covered Code to which You contribute to contain a
+     file documenting the changes You made to create that Covered Code and
+     the date of any change. You must include a prominent statement that
+     the Modification is derived, directly or indirectly, from Original
+     Code provided by the Initial Developer and including the name of the
+     Initial Developer in (a) the Source Code, and (b) in any notice in an
+     Executable version or related documentation in which You describe the
+     origin or ownership of the Covered Code.
+
+     3.4. Intellectual Property Matters
+          (a) Third Party Claims.
+          If Contributor has knowledge that a license under a third party's
+          intellectual property rights is required to exercise the rights
+          granted by such Contributor under Sections 2.1 or 2.2,
+          Contributor must include a text file with the Source Code
+          distribution titled "LEGAL" which describes the claim and the
+          party making the claim in sufficient detail that a recipient will
+          know whom to contact. If Contributor obtains such knowledge after
+          the Modification is made available as described in Section 3.2,
+          Contributor shall promptly modify the LEGAL file in all copies
+          Contributor makes available thereafter and shall take other steps
+          (such as notifying appropriate mailing lists or newsgroups)
+          reasonably calculated to inform those who received the Covered
+          Code that new knowledge has been obtained.
+
+          (b) Contributor APIs.
+          If Contributor's Modifications include an application programming
+          interface and Contributor has knowledge of patent licenses which
+          are reasonably necessary to implement that API, Contributor must
+          also include this information in the LEGAL file.
+
+               (c)    Representations.
+          Contributor represents that, except as disclosed pursuant to
+          Section 3.4(a) above, Contributor believes that Contributor's
+          Modifications are Contributor's original creation(s) and/or
+          Contributor has sufficient rights to grant the rights conveyed by
+          this License.
+
+     3.5. Required Notices.
+     You must duplicate the notice in Exhibit A in each file of the Source
+     Code.  If it is not possible to put such notice in a particular Source
+     Code file due to its structure, then You must include such notice in a
+     location (such as a relevant directory) where a user would be likely
+     to look for such a notice.  If You created one or more Modification(s)
+     You may add your name as a Contributor to the notice described in
+     Exhibit A.  You must also duplicate this License in any documentation
+     for the Source Code where You describe recipients' rights or ownership
+     rights relating to Covered Code.  You may choose to offer, and to
+     charge a fee for, warranty, support, indemnity or liability
+     obligations to one or more recipients of Covered Code. However, You
+     may do so only on Your own behalf, and not on behalf of the Initial
+     Developer or any Contributor. You must make it absolutely clear than
+     any such warranty, support, indemnity or liability obligation is
+     offered by You alone, and You hereby agree to indemnify the Initial
+     Developer and every Contributor for any liability incurred by the
+     Initial Developer or such Contributor as a result of warranty,
+     support, indemnity or liability terms You offer.
+
+     3.6. Distribution of Executable Versions.
+     You may distribute Covered Code in Executable form only if the
+     requirements of Section 3.1-3.5 have been met for that Covered Code,
+     and if You include a notice stating that the Source Code version of
+     the Covered Code is available under the terms of this License,
+     including a description of how and where You have fulfilled the
+     obligations of Section 3.2. The notice must be conspicuously included
+     in any notice in an Executable version, related documentation or
+     collateral in which You describe recipients' rights relating to the
+     Covered Code. You may distribute the Executable version of Covered
+     Code or ownership rights under a license of Your choice, which may
+     contain terms different from this License, provided that You are in
+     compliance with the terms of this License and that the license for the
+     Executable version does not attempt to limit or alter the recipient's
+     rights in the Source Code version from the rights set forth in this
+     License. If You distribute the Executable version under a different
+     license You must make it absolutely clear that any terms which differ
+     from this License are offered by You alone, not by the Initial
+     Developer or any Contributor. You hereby agree to indemnify the
+     Initial Developer and every Contributor for any liability incurred by
+     the Initial Developer or such Contributor as a result of any such
+     terms You offer.
+
+     3.7. Larger Works.
+     You may create a Larger Work by combining Covered Code with other code
+     not governed by the terms of this License and distribute the Larger
+     Work as a single product. In such a case, You must make sure the
+     requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+
+     If it is impossible for You to comply with any of the terms of this
+     License with respect to some or all of the Covered Code due to
+     statute, judicial order, or regulation then You must: (a) comply with
+     the terms of this License to the maximum extent possible; and (b)
+     describe the limitations and the code they affect. Such description
+     must be included in the LEGAL file described in Section 3.4 and must
+     be included with all distributions of the Source Code. Except to the
+     extent prohibited by statute or regulation, such description must be
+     sufficiently detailed for a recipient of ordinary skill to be able to
+     understand it.
+
+5. Application of this License.
+
+     This License applies to code to which the Initial Developer has
+     attached the notice in Exhibit A and to related Covered Code.
+
+6. Versions of the License.
+
+     6.1. New Versions.
+     Netscape Communications Corporation ("Netscape") may publish revised
+     and/or new versions of the License from time to time. Each version
+     will be given a distinguishing version number.
+
+     6.2. Effect of New Versions.
+     Once Covered Code has been published under a particular version of the
+     License, You may always continue to use it under the terms of that
+     version. You may also choose to use such Covered Code under the terms
+     of any subsequent version of the License published by Netscape. No one
+     other than Netscape has the right to modify the terms applicable to
+     Covered Code created under this License.
+
+     6.3. Derivative Works.
+     If You create or use a modified version of this License (which you may
+     only do in order to apply it to code which is not already Covered Code
+     governed by this License), You must (a) rename Your license so that
+     the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
+     "MPL", "NPL" or any confusingly similar phrase do not appear in your
+     license (except to note that your license differs from this License)
+     and (b) otherwise make it clear that Your version of the license
+     contains terms which differ from the Mozilla Public License and
+     Netscape Public License. (Filling in the name of the Initial
+     Developer, Original Code or Contributor in the notice described in
+     Exhibit A shall not of themselves be deemed to be modifications of
+     this License.)
+
+7. DISCLAIMER OF WARRANTY.
+
+     COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+     WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+     DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
+     THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
+     IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
+     YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
+     COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
+     OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
+     ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+
+     8.1.  This License and the rights granted hereunder will terminate
+     automatically if You fail to comply with terms herein and fail to cure
+     such breach within 30 days of becoming aware of the breach. All
+     sublicenses to the Covered Code which are properly granted shall
+     survive any termination of this License. Provisions which, by their
+     nature, must remain in effect beyond the termination of this License
+     shall survive.
+
+     8.2.  If You initiate litigation by asserting a patent infringement
+     claim (excluding declatory judgment actions) against Initial Developer
+     or a Contributor (the Initial Developer or Contributor against whom
+     You file such action is referred to as "Participant")  alleging that:
+
+     (a)  such Participant's Contributor Version directly or indirectly
+     infringes any patent, then any and all rights granted by such
+     Participant to You under Sections 2.1 and/or 2.2 of this License
+     shall, upon 60 days notice from Participant terminate prospectively,
+     unless if within 60 days after receipt of notice You either: (i)
+     agree in writing to pay Participant a mutually agreeable reasonable
+     royalty for Your past and future use of Modifications made by such
+     Participant, or (ii) withdraw Your litigation claim with respect to
+     the Contributor Version against such Participant.  If within 60 days
+     of notice, a reasonable royalty and payment arrangement are not
+     mutually agreed upon in writing by the parties or the litigation claim
+     is not withdrawn, the rights granted by Participant to You under
+     Sections 2.1 and/or 2.2 automatically terminate at the expiration of
+     the 60 day notice period specified above.
+
+     (b)  any software, hardware, or device, other than such Participant's
+     Contributor Version, directly or indirectly infringes any patent, then
+     any rights granted to You by such Participant under Sections 2.1(b)
+     and 2.2(b) are revoked effective as of the date You first made, used,
+     sold, distributed, or had made, Modifications made by that
+     Participant.
+
+     8.3.  If You assert a patent infringement claim against Participant
+     alleging that such Participant's Contributor Version directly or
+     indirectly infringes any patent where such claim is resolved (such as
+     by license or settlement) prior to the initiation of patent
+     infringement litigation, then the reasonable value of the licenses
+     granted by such Participant under Sections 2.1 or 2.2 shall be taken
+     into account in determining the amount or value of any payment or
+     license.
+
+     8.4.  In the event of termination under Sections 8.1 or 8.2 above,
+     all end user license agreements (excluding distributors and resellers)
+     which have been validly granted by You or any distributor hereunder
+     prior to termination shall survive termination.
+
+9. LIMITATION OF LIABILITY.
+
+     UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
+     (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
+     DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
+     OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
+     ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
+     CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
+     WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
+     COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
+     INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
+     LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
+     RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+     PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
+     EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
+     THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
+
+10. U.S. GOVERNMENT END USERS.
+
+     The Covered Code is a "commercial item," as that term is defined in
+     48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
+     software" and "commercial computer software documentation," as such
+     terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
+     C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
+     all U.S. Government End Users acquire Covered Code with only those
+     rights set forth herein.
+
+11. MISCELLANEOUS.
+
+     This License represents the complete agreement concerning subject
+     matter hereof. If any provision of this License is held to be
+     unenforceable, such provision shall be reformed only to the extent
+     necessary to make it enforceable. This License shall be governed by
+     California law provisions (except to the extent applicable law, if
+     any, provides otherwise), excluding its conflict-of-law provisions.
+     With respect to disputes in which at least one party is a citizen of,
+     or an entity chartered or registered to do business in the United
+     States of America, any litigation relating to this License shall be
+     subject to the jurisdiction of the Federal Courts of the Northern
+     District of California, with venue lying in Santa Clara County,
+     California, with the losing party responsible for costs, including
+     without limitation, court costs and reasonable attorneys' fees and
+     expenses. The application of the United Nations Convention on
+     Contracts for the International Sale of Goods is expressly excluded.
+     Any law or regulation which provides that the language of a contract
+     shall be construed against the drafter shall not apply to this
+     License.
+
+12. RESPONSIBILITY FOR CLAIMS.
+
+     As between Initial Developer and the Contributors, each party is
+     responsible for claims and damages arising, directly or indirectly,
+     out of its utilization of rights under this License and You agree to
+     work with Initial Developer and Contributors to distribute such
+     responsibility on an equitable basis. Nothing herein is intended or
+     shall be deemed to constitute any admission of liability.
+
+13. MULTIPLE-LICENSED CODE.
+
+     Initial Developer may designate portions of the Covered Code as
+     "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
+     Developer permits you to utilize portions of the Covered Code under
+     Your choice of the NPL or the alternative licenses, if any, specified
+     by the Initial Developer in the file described in Exhibit A.
+
+EXHIBIT A -Mozilla Public License.
+
+     ``The contents of this file are subject to the Mozilla Public License
+     Version 1.1 (the "License"); you may not use this file except in
+     compliance with the License. You may obtain a copy of the License at
+     http://www.mozilla.org/MPL/
+
+     Software distributed under the License is distributed on an "AS IS"
+     basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+     License for the specific language governing rights and limitations
+     under the License.
+
+     The Original Code is RabbitMQ Event Exchange.
+
+     The Initial Developer of the Original Code is GoPivotal, Inc.
+     Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.''
+
+     [NOTE: The text of this Exhibit A may differ slightly from the text of
+     the notices in the Source Code files of the Original Code. You should
+     use the text of this Exhibit A rather than the text found in the
+     Original Code Source Code for Your Modifications.]
diff --git a/deps/rabbitmq_event_exchange/Makefile b/deps/rabbitmq_event_exchange/Makefile
new file mode 100644 (file)
index 0000000..ea0157f
--- /dev/null
@@ -0,0 +1,14 @@
+PROJECT = rabbitmq_event_exchange
+
+DEPS = rabbit_common rabbit
+TEST_DEPS = rabbitmq_ct_helpers amqp_client
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_event_exchange/README.md b/deps/rabbitmq_event_exchange/README.md
new file mode 100644 (file)
index 0000000..9878bdc
--- /dev/null
@@ -0,0 +1,136 @@
+# RabbitMQ Event Exchange
+
+## Overview
+
+This plugin exposes the internal RabbitMQ event mechanism as messages that clients
+can consume. It's useful
+if you want to keep track of certain events, e.g. when queues, exchanges, bindings, users,
+connections, channels are created and deleted. This plugin filters out stats
+events, so you are almost certainly going to get better results using
+the management plugin for stats.
+
+## How it Works
+
+It declares a topic exchange called 'amq.rabbitmq.event' in the default
+virtual host. All events are published to this exchange with routing
+keys like 'exchange.created', 'binding.deleted' etc, so you can
+subscribe to only the events you're interested in.
+
+The exchange behaves similarly to 'amq.rabbitmq.log': everything gets
+published there; if you don't trust a user with the information that
+gets published, don't allow them access.
+
+The plugin requires no configuration, just activate it:
+
+    rabbitmq-plugins enable rabbitmq_event_exchange
+
+
+## Downloading
+
+You can download a pre-built binary of this plugin from
+the [RabbitMQ Community Plugins](http://www.rabbitmq.com/community-plugins.html) page.
+
+
+## Building
+
+Building is no different from [building other RabbitMQ plugins](http://www.rabbitmq.com/plugin-development.html).
+
+TL;DR:
+
+    git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git
+    cd rabbitmq-public-umbrella
+    make co
+    git clone https://github.com/rabbitmq/rabbitmq-event-exchange.git
+    cd rabbitmq-event-exchange
+    make -j
+
+## Event format
+
+Each event has various properties associated with it. These are
+translated into AMQP 0-9-1 data encoding and inserted in the message headers. The
+**message body is always blank**.
+
+## Events
+
+So far RabbitMQ and related plugins emit events with the following routing keys:
+
+### RabbitMQ Broker
+
+Queue, Exchange and Binding events:
+
+- `queue.deleted`
+- `queue.created`
+- `exchange.created`
+- `exchange.deleted`
+- `binding.created`
+- `binding.deleted`
+
+Connection and Channel events:
+
+- `connection.created`
+- `connection.closed`
+- `channel.created`
+- `channel.closed`
+
+Consumer events:
+
+- `consumer.created`
+- `consumer.deleted`
+
+Policy and Parameter events:
+
+- `policy.set`
+- `policy.cleared`
+- `parameter.set`
+- `parameter.cleared`
+
+Virtual host events:
+
+- `vhost.created`
+- `vhost.deleted`
+
+User related events:
+
+- `user.authentication.success`
+- `user.authentication.failure`
+- `user.created`
+- `user.deleted`
+- `user.password.changed`
+- `user.password.cleared`
+- `user.tags.set`
+
+Permission events:
+
+- `permission.created`
+- `permission.deleted`
+
+### Shovel Plugin
+
+Worker events:
+
+- `shovel.worker.status`
+- `shovel.worker.removed`
+
+### Federation Plugin
+
+Link events:
+
+- `federation.link.status`
+- `federation.link.removed`
+
+## Example
+
+There is a usage example using the Java client in `examples/java`.
+
+## Uninstalling
+
+If you want to remove the exchange which this plugin creates, first
+disable the plugin and restart the broker. Then you can delete the exchange,
+e.g. with:
+
+    rabbitmqctl eval 'rabbit_exchange:delete(rabbit_misc:r(<<"/">>, exchange, <<"amq.rabbitmq.event">>), false).'
+
+## License
+
+Released under the Mozilla Public License 1.1,
+the same as RabbitMQ.
diff --git a/deps/rabbitmq_event_exchange/erlang.mk b/deps/rabbitmq_event_exchange/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards  and  for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# 'make search q=<term>': print every package whose name or description
+# contains <term>, compared case-insensitively via core_lc (defined elsewhere
+# in erlang.mk). Without q= it dumps the entire package index.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+# Warn (but do not fail) when the pre-rename variable name is still used.
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Deps listed here are excluded from ALL_DEPS_DIRS below. Exported so that
+# recursive $(MAKE) invocations on sub-apps/deps see the same exclusions.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Autopatched rebar-based deps fetch their own deps into our DEPS_DIR.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name/dep_repo/dep_commit($1 = dep key): resolve a dependency's app
+# name, repository URL and commit. A user-supplied dep_<name> variable takes
+# precedence over the built-in pkg_* package index; git:// GitHub URLs are
+# rewritten to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# ALL_APPS_DIRS: immediate subdirectories of APPS_DIR (APPS_DIR itself
+# filtered out). ALL_DEPS_DIRS: one DEPS_DIR entry per BUILD_DEPS/DEPS item
+# not listed in IGNORE_DEPS, after name resolution via dep_name.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Ensure APPS_DIR and DEPS_DIR are on ERL_LIBS exactly once, appending to any
+# value inherited from the environment.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# V=0 prints a terse " DEP <name>" line; V=2 traces every shell command.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# apps: recursively build each directory under APPS_DIR. apps.log records
+# apps already built in this run so a diamond dependency graph does not
+# rebuild the same app twice; the log is reset only at the top-level
+# invocation (both IS_APP and IS_DEP unset). Inside an app build (IS_APP
+# set) the target is a no-op to stop infinite recursion.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# deps: build every fetched dependency directory, after 'apps'. deps.log
+# plays the same dedup role as apps.log above and is likewise reset only at
+# the top level. A dep with no {GNUm,m,M}akefile is a hard error (exit 2).
+# Setting SKIP_DEPS turns the target into a no-op.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch($1 = dep name): decide how to make a fetched dependency
+# buildable by erlang.mk. erlang.mk-based deps get their app.src regenerated
+# and their bundled erlang.mk replaced; Makefiles that include sibling *.mk
+# files or mention rebar anywhere are routed through dep_autopatch2; deps
+# with no src/ directory get a no-op Makefile.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2($1): evaluate an .app.src.script if present, regenerate the
+# .app.src, then either translate the dep's rebar configuration (when any
+# rebar/rebar.config/rebar.config.script file exists — rebar is fetched and
+# built on demand) or fall back to generating a minimal erlang.mk Makefile.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop($1): give the dependency a Makefile whose only target
+# ('noop', empty recipe) does nothing, so the generic 'deps' loop above can
+# still run make in its directory without failing.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# The replacement file merely includes this erlang.mk via a relative path
+# (core_relpath computed against $(DEPS_DIR)/app). When
+# NO_AUTOPATCH_ERLANG_MK is set, the define expands to ':' and the dep's
+# bundled erlang.mk is left untouched.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen($1): overwrite the dep's Makefile with a minimal one
+# that builds it with this erlang.mk and +debug_info only.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: clone and build a pinned rebar (commit
+# 791db716...) into $(ERLANG_MK_TMP)/rebar, once per tmp dir; it is later
+# loaded by dep_autopatch_rebar.erl to evaluate rebar.config files.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar($1): set the dep's own Makefile aside (kept as
+# Makefile.orig.mk), translate its rebar configuration into erlang.mk terms
+# via dep_autopatch_rebar.erl (defined below), and remove any pre-built
+# ebin/$(1).app so it is regenerated from the patched .app.src.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl: rewrite the {modules, ...} entry of an already-built
+# $(DEPS_DIR)/$1/ebin/$1.app so it lists every *.erl found (recursively) under
+# $(DEPS_DIR)/$1/src. No-op when the .app file does not exist.
+# NOTE(review): keep comments outside the define body — it is passed to erl via
+# the erlang call macro (defined elsewhere in this file); verify before adding
+# %-comments inside.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluate <dep>/src/<dep>.app.src.script with
+# an empty binding set and write the resulting term back as a plain .app.src,
+# so the rest of the build only ever sees a static application resource file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalize a rebar-style application resource file.
+# Reads src/<dep>.app.src (falling back to ebin/<dep>.app when the former is
+# missing), then: empties {modules, ...} (regenerated later), replaces the
+# rebar magic {vsn, git} with the literal string "git", guarantees a
+# {registered, []} entry, writes the result to src/<dep>.app.src, and deletes
+# the input file when it was the ebin fallback.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch-method recipes. Each body is a shell fragment spliced into the
+# dep_target recipe; every command ends with ';' so fragments compose.
+# dep_repo/dep_name/dep_commit are helper macros defined elsewhere in this
+# file (outside this view).
+
+# git: clone without checkout (-n), then check out the pinned commit/branch/tag.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# git-submodule: the dependency lives as a submodule of the current repo.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# hg: clone without update (-U), then update to the pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# svn: plain checkout; no separate pin step.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# cp: local copy; "repo" is a filesystem path.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# dep_fetch_hex.erl: download the Hex package tarball $(1)-$(2).tar over HTTPS,
+# unpack it in memory, and extract its inner contents.tar.gz into
+# $(DEPS_DIR)/$1. ssl/inets are started explicitly because this runs in a
+# bare erl invocation, not a release.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# (The version is word 2 of the user's dep_<name> definition.)
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# dep_fetch_fail: terminal recipe used when no fetch method can be resolved.
+# Exit code 78 distinguishes configuration errors from ordinary failures.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Legacy format: dep_<name> = <repo> [<commit>]; defaults to master.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# dep_fetch: resolve the fetch-method NAME (not the recipe) for dependency $1.
+# If dep_$1 is defined and its first word names a dep_fetch_* method above,
+# use that; otherwise fall back to 'legacy' when building as a dependency
+# (IS_DEP) or 'fail'. With no dep_$1, consult the built-in package index
+# ($(PACKAGES)/pkg_$1_fetch, defined elsewhere), else 'fail'.
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# dep_target: template (instantiated via $(eval $(call ...)) below) producing
+# the rule that materializes $(DEPS_DIR)/<dep>. Steps:
+#   1. refuse (exit 17) when an application of the same name exists in
+#      $(APPS_DIR) — the dependency would shadow it;
+#   2. fetch using the method resolved by dep_fetch;
+#   3. run autoreconf when configure.ac/.in exists without a configure script,
+#      then ./configure (leading '-' tolerates configure failure);
+#   4. unless listed in NO_AUTOPATCH, autopatch the dependency — with special
+#      cases cloning rabbitmq-codegen/rabbitmq-server for the RabbitMQ
+#      amqp_client/rabbit patched builds ($$ defers expansion to eval time).
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch/build target per declared build-time and runtime dep.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recursive clean/distclean over every application in ALL_APPS_DIRS.
+# Guarded by IS_APP so a sub-make does not recurse again; '|| exit $$?'
+# propagates the first failing sub-make's status.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean also wipes the whole dependency tree unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin: -include a plugin makefile shipped inside a dependency
+# ($1 = path under DEPS_DIR, $2 = owning dependency) and make the plugin file
+# depend on the dependency being fetched first (empty-recipe order rule).
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# A bare dependency name means "<name>/plugins.mk"; an explicit path is used
+# as-is, with the owning dep taken from the path's first component.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH the module name keeps the template's relative path,
+# with '/' flattened to '_'; otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+# A stamp file records the last Makefile change; touching the templates
+# makes them newer than their beams so the app rule recompiles them.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# erlydtl_compile.erl: compile each .dtl file passed in $(1) (space-separated)
+# to a beam in ebin/. The module name is the lowercased template name (path
+# flattened with '_' when DTL_FULL_PATH is set) plus DTL_SUFFIX. Both 'ok'
+# and '{ok, _}' results are accepted; anything else crashes the erl run.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Incremental template build: only templates newer than the .app ($?) are
+# recompiled; erlydtl's ebin is put on the code path for the compiler run.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Compile .proto files: generate .erl into ebin/ via protobuffs, compile the
+# generated modules with debug_info, then remove the generated sources.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Runs protobuffs_compile:generate_source/2 for each .proto in $(1), emitting
+# headers next to the project's include/ and sources into ebin/.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags are strict (-Werror); projects may override ERLC_OPTS.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# Convention: <tag>_verbose_0 prints a terse label (V=0), <tag>_verbose_2
+# traces commands with 'set -x' (V=2), and V=1 leaves commands visible.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# When ebin/test exists (a previous test build left test-flavoured beams),
+# force a clean first so the regular build does not mix artifacts.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file: template for the generated ebin/$(PROJECT).app. $(1) is an id
+# string (emitted only when building as a dependency, IS_DEP), $(2) the
+# comma-separated module list. Two variants: library application (no
+# callback module src/$(PROJECT_MOD).erl present) vs. active application
+# (adds {mod, ...} and registered process names).
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+# ASN.1 compilation produces .erl files that join the regular build.
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# Compile ASN.1 specs (+noobj: sources only), then relocate the generated
+# .erl into src/ and the .hrl/.asn1db into include/.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile MIBs to priv/mibs/*.bin, then derive header files from the bins.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+# Scanner/parser specs are compiled to .erl in src/ so they feed into the
+# normal Erlang build below.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl: generate the $(PROJECT).d dependency file (path in $(1)).
+# Parses every module in ERL_FILES form-by-form and records intra-project
+# dependencies from -behaviour/-behavior, parse_transform compile options,
+# -include/-include_lib (resolved against include/ then src/), and -import.
+# Module->module deps go into a digraph; a reverse topological sort of it
+# yields the COMPILE_FIRST list appended to the .d file, while the ets bag
+# of file->file pairs becomes per-file '::' rules that just @touch their
+# target. AddHd opens a header before recursing so missing headers (enoent)
+# are silently skipped.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_event_exchange/examples/java/QueueEvents.java b/deps/rabbitmq_event_exchange/examples/java/QueueEvents.java
new file mode 100644 (file)
index 0000000..aca953c
--- /dev/null
@@ -0,0 +1,43 @@
+
+import com.rabbitmq.client.AMQP;
+import com.rabbitmq.client.Channel;
+import com.rabbitmq.client.Connection;
+import com.rabbitmq.client.ConnectionFactory;
+import com.rabbitmq.client.Consumer;
+import com.rabbitmq.client.DefaultConsumer;
+import com.rabbitmq.client.Envelope;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class QueueEvents {
+    public static void main(String[] args) throws IOException, InterruptedException {
+        ConnectionFactory f = new ConnectionFactory();
+        Connection c = f.newConnection();
+        Channel ch = c.createChannel();
+        String q = ch.queueDeclare().getQueue();
+        ch.queueBind(q, "amq.rabbitmq.event", "queue.*");
+
+        Consumer consumer = new DefaultConsumer(ch) {
+            @Override
+            public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
+                String event = envelope.getRoutingKey();
+                Map<String, Object> headers = properties.getHeaders();
+                String name = headers.get("name").toString();
+                String vhost = headers.get("vhost").toString();
+
+                if (event.equals("queue.created")) {
+                    boolean durable = (Boolean) headers.get("durable");
+                    String durableString = durable ? " (durable)" : " (transient)";
+                    System.out.println("Created: " + name + " in " + vhost + durableString);
+                }
+                else /* queue.deleted is the only other possibility */ {
+                    System.out.println("Deleted: " + name + " in " + vhost);
+                }
+            }
+        };
+        ch.basicConsume(q, true, consumer);
+        System.out.println("QUEUE EVENTS");
+        System.out.println("============\n");
+    }
+}
diff --git a/deps/rabbitmq_event_exchange/rabbitmq-components.mk b/deps/rabbitmq_event_exchange/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform e.g. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+# Fetch handler for deps declared with the "git_rmq" method.
+# Clone from the URL derived from this clone's own URL first (only
+# attempted when dep_rmq_repo actually changed it from the base URL);
+# on failure, fall back to the upstream URL.  Then check out the first
+# valid ref among $(dep_rmq_commits), failing loudly when none matches,
+# and finally set a distinct push URL when fetch and push URLs differ.
+# Dollar signs are quadrupled ($$$$) because this define is expanded
+# once more before the recipe text reaches the shell.
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+# Canonical copy of this file, shipped inside the rabbit_common dependency.
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+# Fail the build when the local rabbitmq-components.mk has drifted from
+# the canonical copy (cmp -s is silent; we print our own error).
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+# Refresh the local copy of this file from rabbit_common.  rabbit_common
+# itself is the source of truth, so there the target is a no-op.
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+# With DO_COMMIT=yes, commit the refreshed file — but only when it
+# actually changed (git diff --quiet short-circuits the commit).
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
diff --git a/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl b/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl
new file mode 100644 (file)
index 0000000..978b335
--- /dev/null
@@ -0,0 +1,131 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+%% Republishes internal broker events (as emitted through rabbit_event)
+%% onto the "amq.rabbitmq.event" topic exchange, so clients can consume
+%% them as ordinary messages.
+-module(rabbit_exchange_type_event).
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+
+-export([register/0, unregister/0]).
+%% gen_event callbacks.
+-export([init/1, handle_call/2, handle_event/2, handle_info/2,
+         terminate/2, code_change/3]).
+
+-export([fmt_proplist/1]). %% testing
+
+-define(EXCH_NAME, <<"amq.rabbitmq.event">>).
+
+%% Hook into broker boot: declare the exchange and install this handler
+%% after recovery has completed and before routing is enabled.
+-rabbit_boot_step({rabbit_event_exchange,
+                   [{description, "event exchange"},
+                    {mfa,         {?MODULE, register, []}},
+                    {cleanup,     {?MODULE, unregister, []}},
+                    {requires,    recovery},
+                    {enables,     routing_ready}]}).
+
+%%----------------------------------------------------------------------------
+
+%% Declare the event exchange (topic type; the three booleans are
+%% presumably durable/auto-delete/internal in rabbit_exchange:declare's
+%% argument order -- confirm against rabbit_exchange) and subscribe this
+%% module to the rabbit_event event manager.
+register() ->
+    rabbit_exchange:declare(exchange(), topic, true, false, true, []),
+    gen_event:add_handler(rabbit_event, ?MODULE, []).
+
+%% Boot-step cleanup: detach this handler from rabbit_event.
+unregister() ->
+    gen_event:delete_handler(rabbit_event, ?MODULE, []).
+
+%% Resource record for the event exchange in the configured vhost,
+%% creating that vhost first if it does not exist.
+exchange() ->
+    VHost = ensure_vhost_exists(),
+    rabbit_misc:r(VHost, exchange, ?EXCH_NAME).
+
+%%----------------------------------------------------------------------------
+
+init([]) -> {ok, []}.
+
+handle_call(_Request, State) -> {ok, not_understood, State}.
+
+%% Forward only events that carry no reference (reference = none) and
+%% whose type does not map to 'ignore' (i.e. "*_stats" types, see
+%% key/1).  The event props become message headers, with an extra
+%% millisecond-resolution <<"timestamp_in_ms">> entry, and the message
+%% is published persistently (delivery_mode = 2) with a routing key
+%% derived from the event type.
+handle_event(#event{type      = Type,
+                    props     = Props,
+                    timestamp = TS,
+                    reference = none}, State) ->
+    case key(Type) of
+        ignore -> ok;
+        Key    ->
+                  Props2 = [{<<"timestamp_in_ms">>, TS} | Props],
+                  PBasic = #'P_basic'{delivery_mode = 2,
+                                      headers = fmt_proplist(Props2),
+                                      %% 0-9-1 says the timestamp is a
+                                      %% "64 bit POSIX
+                                      %% timestamp". That's second
+                                      %% resolution, not millisecond.
+                                      timestamp = time_compat:convert_time_unit(
+                                                    TS, milli_seconds, seconds)},
+                  Msg = rabbit_basic:message(exchange(), Key, PBasic, <<>>),
+                  rabbit_basic:publish(
+                    rabbit_basic:delivery(false, false, Msg, undefined))
+    end,
+    {ok, State};
+handle_event(_Event, State) ->
+    {ok, State}.
+
+handle_info(_Info, State) -> {ok, State}.
+
+terminate(_Arg, _State) -> ok.
+
+code_change(_OldVsn, State, _Extra) -> {ok, State}.
+
+%%----------------------------------------------------------------------------
+
+%% VHost to publish into: the {rabbitmq_event_exchange, vhost} app env
+%% entry, falling back to rabbit's default_vhost.  Creates the vhost
+%% when it does not exist yet, then returns its name.
+ensure_vhost_exists() ->
+    VHost = case application:get_env(rabbitmq_event_exchange, vhost) of
+                undefined ->
+                    {ok, V} = application:get_env(rabbit, default_vhost),
+                    V;
+                {ok, V} ->
+                    V
+            end,
+    case rabbit_vhost:exists(VHost) of
+        false -> rabbit_vhost:add(VHost);
+        _     -> ok
+    end,
+    VHost.
+
+%% Routing key for an event type: underscores become dots (e.g.
+%% connection_created -> <<"connection.created">>).  Two-token
+%% "*_stats" types are not forwarded at all.
+key(S) ->
+    case string:tokens(atom_to_list(S), "_") of
+        [_, "stats"] -> ignore;
+        Tokens       -> list_to_binary(string:join(Tokens, "."))
+    end.
+
+%% Convert an event proplist into an AMQP field table: a flat list of
+%% {Key, Type, Value} tuples.
+fmt_proplist(Props) ->
+    lists:append([fmt(a2b(K), V) || {K, V} <- Props]).
+
+%% A #resource{} value expands into two table entries: its name under
+%% the original key, plus the vhost under <<"vhost">>.
+fmt(K, #resource{virtual_host = VHost, 
+                 name         = Name}) -> [{K,           longstr, Name},
+                                           {<<"vhost">>, longstr, VHost}];
+fmt(K, V) -> {T, Enc} = fmt(V),
+             [{K, T, Enc}].
+
+%% Map an Erlang term to an AMQP field-table {Type, Value} pair; any
+%% term not matched by the clauses below is rendered with ~p into a
+%% long string (depth bounded by the huge precision).
+fmt(true)                 -> {bool, true};
+fmt(false)                -> {bool, false};
+fmt(V) when is_atom(V)    -> {longstr, a2b(V)};
+fmt(V) when is_integer(V) -> {long, V};
+fmt(V) when is_number(V)  -> {float, V};
+fmt(V) when is_binary(V)  -> {longstr, V};
+fmt([{_, _}|_] = Vs)      -> {table, fmt_proplist(Vs)};
+fmt(Vs) when is_list(Vs)  -> {array, [fmt(V) || V <- Vs]};
+fmt(V) when is_pid(V)     -> {longstr,
+                              list_to_binary(rabbit_misc:pid_to_string(V))};
+fmt(V)                    -> {longstr,
+                              list_to_binary(
+                                rabbit_misc:format("~1000000000p", [V]))}.
+
+%% Coerce an atom (or binary) to a binary, for use as a table key.
+a2b(A) when is_atom(A)   -> list_to_binary(atom_to_list(A));
+a2b(B) when is_binary(B) -> B.
diff --git a/deps/rabbitmq_event_exchange/src/rabbitmq_event_exchange.app.src b/deps/rabbitmq_event_exchange/src/rabbitmq_event_exchange.app.src
new file mode 100644 (file)
index 0000000..5fdc6a4
--- /dev/null
@@ -0,0 +1,7 @@
+%% OTP application resource metadata for the event exchange plugin.
+%% NOTE(review): `modules` is left empty here — presumably filled in at
+%% build time by the build system; confirm against erlang.mk behaviour.
+{application, rabbitmq_event_exchange,
+ [{description, "Event Exchange Type"},
+  {vsn, "3.6.6"},
+  {modules, []},
+  {registered, []},
+  {env, []},
+  {applications, [kernel, stdlib, rabbit_common, rabbit]}]}.
diff --git a/deps/rabbitmq_federation/CODE_OF_CONDUCT.md b/deps/rabbitmq_federation/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_federation/CONTRIBUTING.md b/deps/rabbitmq_federation/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_federation/Makefile b/deps/rabbitmq_federation/Makefile
new file mode 100644 (file)
index 0000000..0da1e80
--- /dev/null
@@ -0,0 +1,15 @@
+# erlang.mk-based build glue for the rabbitmq_federation plugin.
+PROJECT = rabbitmq_federation
+
+# Runtime deps; rabbitmq_ct_helpers is only pulled in for the test suite.
+DEPS = rabbit_common rabbit amqp_client
+TEST_DEPS = rabbitmq_ct_helpers
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+# rabbitmq-components.mk must be included before erlang.mk: it defines
+# the dep_* locations and fetch method that erlang.mk consumes.
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_federation/erlang.mk b/deps/rabbitmq_federation/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+# "check" is an alias for the project's test suite.
+check:: tests
+
+clean:: clean-crashdump
+
+# Remove a leftover Erlang VM crash dump.  The ifneq is evaluated when
+# the makefile is read, so the rm only runs if the dump existed when
+# make started.
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+# Remove erlang.mk's own temporary working directory.
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+# Self-update: clone erlang.mk upstream, optionally pin to
+# $(ERLANG_MK_COMMIT), rebuild the bundle (honouring a local
+# build.config when present), replace this file with the fresh copy,
+# and clean up the build directory.
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards "and" and "or" for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decoding and encoding of JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+# Fetch a dependency with git: clone without checkout (-n), then check
+# out the ref pinned in the dep_* specification ($(call dep_commit,...)).
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Fetch a dependency that is vendored as a git submodule of this
+# repository (initialize and update just that submodule path).
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Fetch a dependency with Mercurial: clone without working copy (-U),
+# then update to the pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Fetch a dependency with Subversion. Note: unlike git/hg, no separate
+# revision is applied here — the repo URL must encode the wanted rev.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Fetch a dependency from a local path by recursive copy.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Erlang snippet (run via $(call erlang,...)): download the $(1)-$(2)
+# Hex package tarball over HTTPS from the hex.pm S3 mirror, then unpack
+# the inner contents.tar.gz into $(DEPS_DIR)/$1. Pattern matches assert
+# the HTTP 200 status and the archive layout, crashing on any mismatch.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# $(2) passed to the Erlang snippet is the version string, taken from the
+# second word of the dep_* entry.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fetch method of last resort: report the unknown/invalid dependency on
+# stderr and abort with exit code 78 (EX_CONFIG from sysexits).
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Legacy dep_* format: word 1 is the git repository URL, word 2 an
+# optional ref that defaults to master.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch-method name for dependency $(1):
+#  - explicit dep_* entry: its first word, when a matching dep_fetch_*
+#    macro exists; otherwise "legacy" when building as a dependency
+#    (IS_DEP), else "fail";
+#  - no dep_* entry: the method from the package index ($(pkg_*_fetch))
+#    when $(1) is a known package, else "fail".
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every application in $(ALL_APPS_DIRS),
+# propagating failures via the sub-make exit status. Skipped when this
+# make was itself invoked from an app (IS_APP=1) to avoid re-recursion.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# Unless SKIP_DEPS is set, distclean removes the entire DEPS_DIR.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Load external plugins listed in DEP_PLUGINS. An entry containing a
+# slash ("dep/path.mk") -includes that exact file; a bare "dep" entry
+# -includes dep/plugins.mk. The included file is declared to depend on
+# the dependency directory, so the dep is fetched before inclusion.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# rebar compatibility: generate a rebar.config equivalent to this
+# Makefile's DEPS and ERLC_OPTS so the project can be consumed by rebar.
+
+# Normalize "opt1, opt2" into comma-separated words for $(foreach).
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Drop -Werror (see note above: no warnings-as-errors for consumers)
+# and strip the leading '+' from erlc-style options.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+	$(if $(findstring +,$1),\
+		$(shell echo $1 | cut -b 2-)))
+endef
+
+# Render the converted options as an Erlang list literal.
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Template for the generated rebar.config: hex deps keep their version
+# string, git deps are pinned as {git, Repo, Commit}.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+	$(if $(filter hex,$(call dep_fetch,$d)),\
+		{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+		{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export the rendered template through the environment so the recipe
+# can emit it verbatim with a single echo.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+	$(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# Install the gzipped man pages under $(MAN_INSTALL_PATH), one dir per
+# section in $(MAN_SECTIONS), owned by the invoking user.
+# Fix: install(1) takes a *group* for -g and an *owner* for -o; the
+# original passed `id -u` (uid) to -g and `id -g` (gid) to -o, swapped.
+install-asciidoc: asciidoc-manual
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+		install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+	done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_federation/etc/setup-rabbit-test.sh b/deps/rabbitmq_federation/etc/setup-rabbit-test.sh
new file mode 100755 (executable)
index 0000000..2e2282e
--- /dev/null
@@ -0,0 +1,2 @@
+#!/bin/sh -e
+sh -e `dirname $0`/rabbit-test.sh "$DEPS_DIR/rabbit/scripts/rabbitmqctl -n $RABBITMQ_NODENAME"
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/include/rabbit_federation.hrl
rename to deps/rabbitmq_federation/include/rabbit_federation.hrl
index 0995cfd68cf8179ea446f63db543e5b40cb1eb91..5081eca62be9d8ff6fe9b8d42d8c10c3726a9da6 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -record(upstream, {uris,
diff --git a/deps/rabbitmq_federation/rabbitmq-components.mk b/deps/rabbitmq_federation/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_app.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_app.erl
index 119ef60a33f979f76b1130709dfe2bd3a6699559..28e0aae0a138aaa64377298c16e9b3e5291e8d60 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_app).
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_db.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_db.erl
index d00f9914dc68013411396a9b8fb94a085848d818..82e06a9e0b21cd9bde8ddb1cda2ffb9156318846 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_db).
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_event.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_event.erl
index 677d5f218cca422142464fe86fa21d95169c9f44..6e928166cf7f295a3c1c8beb9bb2bbdd6a451e2b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_event).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_exchange.erl
index fa6102ca21641e6b1ab0a056822c8394107f7829..773e204921b13036c86c8a1627cb8f25e1ea30ae 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% TODO rename this
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl
index 12f53168af2dea14ddff8bec5fcec3a747c03f36..14e310f31b7c648b93e598ddab27fcf09a48e143 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_exchange_link).
@@ -192,16 +192,16 @@ cast(Msg)        -> [gen_server2:cast(Pid, Msg) || Pid <- all()].
 cast(XName, Msg) -> [gen_server2:cast(Pid, Msg) || Pid <- x(XName)].
 
 join(Name) ->
-    pg2_fixed:create(pgname(Name)),
-    ok = pg2_fixed:join(pgname(Name), self()).
+    pg2:create(pgname(Name)),
+    ok = pg2:join(pgname(Name), self()).
 
 all() ->
-    pg2_fixed:create(pgname(rabbit_federation_exchanges)),
-    pg2_fixed:get_members(pgname(rabbit_federation_exchanges)).
+    pg2:create(pgname(rabbit_federation_exchanges)),
+    pg2:get_members(pgname(rabbit_federation_exchanges)).
 
 x(XName) ->
-    pg2_fixed:create(pgname({rabbit_federation_exchange, XName})),
-    pg2_fixed:get_members(pgname({rabbit_federation_exchange, XName})).
+    pg2:create(pgname({rabbit_federation_exchange, XName})),
+    pg2:get_members(pgname({rabbit_federation_exchange, XName})).
 
 %%----------------------------------------------------------------------------
 
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link_sup_sup.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl
index 529edea1134465473c2e203743acdfbe0a677813..ccb2719118f03b4a51830d77ce1077c373aaf83d 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_exchange_link_sup_sup).
@@ -40,7 +40,7 @@ start_child(X) ->
     case mirrored_supervisor:start_child(
            ?SUPERVISOR,
            {id(X), {rabbit_federation_link_sup, start_link, [X]},
-            transient, ?MAX_WAIT, supervisor,
+            transient, ?SUPERVISOR_WAIT, supervisor,
             [rabbit_federation_link_sup]}) of
         {ok, _Pid}             -> ok;
         %% A link returned {stop, gone}, the link_sup shut down, that's OK.
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_sup.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl
index 2999a189b87798a7338c4a035088c8239135163b..402fe30d570097ad6c831a9e2513f3320b7025fc 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_link_sup).
@@ -100,12 +100,12 @@ specs(XorQ) ->
 
 spec(U = #upstream{reconnect_delay = Delay}, #exchange{name = XName}) ->
     {U, {rabbit_federation_exchange_link, start_link, [{U, XName}]},
-     {permanent, Delay}, ?MAX_WAIT, worker,
+     {permanent, Delay}, ?WORKER_WAIT, worker,
      [rabbit_federation_link]};
 
 spec(Upstream = #upstream{reconnect_delay = Delay}, Q = #amqqueue{}) ->
     {Upstream, {rabbit_federation_queue_link, start_link, [{Upstream, Q}]},
-     {permanent, Delay}, ?MAX_WAIT, worker,
+     {permanent, Delay}, ?WORKER_WAIT, worker,
      [rabbit_federation_queue_link]}.
 
 name(#exchange{name = XName}) -> XName;
similarity index 89%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_util.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_link_util.erl
index 757331f67a58525164c2bb711f001af332ef4902..91730a084f6e98f7321ffe322be207004f106aaf 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_link_util).
@@ -23,7 +23,7 @@
 -export([start_conn_ch/5, disposable_channel_call/2, disposable_channel_call/3,
          disposable_connection_call/3, ensure_connection_closed/1,
          log_terminate/4, unacked_new/0, ack/3, nack/3, forward/9,
-         handle_down/6]).
+         handle_down/6, get_connection_name/2]).
 
 %% temp
 -export([connection_error/6]).
@@ -36,7 +36,7 @@
 
 start_conn_ch(Fun, Upstream, UParams,
               XorQName = #resource{virtual_host = DownVHost}, State) ->
-    case open_monitor(#amqp_params_direct{virtual_host = DownVHost}) of
+    case open_monitor(#amqp_params_direct{virtual_host = DownVHost}, get_connection_name(Upstream, UParams)) of
         {ok, DConn, DCh} ->
             case Upstream#upstream.ack_mode of
                 'on-confirm' ->
@@ -46,7 +46,7 @@ start_conn_ch(Fun, Upstream, UParams,
                 _ ->
                     ok
             end,
-            case open_monitor(UParams#upstream_params.params) of
+            case open_monitor(UParams#upstream_params.params, get_connection_name(Upstream, UParams)) of
                 {ok, Conn, Ch} ->
                     %% Don't trap exits until we have established
                     %% connections so that if we try to delete
@@ -82,15 +82,36 @@ start_conn_ch(Fun, Upstream, UParams,
                              Upstream, UParams, XorQName, State)
     end.
 
-open_monitor(Params) ->
-    case open(Params) of
+get_connection_name(#upstream{name = UpstreamName},
+    #upstream_params{x_or_q = Resource}) when is_record(Resource, exchange)->
+    Policy = Resource#exchange.policy,
+    PolicyName = proplists:get_value(name, Policy),
+    connection_name(UpstreamName, PolicyName);
+
+get_connection_name(#upstream{name = UpstreamName},
+    #upstream_params{x_or_q = Resource}) when is_record(Resource, amqqueue)->
+    Policy = Resource#amqqueue.policy,
+    PolicyName = proplists:get_value(name, Policy),
+    connection_name(UpstreamName, PolicyName);
+
+get_connection_name(_, _) ->
+    connection_name(undefined, undefined).
+
+connection_name(Upstream, Policy) when is_binary(Upstream), is_binary(Policy) ->
+    <<<<"Federation link (upstream: ">>/binary, Upstream/binary, <<", policy: ">>/binary, Policy/binary, <<")">>/binary>>;
+
+connection_name(_, _) ->
+    <<"Federation link">>.
+
+open_monitor(Params, Name) ->
+    case open(Params, Name) of
         {ok, Conn, Ch} -> erlang:monitor(process, Ch),
                           {ok, Conn, Ch};
         E              -> E
     end.
 
-open(Params) ->
-    case amqp_connection:start(Params) of
+open(Params, Name) ->
+    case amqp_connection:start(Params, Name) of
         {ok, Conn} -> case amqp_connection:open_channel(Conn) of
                           {ok, Ch} -> {ok, Conn, Ch};
                           E        -> catch amqp_connection:close(Conn),
@@ -273,7 +294,7 @@ disposable_channel_call(Conn, Method, ErrFun) ->
     end.
 
 disposable_connection_call(Params, Method, ErrFun) ->
-    case open(Params) of
+    case open(Params, undefined) of
         {ok, Conn, Ch} ->
             try
                 amqp_channel:call(Ch, Method)
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_parameters.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_parameters.erl
index c05f4c07e28e149bdd01de2462705796b81b8760..30549f18a17dbb0dfd05a33686e6957035ad20fa 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_parameters).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_queue.erl
index 49c4f40207007531c26d027905339b61be299818..ec9ec8dc0bca81232e1d6008f341c91b916422a5 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_queue).
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl
index e498f76a536fc0b09efcd35e10cf186ade485524..80785ee0d26845695ebaeb12fd8eac86ae435866 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_queue_link).
@@ -46,16 +46,16 @@ cast(Msg)        -> [gen_server2:cast(Pid, Msg) || Pid <- all()].
 cast(QName, Msg) -> [gen_server2:cast(Pid, Msg) || Pid <- q(QName)].
 
 join(Name) ->
-    pg2_fixed:create(pgname(Name)),
-    ok = pg2_fixed:join(pgname(Name), self()).
+    pg2:create(pgname(Name)),
+    ok = pg2:join(pgname(Name), self()).
 
 all() ->
-    pg2_fixed:create(pgname(rabbit_federation_queues)),
-    pg2_fixed:get_members(pgname(rabbit_federation_queues)).
+    pg2:create(pgname(rabbit_federation_queues)),
+    pg2:get_members(pgname(rabbit_federation_queues)).
 
 q(QName) ->
-    pg2_fixed:create(pgname({rabbit_federation_queue, QName})),
-    pg2_fixed:get_members(pgname({rabbit_federation_queue, QName})).
+    pg2:create(pgname({rabbit_federation_queue, QName})),
+    pg2:get_members(pgname({rabbit_federation_queue, QName})).
 
 federation_up() ->
     proplists:is_defined(rabbitmq_federation,
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link_sup_sup.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl
index 9c6a70344785d1be1803a129b5617076092e95b2..f5b6a342dc3910946110d7c33ba52a1216da9514 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_queue_link_sup_sup).
@@ -38,7 +38,7 @@ start_child(Q) ->
     case supervisor2:start_child(
            ?SUPERVISOR,
            {id(Q), {rabbit_federation_link_sup, start_link, [Q]},
-            transient, ?MAX_WAIT, supervisor,
+            transient, ?SUPERVISOR_WAIT, supervisor,
             [rabbit_federation_link_sup]}) of
         {ok, _Pid}             -> ok;
         %% A link returned {stop, gone}, the link_sup shut down, that's OK.
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_status.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_status.erl
index 59dc79e15e2be31195ed9018429438643e036c5b..af6c6c1742ba40d9fb09628ada75a910d2d14ba2 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ Federation.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_status).
similarity index 91%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_sup.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_sup.erl
index 52a837d0f1f7a93062924a2b9725a737ab90b18e..f19e8c7aa58eb1d4347f5454471a834842d77295 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_sup).
@@ -55,14 +55,14 @@ stop() ->
 
 init([]) ->
     Status = {status, {rabbit_federation_status, start_link, []},
-              transient, ?MAX_WAIT, worker,
+              transient, ?WORKER_WAIT, worker,
               [rabbit_federation_status]},
     XLinkSupSup = {x_links,
                    {rabbit_federation_exchange_link_sup_sup, start_link, []},
-                   transient, ?MAX_WAIT, supervisor,
+                   transient, ?SUPERVISOR_WAIT, supervisor,
                    [rabbit_federation_exchange_link_sup_sup]},
     QLinkSupSup = {q_links,
                    {rabbit_federation_queue_link_sup_sup, start_link, []},
-                  transient, ?MAX_WAIT, supervisor,
+                  transient, ?SUPERVISOR_WAIT, supervisor,
                   [rabbit_federation_queue_link_sup_sup]},
     {ok, {{one_for_one, 3, 10}, [Status, XLinkSupSup, QLinkSupSup]}}.
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_upstream.erl
index ae4c512d3973400bb1621544bb06ec19480f57da..3e00500b0aebb80dffa34d38b636e5e342289270 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_upstream).
@@ -71,8 +71,7 @@ remove_credentials(URI) ->
     list_to_binary(amqp_uri:remove_credentials(binary_to_list(URI))).
 
 to_params(Upstream = #upstream{uris = URIs}, XorQ) ->
-    random:seed(now()),
-    URI = lists:nth(random:uniform(length(URIs)), URIs),
+    URI = lists:nth(rand_compat:uniform(length(URIs)), URIs),
     {ok, Params} = amqp_uri:parse(binary_to_list(URI), vhost(XorQ)),
     XorQ1 = with_name(Upstream, vhost(Params), XorQ),
     SafeURI = remove_credentials(URI),
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream_exchange.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl
index 920bc9fea2301932b60e349f4dfc0aa10b634014..61aaf6b2e316bf060d1359f1e1fad97509e59806 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_upstream_exchange).
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_util.erl
rename to deps/rabbitmq_federation/src/rabbit_federation_util.erl
index 33e903e281191e4581e34508744d51468b4ac65a..24bc138063be1e59e3790bdc863a857e830924e0 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_util).
similarity index 67%
rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbitmq_federation.app.src
rename to deps/rabbitmq_federation/src/rabbitmq_federation.app.src
index 0100da3373595d909edffb22ae4ee940c0d194a6..6f8c2e8c504bb3f5e3e426bfb2e25ca44f93e979 100644 (file)
@@ -1,8 +1,8 @@
 {application, rabbitmq_federation,
  [{description, "RabbitMQ Federation"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {mod, {rabbit_federation_app, []}},
   {env, [{pgroup_name_cluster_id, false}]},
-  {applications, [kernel, stdlib, rabbit, amqp_client]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit, amqp_client]}]}.
diff --git a/deps/rabbitmq_federation_management/CODE_OF_CONDUCT.md b/deps/rabbitmq_federation_management/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_federation_management/CONTRIBUTING.md b/deps/rabbitmq_federation_management/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_federation_management/LICENSE b/deps/rabbitmq_federation_management/LICENSE
new file mode 100644 (file)
index 0000000..66bf00f
--- /dev/null
@@ -0,0 +1,13 @@
+This package, the RabbitMQ Federation Management Plugin, is licensed under the MPL. For the
+MPL, please see LICENSE-MPL-RabbitMQ.
+
+This package makes use of the following third party libraries:
+
+jQuery     - http://jquery.com/                       - MIT license, see LICENSE-MIT-jQuery164
+EJS        - http://embeddedjs.com/                   - MIT license, see LICENSE-MIT-EJS10
+Sammy      - http://code.quirkey.com/sammy/           - MIT license, see LICENSE-MIT-Sammy060
+cowboy     - http://github.com/ninenines/cowboy       - ISC license
+base64.js  - http://code.google.com/p/stringencoders/ - BSD license, see LICENSE-BSD-base64js
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbitmq_federation_management/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_federation_management/LICENSE-MPL-RabbitMQ
new file mode 100644 (file)
index 0000000..e163fcc
--- /dev/null
@@ -0,0 +1,455 @@
+                          MOZILLA PUBLIC LICENSE
+                                Version 1.1
+
+                              ---------------
+
+1. Definitions.
+
+     1.0.1. "Commercial Use" means distribution or otherwise making the
+     Covered Code available to a third party.
+
+     1.1. "Contributor" means each entity that creates or contributes to
+     the creation of Modifications.
+
+     1.2. "Contributor Version" means the combination of the Original
+     Code, prior Modifications used by a Contributor, and the Modifications
+     made by that particular Contributor.
+
+     1.3. "Covered Code" means the Original Code or Modifications or the
+     combination of the Original Code and Modifications, in each case
+     including portions thereof.
+
+     1.4. "Electronic Distribution Mechanism" means a mechanism generally
+     accepted in the software development community for the electronic
+     transfer of data.
+
+     1.5. "Executable" means Covered Code in any form other than Source
+     Code.
+
+     1.6. "Initial Developer" means the individual or entity identified
+     as the Initial Developer in the Source Code notice required by Exhibit
+     A.
+
+     1.7. "Larger Work" means a work which combines Covered Code or
+     portions thereof with code not governed by the terms of this License.
+
+     1.8. "License" means this document.
+
+     1.8.1. "Licensable" means having the right to grant, to the maximum
+     extent possible, whether at the time of the initial grant or
+     subsequently acquired, any and all of the rights conveyed herein.
+
+     1.9. "Modifications" means any addition to or deletion from the
+     substance or structure of either the Original Code or any previous
+     Modifications. When Covered Code is released as a series of files, a
+     Modification is:
+          A. Any addition to or deletion from the contents of a file
+          containing Original Code or previous Modifications.
+
+          B. Any new file that contains any part of the Original Code or
+          previous Modifications.
+
+     1.10. "Original Code" means Source Code of computer software code
+     which is described in the Source Code notice required by Exhibit A as
+     Original Code, and which, at the time of its release under this
+     License is not already Covered Code governed by this License.
+
+     1.10.1. "Patent Claims" means any patent claim(s), now owned or
+     hereafter acquired, including without limitation,  method, process,
+     and apparatus claims, in any patent Licensable by grantor.
+
+     1.11. "Source Code" means the preferred form of the Covered Code for
+     making modifications to it, including all modules it contains, plus
+     any associated interface definition files, scripts used to control
+     compilation and installation of an Executable, or source code
+     differential comparisons against either the Original Code or another
+     well known, available Covered Code of the Contributor's choice. The
+     Source Code can be in a compressed or archival form, provided the
+     appropriate decompression or de-archiving software is widely available
+     for no charge.
+
+     1.12. "You" (or "Your")  means an individual or a legal entity
+     exercising rights under, and complying with all of the terms of, this
+     License or a future version of this License issued under Section 6.1.
+     For legal entities, "You" includes any entity which controls, is
+     controlled by, or is under common control with You. For purposes of
+     this definition, "control" means (a) the power, direct or indirect,
+     to cause the direction or management of such entity, whether by
+     contract or otherwise, or (b) ownership of more than fifty percent
+     (50%) of the outstanding shares or beneficial ownership of such
+     entity.
+
+2. Source Code License.
+
+     2.1. The Initial Developer Grant.
+     The Initial Developer hereby grants You a world-wide, royalty-free,
+     non-exclusive license, subject to third party intellectual property
+     claims:
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Initial Developer to use, reproduce,
+          modify, display, perform, sublicense and distribute the Original
+          Code (or portions thereof) with or without Modifications, and/or
+          as part of a Larger Work; and
+
+          (b) under Patents Claims infringed by the making, using or
+          selling of Original Code, to make, have made, use, practice,
+          sell, and offer for sale, and/or otherwise dispose of the
+          Original Code (or portions thereof).
+
+          (c) the licenses granted in this Section 2.1(a) and (b) are
+          effective on the date Initial Developer first distributes
+          Original Code under the terms of this License.
+
+          (d) Notwithstanding Section 2.1(b) above, no patent license is
+          granted: 1) for code that You delete from the Original Code; 2)
+          separate from the Original Code;  or 3) for infringements caused
+          by: i) the modification of the Original Code or ii) the
+          combination of the Original Code with other software or devices.
+
+     2.2. Contributor Grant.
+     Subject to third party intellectual property claims, each Contributor
+     hereby grants You a world-wide, royalty-free, non-exclusive license
+
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Contributor, to use, reproduce, modify,
+          display, perform, sublicense and distribute the Modifications
+          created by such Contributor (or portions thereof) either on an
+          unmodified basis, with other Modifications, as Covered Code
+          and/or as part of a Larger Work; and
+
+          (b) under Patent Claims infringed by the making, using, or
+          selling of  Modifications made by that Contributor either alone
+          and/or in combination with its Contributor Version (or portions
+          of such combination), to make, use, sell, offer for sale, have
+          made, and/or otherwise dispose of: 1) Modifications made by that
+          Contributor (or portions thereof); and 2) the combination of
+          Modifications made by that Contributor with its Contributor
+          Version (or portions of such combination).
+
+          (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
+          effective on the date Contributor first makes Commercial Use of
+          the Covered Code.
+
+          (d)    Notwithstanding Section 2.2(b) above, no patent license is
+          granted: 1) for any code that Contributor has deleted from the
+          Contributor Version; 2)  separate from the Contributor Version;
+          3)  for infringements caused by: i) third party modifications of
+          Contributor Version or ii)  the combination of Modifications made
+          by that Contributor with other software  (except as part of the
+          Contributor Version) or other devices; or 4) under Patent Claims
+          infringed by Covered Code in the absence of Modifications made by
+          that Contributor.
+
+3. Distribution Obligations.
+
+     3.1. Application of License.
+     The Modifications which You create or to which You contribute are
+     governed by the terms of this License, including without limitation
+     Section 2.2. The Source Code version of Covered Code may be
+     distributed only under the terms of this License or a future version
+     of this License released under Section 6.1, and You must include a
+     copy of this License with every copy of the Source Code You
+     distribute. You may not offer or impose any terms on any Source Code
+     version that alters or restricts the applicable version of this
+     License or the recipients' rights hereunder. However, You may include
+     an additional document offering the additional rights described in
+     Section 3.5.
+
+     3.2. Availability of Source Code.
+     Any Modification which You create or to which You contribute must be
+     made available in Source Code form under the terms of this License
+     either on the same media as an Executable version or via an accepted
+     Electronic Distribution Mechanism to anyone to whom you made an
+     Executable version available; and if made available via Electronic
+     Distribution Mechanism, must remain available for at least twelve (12)
+     months after the date it initially became available, or at least six
+     (6) months after a subsequent version of that particular Modification
+     has been made available to such recipients. You are responsible for
+     ensuring that the Source Code version remains available even if the
+     Electronic Distribution Mechanism is maintained by a third party.
+
+     3.3. Description of Modifications.
+     You must cause all Covered Code to which You contribute to contain a
+     file documenting the changes You made to create that Covered Code and
+     the date of any change. You must include a prominent statement that
+     the Modification is derived, directly or indirectly, from Original
+     Code provided by the Initial Developer and including the name of the
+     Initial Developer in (a) the Source Code, and (b) in any notice in an
+     Executable version or related documentation in which You describe the
+     origin or ownership of the Covered Code.
+
+     3.4. Intellectual Property Matters
+          (a) Third Party Claims.
+          If Contributor has knowledge that a license under a third party's
+          intellectual property rights is required to exercise the rights
+          granted by such Contributor under Sections 2.1 or 2.2,
+          Contributor must include a text file with the Source Code
+          distribution titled "LEGAL" which describes the claim and the
+          party making the claim in sufficient detail that a recipient will
+          know whom to contact. If Contributor obtains such knowledge after
+          the Modification is made available as described in Section 3.2,
+          Contributor shall promptly modify the LEGAL file in all copies
+          Contributor makes available thereafter and shall take other steps
+          (such as notifying appropriate mailing lists or newsgroups)
+          reasonably calculated to inform those who received the Covered
+          Code that new knowledge has been obtained.
+
+          (b) Contributor APIs.
+          If Contributor's Modifications include an application programming
+          interface and Contributor has knowledge of patent licenses which
+          are reasonably necessary to implement that API, Contributor must
+          also include this information in the LEGAL file.
+
+               (c)    Representations.
+          Contributor represents that, except as disclosed pursuant to
+          Section 3.4(a) above, Contributor believes that Contributor's
+          Modifications are Contributor's original creation(s) and/or
+          Contributor has sufficient rights to grant the rights conveyed by
+          this License.
+
+     3.5. Required Notices.
+     You must duplicate the notice in Exhibit A in each file of the Source
+     Code.  If it is not possible to put such notice in a particular Source
+     Code file due to its structure, then You must include such notice in a
+     location (such as a relevant directory) where a user would be likely
+     to look for such a notice.  If You created one or more Modification(s)
+     You may add your name as a Contributor to the notice described in
+     Exhibit A.  You must also duplicate this License in any documentation
+     for the Source Code where You describe recipients' rights or ownership
+     rights relating to Covered Code.  You may choose to offer, and to
+     charge a fee for, warranty, support, indemnity or liability
+     obligations to one or more recipients of Covered Code. However, You
+     may do so only on Your own behalf, and not on behalf of the Initial
+     Developer or any Contributor. You must make it absolutely clear than
+     any such warranty, support, indemnity or liability obligation is
+     offered by You alone, and You hereby agree to indemnify the Initial
+     Developer and every Contributor for any liability incurred by the
+     Initial Developer or such Contributor as a result of warranty,
+     support, indemnity or liability terms You offer.
+
+     3.6. Distribution of Executable Versions.
+     You may distribute Covered Code in Executable form only if the
+     requirements of Section 3.1-3.5 have been met for that Covered Code,
+     and if You include a notice stating that the Source Code version of
+     the Covered Code is available under the terms of this License,
+     including a description of how and where You have fulfilled the
+     obligations of Section 3.2. The notice must be conspicuously included
+     in any notice in an Executable version, related documentation or
+     collateral in which You describe recipients' rights relating to the
+     Covered Code. You may distribute the Executable version of Covered
+     Code or ownership rights under a license of Your choice, which may
+     contain terms different from this License, provided that You are in
+     compliance with the terms of this License and that the license for the
+     Executable version does not attempt to limit or alter the recipient's
+     rights in the Source Code version from the rights set forth in this
+     License. If You distribute the Executable version under a different
+     license You must make it absolutely clear that any terms which differ
+     from this License are offered by You alone, not by the Initial
+     Developer or any Contributor. You hereby agree to indemnify the
+     Initial Developer and every Contributor for any liability incurred by
+     the Initial Developer or such Contributor as a result of any such
+     terms You offer.
+
+     3.7. Larger Works.
+     You may create a Larger Work by combining Covered Code with other code
+     not governed by the terms of this License and distribute the Larger
+     Work as a single product. In such a case, You must make sure the
+     requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+
+     If it is impossible for You to comply with any of the terms of this
+     License with respect to some or all of the Covered Code due to
+     statute, judicial order, or regulation then You must: (a) comply with
+     the terms of this License to the maximum extent possible; and (b)
+     describe the limitations and the code they affect. Such description
+     must be included in the LEGAL file described in Section 3.4 and must
+     be included with all distributions of the Source Code. Except to the
+     extent prohibited by statute or regulation, such description must be
+     sufficiently detailed for a recipient of ordinary skill to be able to
+     understand it.
+
+5. Application of this License.
+
+     This License applies to code to which the Initial Developer has
+     attached the notice in Exhibit A and to related Covered Code.
+
+6. Versions of the License.
+
+     6.1. New Versions.
+     Netscape Communications Corporation ("Netscape") may publish revised
+     and/or new versions of the License from time to time. Each version
+     will be given a distinguishing version number.
+
+     6.2. Effect of New Versions.
+     Once Covered Code has been published under a particular version of the
+     License, You may always continue to use it under the terms of that
+     version. You may also choose to use such Covered Code under the terms
+     of any subsequent version of the License published by Netscape. No one
+     other than Netscape has the right to modify the terms applicable to
+     Covered Code created under this License.
+
+     6.3. Derivative Works.
+     If You create or use a modified version of this License (which you may
+     only do in order to apply it to code which is not already Covered Code
+     governed by this License), You must (a) rename Your license so that
+     the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
+     "MPL", "NPL" or any confusingly similar phrase do not appear in your
+     license (except to note that your license differs from this License)
+     and (b) otherwise make it clear that Your version of the license
+     contains terms which differ from the Mozilla Public License and
+     Netscape Public License. (Filling in the name of the Initial
+     Developer, Original Code or Contributor in the notice described in
+     Exhibit A shall not of themselves be deemed to be modifications of
+     this License.)
+
+7. DISCLAIMER OF WARRANTY.
+
+     COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+     WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+     DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
+     THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
+     IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
+     YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
+     COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
+     OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
+     ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+
+     8.1.  This License and the rights granted hereunder will terminate
+     automatically if You fail to comply with terms herein and fail to cure
+     such breach within 30 days of becoming aware of the breach. All
+     sublicenses to the Covered Code which are properly granted shall
+     survive any termination of this License. Provisions which, by their
+     nature, must remain in effect beyond the termination of this License
+     shall survive.
+
+     8.2.  If You initiate litigation by asserting a patent infringement
+     claim (excluding declatory judgment actions) against Initial Developer
+     or a Contributor (the Initial Developer or Contributor against whom
+     You file such action is referred to as "Participant")  alleging that:
+
+     (a)  such Participant's Contributor Version directly or indirectly
+     infringes any patent, then any and all rights granted by such
+     Participant to You under Sections 2.1 and/or 2.2 of this License
+     shall, upon 60 days notice from Participant terminate prospectively,
+     unless if within 60 days after receipt of notice You either: (i)
+     agree in writing to pay Participant a mutually agreeable reasonable
+     royalty for Your past and future use of Modifications made by such
+     Participant, or (ii) withdraw Your litigation claim with respect to
+     the Contributor Version against such Participant.  If within 60 days
+     of notice, a reasonable royalty and payment arrangement are not
+     mutually agreed upon in writing by the parties or the litigation claim
+     is not withdrawn, the rights granted by Participant to You under
+     Sections 2.1 and/or 2.2 automatically terminate at the expiration of
+     the 60 day notice period specified above.
+
+     (b)  any software, hardware, or device, other than such Participant's
+     Contributor Version, directly or indirectly infringes any patent, then
+     any rights granted to You by such Participant under Sections 2.1(b)
+     and 2.2(b) are revoked effective as of the date You first made, used,
+     sold, distributed, or had made, Modifications made by that
+     Participant.
+
+     8.3.  If You assert a patent infringement claim against Participant
+     alleging that such Participant's Contributor Version directly or
+     indirectly infringes any patent where such claim is resolved (such as
+     by license or settlement) prior to the initiation of patent
+     infringement litigation, then the reasonable value of the licenses
+     granted by such Participant under Sections 2.1 or 2.2 shall be taken
+     into account in determining the amount or value of any payment or
+     license.
+
+     8.4.  In the event of termination under Sections 8.1 or 8.2 above,
+     all end user license agreements (excluding distributors and resellers)
+     which have been validly granted by You or any distributor hereunder
+     prior to termination shall survive termination.
+
+9. LIMITATION OF LIABILITY.
+
+     UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
+     (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
+     DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
+     OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
+     ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
+     CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
+     WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
+     COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
+     INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
+     LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
+     RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+     PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
+     EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
+     THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
+
+10. U.S. GOVERNMENT END USERS.
+
+     The Covered Code is a "commercial item," as that term is defined in
+     48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
+     software" and "commercial computer software documentation," as such
+     terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
+     C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
+     all U.S. Government End Users acquire Covered Code with only those
+     rights set forth herein.
+
+11. MISCELLANEOUS.
+
+     This License represents the complete agreement concerning subject
+     matter hereof. If any provision of this License is held to be
+     unenforceable, such provision shall be reformed only to the extent
+     necessary to make it enforceable. This License shall be governed by
+     California law provisions (except to the extent applicable law, if
+     any, provides otherwise), excluding its conflict-of-law provisions.
+     With respect to disputes in which at least one party is a citizen of,
+     or an entity chartered or registered to do business in the United
+     States of America, any litigation relating to this License shall be
+     subject to the jurisdiction of the Federal Courts of the Northern
+     District of California, with venue lying in Santa Clara County,
+     California, with the losing party responsible for costs, including
+     without limitation, court costs and reasonable attorneys' fees and
+     expenses. The application of the United Nations Convention on
+     Contracts for the International Sale of Goods is expressly excluded.
+     Any law or regulation which provides that the language of a contract
+     shall be construed against the drafter shall not apply to this
+     License.
+
+12. RESPONSIBILITY FOR CLAIMS.
+
+     As between Initial Developer and the Contributors, each party is
+     responsible for claims and damages arising, directly or indirectly,
+     out of its utilization of rights under this License and You agree to
+     work with Initial Developer and Contributors to distribute such
+     responsibility on an equitable basis. Nothing herein is intended or
+     shall be deemed to constitute any admission of liability.
+
+13. MULTIPLE-LICENSED CODE.
+
+     Initial Developer may designate portions of the Covered Code as
+     "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
+     Developer permits you to utilize portions of the Covered Code under
+     Your choice of the NPL or the alternative licenses, if any, specified
+     by the Initial Developer in the file described in Exhibit A.
+
+EXHIBIT A -Mozilla Public License.
+
+     ``The contents of this file are subject to the Mozilla Public License
+     Version 1.1 (the "License"); you may not use this file except in
+     compliance with the License. You may obtain a copy of the License at
+     http://www.mozilla.org/MPL/
+
+     Software distributed under the License is distributed on an "AS IS"
+     basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+     License for the specific language governing rights and limitations
+     under the License.
+
+     The Original Code is RabbitMQ Management Plugin.
+
+     The Initial Developer of the Original Code is GoPivotal, Inc.
+     Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.''
+
+     [NOTE: The text of this Exhibit A may differ slightly from the text of
+     the notices in the Source Code files of the Original Code. You should
+     use the text of this Exhibit A rather than the text found in the
+     Original Code Source Code for Your Modifications.]
diff --git a/deps/rabbitmq_federation_management/Makefile b/deps/rabbitmq_federation_management/Makefile
new file mode 100644 (file)
index 0000000..7a9e150
--- /dev/null
@@ -0,0 +1,14 @@
+PROJECT = rabbitmq_federation_management
+
+DEPS = rabbit_common rabbit rabbitmq_management webmachine
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_federation_management/README.md b/deps/rabbitmq_federation_management/README.md
new file mode 100644 (file)
index 0000000..d9e57b0
--- /dev/null
@@ -0,0 +1,38 @@
+# RabbitMQ Federation Management Plugin
+
+This plugin adds information on federation link status to the management
+plugin.
+
+
+## Installation
+
+In recent releases, this plugin ships with RabbitMQ. Enable it with
+
+    rabbitmq-plugins enable rabbitmq_management rabbitmq_federation_management
+
+If you have a heterogenous cluster (where the nodes have different
+plugins installed), this should be installed on the same nodes as the
+management plugin.
+
+
+## Use over HTTP API
+
+The HTTP API provided is tiny:
+
+    GET /api/federation-links
+
+
+## Building From Source
+
+To [build the plugin](http://www.rabbitmq.com/plugin-development.html), use
+
+    make dist
+
+and see under the `./plugins` directory.
+
+
+## Copyright and License
+
+(c) Pivotal Software Inc, 2007-2016.
+
+See `LICENSE` for license information.
diff --git a/deps/rabbitmq_federation_management/erlang.mk b/deps/rabbitmq_federation_management/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards  and  for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# Print a human-readable summary of package $(1): app name, description,
+# homepage, fetch method, repository and commit. The "Pkg name:" line is
+# shown only when the package key differs from its app name.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# 'make search q=STRING' prints every package whose name or description
+# contains STRING (case-insensitive, via core_lc); plain 'make search'
+# lists the whole package index.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+# OTP_DEPS was superseded by LOCAL_DEPS; warn users who still set it.
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies listed in IGNORE_DEPS are excluded from ALL_DEPS_DIRS below.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Point rebar-built dependencies at the same deps directory we use.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Resolve a dependency's name/repo/commit: an explicit dep_NAME variable in
+# the user's Makefile wins; otherwise fall back to the pkg_NAME_* entries
+# from the package index. git:// GitHub URLs are rewritten to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# ALL_APPS_DIRS: immediate subdirectories of apps/ (if it exists).
+# ALL_DEPS_DIRS: deps/<name> for every BUILD_DEPS/DEPS entry not ignored.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Add apps/ and deps/ to ERL_LIBS exactly once (skip if already present).
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# V=0: print a terse " DEP <name>" line; V=2: trace recipe commands.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# When already building a single app (IS_APP=1), do not recurse into apps/
+# again; the target becomes a no-op.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+# Top-level invocation: start from a clean log so every app is built
+# at most once per run.
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+# Build each app, skipping any already recorded in apps.log.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# SKIP_DEPS turns the deps target into a no-op.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+# Top-level invocation: reset the log so each dep is built only once.
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Build each dependency at most once (tracked in deps.log). A dep with no
+# Makefile of any spelling is a hard error.
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# Decide how to autopatch dependency $(1) so erlang.mk can build it:
+#  - already erlang.mk based: regenerate .app.src and replace its erlang.mk;
+#  - has a Makefile that includes a *.mk or mentions rebar: full autopatch;
+#  - has a plain Makefile: only regenerate the .app file;
+#  - no Makefile: no-op stub if there is no src/, full autopatch otherwise.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+
+# Full autopatch of dependency $(1): evaluate any .app.src.script, rewrite
+# the .app.src, then either convert rebar configuration (fetching rebar
+# first) or generate a minimal erlang.mk Makefile.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+
+# Give dependency $(1) a Makefile whose only target does nothing, so the
+# generic deps loop can still "build" it.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+# NO_AUTOPATCH_ERLANG_MK set: leave the dependency's own erlang.mk alone.
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# Generate a minimal erlang.mk-style Makefile for dependency $(1).
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build a pinned rebar (once, cached under ERLANG_MK_TMP) used
+# to interpret dependencies' rebar.config files.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# Convert a rebar-based dependency $(1): keep the original Makefile as
+# Makefile.orig.mk, generate an erlang.mk Makefile from its rebar config
+# (dep_autopatch_rebar.erl), and drop any prebuilt .app file.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang snippet (run via the project's erlang helper) that scans every
+# module for behaviour/parse_transform/include/include_lib/import
+# dependencies, writes per-file dependency rules to the file named by
+# $(1), and appends a COMPILE_FIRST list obtained from a topological
+# sort of the module dependency digraph. No comments may be added
+# inside the define: its body is evaluated as Erlang code.
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+# Regenerate the dependency file, unless NO_MAKEDEP is set and the
+# file already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# The marker file is refreshed on every run; when it already existed
+# (i.e. the Makefile is newer than the last build), all sources are
+# touched so they look newer than their build products.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# Compile the given Erlang sources into ebin/. -Werror is stripped when
+# built as a dependency (IS_DEP) so a dep's warnings cannot break us.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build the .app file: compile outdated sources, then either generate
+# the .app from the app_file template (no .app.src present) or fill in
+# the {modules, []} and {id, "git"} placeholders of src/*.app.src.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove everything the Erlang build produced, including generated
+# leex/yecc/ASN.1/MIB artifacts.
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# NOTE(review): was ".PHONY: docs-deps" — no such target exists; the
+# targets below are named "doc-deps".
+.PHONY: doc-deps
+
+# Configuration.
+
+# Directories of dependencies needed only to build documentation.
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+# Build each doc dependency. The "|| exit $$?" propagates the first
+# failure out of the shell for-loop; previously only the last
+# iteration's status reached make, silently ignoring earlier failures.
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep || exit $$?; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+# Directories of dependencies needed only to build a release.
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+# Build each release dependency. The "|| exit $$?" propagates the
+# first failure out of the shell for-loop; previously only the last
+# iteration's status reached make.
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep || exit $$?; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+# Directories of dependencies needed only to run the test suites.
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+# Build each test dependency (IS_DEP=1 relaxes -Werror downstream).
+# The "|| exit $$?" propagates the first failure out of the shell
+# for-loop; previously only the last iteration's status reached make.
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1 || exit $$?; done
+endif
+
+# Compile the test suites in place, alongside their sources.
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# test-build has three variants depending on project layout/state:
+# - no src/: only tests exist, so clean and build just the tests;
+# - src/ present but no ebin/test marker: full clean rebuild of app and
+#   tests with TEST_ERLC_OPTS, then drop the marker;
+# - marker present: incremental rebuild of app and tests.
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Normalize "opt1, opt2" into comma-joined tokens make can iterate.
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Keep only +option style flags (minus -Werror), stripping the '+'.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Render a rebar.config equivalent of DEPS/ERLC_OPTS so the project can
+# also be consumed by rebar-based builds. Hex deps become {name,vsn};
+# everything else becomes a {git, Repo, Commit} source.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the user guide (PDF + chunked HTML) when its source exists;
+# otherwise the target is a no-op.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build man pages from AsciiDoc sources, then gzip them per section.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f || exit $$? ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# Install the gzipped man pages under MAN_INSTALL_PATH.
+# NOTE(review): the original passed `id -u` (uid) to -g and `id -g`
+# (gid) to -o; install(1) expects -o OWNER and -g GROUP, so the two
+# operands were swapped. Failures now also abort the loop.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ || exit $$? ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove all generated documentation output.
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# Extend the global help output with the bootstrap target summary.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+# Template bodies are rendered verbatim into generated files, so no
+# comments may be added inside the define...endef blocks. "$p" is the
+# project name set via $(eval p := ...) by the bootstrap targets.
+
+# .app.src skeleton for an application (with start module).
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+# .app.src skeleton for a library (no start module).
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile skeleton for an application under $(APPS_DIR); it includes
+# the top-level erlang.mk via a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+# relx release configuration skeleton.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+# Empty sys.config skeleton.
+define bs_sys_config
+[
+].
+endef
+
+# vm.args skeleton for the release node.
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+# Module templates used by the "new" target; "$(n)" is the module name.
+# Template bodies are rendered verbatim, so no comments inside defines.
+
+# Empty one_for_one supervisor.
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# Minimal gen_server with all callbacks stubbed.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+# Bare module.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy HTTP handler replying 200 to everything.
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Minimal gen_fsm with a single state_name state.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+# Cowboy loop handler with 5s timeout and hibernation.
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Cowboy REST handler serving a static HTML body.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy websocket handler echoing text/binary frames.
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Ranch protocol skeleton with an accept-ack and a receive loop stub.
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render template $(1) into file $(2), escaping newlines, percent
+# signs, quotes and tabs so printf reproduces the body verbatim.
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace written for each leading tab in templates:
+# SP spaces when SP is set, otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Generate a full OTP application skeleton in the current directory.
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Same as bootstrap, but for a library: no app callback or supervisor.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config plus the rel/ runtime configuration files.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new OTP application skeleton under $(APPS_DIR)/$(in).
+# NOTE(review): the guard used "$in", which GNU make parses as the
+# single-character variable "i" followed by a literal "n", so the
+# "already exists" check never looked at the intended path. Fixed to
+# "$(in)", consistent with its use in the "new" target below.
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$(in)),)
+       $(error Error: Application $(in) already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new OTP library skeleton under $(APPS_DIR)/$(in).
+# NOTE(review): "$in" was parsed by make as variable "i" plus literal
+# "n", breaking the existence guard; fixed to "$(in)".
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$(in)),)
+       $(error Error: Application $(in) already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Generate a module named n=NAME from template t=TPL; with in=APP the
+# command is re-run inside that application's directory.
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# List template names by stripping the tpl_ prefix off known variables.
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+# "shared" builds a NIF/driver library; anything else an executable.
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+# ERTS/erl_interface paths come from $(C_SRC_ENV), generated below.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three cases: no c_src/ at all (nothing to do); c_src/ with its own
+# Makefile (delegate to it); plain sources (build them here).
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link all objects; -shared only when building a shared library.
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# Generate env.mk by asking the running Erlang VM for its ERTS and
+# erl_interface paths, then include it so the flags above resolve.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# NIF templates; "$n" is the module name. Bodies are rendered verbatim
+# into generated files, so no comments may be added inside the defines.
+
+# C side of the NIF: load/upgrade/unload bookkeeping plus a hello/1
+# function echoing its atom argument.
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+# Erlang side of the NIF: loads the shared object from priv/ on module
+# load; hello/1 errors until the NIF is loaded.
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Generate the C and Erlang halves of a NIF named n=NAME; with in=APP
+# the command is re-run inside that application's directory.
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+# kerl is fetched on demand and used to build/install OTP versions.
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+# With no CI_OTP versions configured, ci is a no-op.
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# Per-version target: run the test suite with that OTP on PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# Per-version target: build and install that OTP via kerl when missing.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+# Suite names are derived from *_SUITE.erl files found in TEST_DIR.
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+# Run all suites; when invoked at the top level (IS_APP unset), also
+# run each application's own suites via apps-ct.
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=group or t=group:case narrows a single-suite run.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+# One ct-SUITE target per suite.
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 90%
rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/federation.js
rename to deps/rabbitmq_federation_management/priv/www/js/federation.js
index c989072e86a5acaa3beb74c3ae05d47445b34e37..a5f74b460c0a71e3dd08257a541e5c17793ccc32 100644 (file)
@@ -37,21 +37,14 @@ NAVIGATION['Admin'][0]['Federation Upstreams'] = ['#/federation-upstreams', "pol
 HELP['federation-uri'] =
     'URI to connect to. If upstream is a cluster and can have several URIs, you can enter them here separated by spaces.';
 
-HELP['federation-expires'] =
-    'Time in milliseconds that the upstream should remember about this node for. After this time all upstream state will be removed. Leave this blank to mean "forever".';
-
-HELP['federation-ttl'] =
-    'Time in milliseconds that undelivered messages should be held upstream when there is a network outage or backlog. Leave this blank to mean "forever".';
-
-HELP['federation-max-hops'] =
-    'Maximum number of federation links that messages can traverse before being dropped. Defaults to 1 if not set.';
-
 HELP['federation-prefetch'] =
     'Maximum number of unacknowledged messages that may be in flight over a federation link at one time. Defaults to 1000 if not set.';
 
+
 HELP['federation-reconnect'] =
     'Time in seconds to wait after a network link goes down before attempting reconnection. Defaults to 5 if not set.';
 
+
 HELP['federation-ack-mode'] =
     '<dl>\
        <dt><code>on-confirm</code></dt>\
@@ -65,6 +58,25 @@ HELP['federation-ack-mode'] =
 HELP['federation-trust-user-id'] =
     'Set "Yes" to preserve the "user-id" field across a federation link, even if the user-id does not match that used to republish the message. Set to "No" to clear the "user-id" field when messages are federated. Only set this to "Yes" if you trust the upstream broker not to forge user-ids.';
 
+HELP['exchange'] =
+    'The name of the upstream exchange. Default is to use the same name as the federated exchange.';
+
+HELP['federation-max-hops'] =
+    'Maximum number of federation links that messages can traverse before being dropped. Defaults to 1 if not set.';
+
+HELP['federation-expires'] =
+    'Time in milliseconds that the upstream should remember about this node for. After this time all upstream state will be removed. Leave this blank to mean "forever".';
+
+HELP['federation-ttl'] =
+    'Time in milliseconds that undelivered messages should be held upstream when there is a network outage or backlog. Leave this blank to mean "forever".';
+
+HELP['ha-policy'] =
+    'Determines the "x-ha-policy" argument for the upstream queue for a federated exchange. Default is "none", meaning the queue is not HA.';
+
+HELP['queue'] =
+    'The name of the upstream queue. Default is to use the same name as the federated queue.';
+
+
 function link_fed_conn(vhost, name) {
     return _link_to(name, '#/federation-upstreams/' + esc(vhost) + '/' + esc(name));
 }
similarity index 72%
rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstream.ejs
rename to deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstream.ejs
index 6e66b4e336c3377b5d7c67212ec77393b4998686..da1331bd18f29fc9ae0acec06c9b90f99fc97508 100644 (file)
@@ -4,22 +4,14 @@
   <h2>Overview</h2>
   <div class="hider">
     <table class="facts">
+       <tr>
+          <th>
+          <h3>General parameters</h3>
+         </th>
       <tr>
         <th>URI</th>
         <td><%= fmt_string(upstream.value.uri) %></td>
       </tr>
-      <tr>
-        <th>Expires</th>
-        <td><%= fmt_time(upstream.value.expires, 'ms') %></td>
-      </tr>
-      <tr>
-        <th>Message TTL</th>
-        <td><%= fmt_time(upstream.value['message-ttl'], 'ms') %></td>
-      </tr>
-      <tr>
-        <th>Max Hops</th>
-        <td><%= fmt_string(upstream.value['max-hops']) %></td>
-      </tr>
       <tr>
         <th>Prefetch Count</th>
         <td><%= fmt_string(upstream.value['prefetch-count']) %></td>
         <th>Trust User-ID</th>
         <td><%= fmt_boolean(upstream.value['trust-user-id']) %></td>
       </tr>
+
+       <tr>
+          <th>
+          <h3>Federated exchange parameters</h3>
+         </th>
+        </tr>
+      <tr>
+        <th>Exchange</th>
+        <td><%= fmt_string(upstream.value['exchange']) %></td>
+      </tr>
+
+
+      <tr>
+        <th>Max Hops</th>
+        <td><%= fmt_string(upstream.value['max-hops']) %></td>
+      </tr>
+
+      <tr>
+        <th>Expires</th>
+        <td><%= fmt_time(upstream.value.expires, 'ms') %></td>
+      </tr>
+
+      <tr>
+        <th>Message TTL</th>
+        <td><%= fmt_time(upstream.value['message-ttl'], 'ms') %></td>
+      </tr>
+
+      <tr>
+        <th>HA Policy</th>
+        <td><%= fmt_string(upstream.value['ha-policy']) %></td>
+      </tr>
+
+       <tr>
+          <th>
+          <h3>Federated queue parameters</h3>
+         </th>
+        </tr>
+
+      <tr>
+        <th>Queue</th>
+        <td><%= fmt_string(upstream.value['queue']) %></td>
+      </tr>
+
+
+
     </table>
   </div>
 </div>
similarity index 81%
rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstreams.ejs
rename to deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs
index ff23d38c5cdf511f422b0151d6fa62f022d262a5..dc2341723a773e1e84e9d7bf449017e639a1e847 100644 (file)
 <% } %>
     <th>Name</th>
     <th>URI</th>
-    <th>Expiry</th>
-    <th>Message TTL</th>
-    <th>Max Hops</th>
     <th>Prefetch Count</th>
     <th>Reconnect Delay</th>
     <th>Ack mode</th>
     <th>Trust User-ID</th>
+    <th>Exchange</th>
+    <th>Max Hops</th>
+    <th>Expiry</th>
+    <th>Message TTL</th>
+    <th>HA Policy</th>
+    <th>Queue</th>
+
+
   </tr>
  </thead>
  <tbody>
 <% } %>
      <td><%= link_fed_conn(upstream.vhost, upstream.name) %></td>
      <td><%= fmt_shortened_uri(upstream.value.uri) %></td>
-     <td class="r"><%= fmt_time(upstream.value.expires, 'ms') %></td>
-     <td class="r"><%= fmt_time(upstream.value['message-ttl'], 'ms') %></td>
-     <td class="r"><%= upstream.value['max-hops'] %></td>
      <td class="r"><%= upstream.value['prefetch-count'] %></td>
      <td class="r"><%= fmt_time(upstream.value['reconnect-delay'], 's') %></td>
      <td class="c"><%= fmt_string(upstream.value['ack-mode']) %></td>
      <td class="c"><%= fmt_boolean(upstream.value['trust-user-id']) %></td>
+     <td class="c"><%= fmt_string(upstream.value['exchange']) %></td>
+     <td class="r"><%= upstream.value['max-hops'] %></td>
+     <td class="r"><%= fmt_time(upstream.value.expires, 'ms') %></td>
+     <td class="r"><%= fmt_time(upstream.value['message-ttl'], 'ms') %></td>
+     <td class="r"><%= upstream.value['ha-policy'] %></td>
+     <td class="r"><%= upstream.value['queue'] %></td>
    </tr>
 <% } %>
  </tbody>
     <form action="#/fed-parameters" method="put">
       <input type="hidden" name="component" value="federation-upstream"/>
       <table class="form">
+       <tr>
+          <th>
+          <h3>  General parameters </h3>
+         </th>
+        </tr>
 <% if (vhosts_interesting) { %>
         <tr>
           <th><label>Virtual host:</label></th>
@@ -72,6 +85,8 @@
           <th><label>Name:</label></th>
           <td><input type="text" name="name"/><span class="mand">*</span></td>
         </tr>
+       
+
         <tr>
           <th>
             <label>
           </th>
           <td><input type="text" name="uri"/><span class="mand">*</span></td>
         </tr>
-        <tr>
-          <th>
-            <label>
-              Expires:
-              <span class="help" id="federation-expires"></span>
-            </label>
-          </th>
-          <td><input type="text" name="expires"/> ms</td>
-        </tr>
-        <tr>
-          <th>
-            <label>
-              Message TTL:
-              <span class="help" id="federation-ttl"></span>
-            </label>
-          </th>
-          <td><input type="text" name="message-ttl"/> ms</td>
-        </tr>
-        <tr>
-          <th>
-            <label>
-              Max hops:
-              <span class="help" id="federation-max-hops"></span>
-            </label>
-          </th>
-          <td><input type="text" name="max-hops"/></td>
-        </tr>
         <tr>
           <th>
             <label>
               <span class="help" id="federation-trust-user-id"></span>
             </label>
           </th>
+          
           <td>
             <select name="trust-user-id">
               <option value="false">No</option>
               <option value="true">Yes</option>
             </select>
           </td>
+
+        <tr>
+          <th>
+          <h3>Federated exchanges parameters </h3>
+         </th>
         </tr>
+
+
+         <tr>
+          <th>
+            <label>
+              Exchange:
+              <span class="help" id="exchange"></span>
+            </label>
+          </th>
+          <td><input type="text" name="exchange"/></td>
+        </tr>
+
+       <tr>
+          <th>
+            <label>
+              Max hops:
+              <span class="help" id="federation-max-hops"></span>
+            </label>
+          </th>
+          <td><input type="text" name="max-hops"/></td>
+        </tr>
+
+        <tr>
+          <th>
+            <label>
+              Expires:
+              <span class="help" id="federation-expires"></span>
+            </label>
+          </th>
+          <td><input type="text" name="expires"/> ms</td>
+        </tr>
+
+        <tr>
+          <th>
+            <label>
+              Message TTL:
+              <span class="help" id="federation-ttl"></span>
+            </label>
+          </th>
+          <td><input type="text" name="message-ttl"/> ms</td>
+        </tr>
+
+
+        <tr>
+          <th>
+            <label>
+              HA Policy:
+              <span class="help" id="ha-policy"></span>
+            </label>
+          </th>
+          <td><input type="text" name="ha-policy"/></td>
+        </tr>
+        </tr>
+
+       <tr>
+          <th>
+          <h3>Federated queues parameter </h3>
+         </th>
+        </tr>
+
+         <tr>
+          <th>
+            <label>
+              Queue:
+              <span class="help" id="queue"></span>
+            </label>
+          </th>
+          <td><input type="text" name="queue"/></td>
+        </tr>
+        </tr>
+
+
+
       </table>
       <input type="submit" value="Add upstream"/>
     </form>
diff --git a/deps/rabbitmq_federation_management/rabbitmq-components.mk b/deps/rabbitmq_federation_management/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#     target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbit_federation_mgmt.erl
rename to deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl
index 10dde014bf4752ec1fcd627e4239b83292544aa5..b4dc5f5d02e6c716ce2e80ec9f844affadf6ec83 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_federation_mgmt).
similarity index 61%
rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbitmq_federation_management.app.src
rename to deps/rabbitmq_federation_management/src/rabbitmq_federation_management.app.src
index e8374dc34e7062e801b0eecc33b761bdd3f98860..5a6605457a5a701525db5fa742a3fd5688be8a71 100644 (file)
@@ -1,8 +1,8 @@
 {application, rabbitmq_federation_management,
  [{description, "RabbitMQ Federation Management"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {env, []},
-  {applications, [kernel, stdlib, rabbitmq_management]}
+  {applications, [kernel, stdlib, rabbit_common, rabbit, rabbitmq_management]}
  ]}.
diff --git a/deps/rabbitmq_jms_topic_exchange/CODE_OF_CONDUCT.md b/deps/rabbitmq_jms_topic_exchange/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md b/deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_jms_topic_exchange/LICENSE b/deps/rabbitmq_jms_topic_exchange/LICENSE
new file mode 100644 (file)
index 0000000..53c9573
--- /dev/null
@@ -0,0 +1,461 @@
+                          MOZILLA PUBLIC LICENSE
+                                Version 1.1
+
+                              ---------------
+
+1. Definitions.
+
+     1.0.1. "Commercial Use" means distribution or otherwise making the
+     Covered Code available to a third party.
+
+     1.1. "Contributor" means each entity that creates or contributes to
+     the creation of Modifications.
+
+     1.2. "Contributor Version" means the combination of the Original
+     Code, prior Modifications used by a Contributor, and the Modifications
+     made by that particular Contributor.
+
+     1.3. "Covered Code" means the Original Code or Modifications or the
+     combination of the Original Code and Modifications, in each case
+     including portions thereof.
+
+     1.4. "Electronic Distribution Mechanism" means a mechanism generally
+     accepted in the software development community for the electronic
+     transfer of data.
+
+     1.5. "Executable" means Covered Code in any form other than Source
+     Code.
+
+     1.6. "Initial Developer" means the individual or entity identified
+     as the Initial Developer in the Source Code notice required by Exhibit
+     A.
+
+     1.7. "Larger Work" means a work which combines Covered Code or
+     portions thereof with code not governed by the terms of this License.
+
+     1.8. "License" means this document.
+
+     1.8.1. "Licensable" means having the right to grant, to the maximum
+     extent possible, whether at the time of the initial grant or
+     subsequently acquired, any and all of the rights conveyed herein.
+
+     1.9. "Modifications" means any addition to or deletion from the
+     substance or structure of either the Original Code or any previous
+     Modifications. When Covered Code is released as a series of files, a
+     Modification is:
+          A. Any addition to or deletion from the contents of a file
+          containing Original Code or previous Modifications.
+
+          B. Any new file that contains any part of the Original Code or
+          previous Modifications.
+
+     1.10. "Original Code" means Source Code of computer software code
+     which is described in the Source Code notice required by Exhibit A as
+     Original Code, and which, at the time of its release under this
+     License is not already Covered Code governed by this License.
+
+     1.10.1. "Patent Claims" means any patent claim(s), now owned or
+     hereafter acquired, including without limitation,  method, process,
+     and apparatus claims, in any patent Licensable by grantor.
+
+     1.11. "Source Code" means the preferred form of the Covered Code for
+     making modifications to it, including all modules it contains, plus
+     any associated interface definition files, scripts used to control
+     compilation and installation of an Executable, or source code
+     differential comparisons against either the Original Code or another
+     well known, available Covered Code of the Contributor's choice. The
+     Source Code can be in a compressed or archival form, provided the
+     appropriate decompression or de-archiving software is widely available
+     for no charge.
+
+     1.12. "You" (or "Your")  means an individual or a legal entity
+     exercising rights under, and complying with all of the terms of, this
+     License or a future version of this License issued under Section 6.1.
+     For legal entities, "You" includes any entity which controls, is
+     controlled by, or is under common control with You. For purposes of
+     this definition, "control" means (a) the power, direct or indirect,
+     to cause the direction or management of such entity, whether by
+     contract or otherwise, or (b) ownership of more than fifty percent
+     (50%) of the outstanding shares or beneficial ownership of such
+     entity.
+
+2. Source Code License.
+
+     2.1. The Initial Developer Grant.
+     The Initial Developer hereby grants You a world-wide, royalty-free,
+     non-exclusive license, subject to third party intellectual property
+     claims:
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Initial Developer to use, reproduce,
+          modify, display, perform, sublicense and distribute the Original
+          Code (or portions thereof) with or without Modifications, and/or
+          as part of a Larger Work; and
+
+          (b) under Patents Claims infringed by the making, using or
+          selling of Original Code, to make, have made, use, practice,
+          sell, and offer for sale, and/or otherwise dispose of the
+          Original Code (or portions thereof).
+
+          (c) the licenses granted in this Section 2.1(a) and (b) are
+          effective on the date Initial Developer first distributes
+          Original Code under the terms of this License.
+
+          (d) Notwithstanding Section 2.1(b) above, no patent license is
+          granted: 1) for code that You delete from the Original Code; 2)
+          separate from the Original Code;  or 3) for infringements caused
+          by: i) the modification of the Original Code or ii) the
+          combination of the Original Code with other software or devices.
+
+     2.2. Contributor Grant.
+     Subject to third party intellectual property claims, each Contributor
+     hereby grants You a world-wide, royalty-free, non-exclusive license
+
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Contributor, to use, reproduce, modify,
+          display, perform, sublicense and distribute the Modifications
+          created by such Contributor (or portions thereof) either on an
+          unmodified basis, with other Modifications, as Covered Code
+          and/or as part of a Larger Work; and
+
+          (b) under Patent Claims infringed by the making, using, or
+          selling of  Modifications made by that Contributor either alone
+          and/or in combination with its Contributor Version (or portions
+          of such combination), to make, use, sell, offer for sale, have
+          made, and/or otherwise dispose of: 1) Modifications made by that
+          Contributor (or portions thereof); and 2) the combination of
+          Modifications made by that Contributor with its Contributor
+          Version (or portions of such combination).
+
+          (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
+          effective on the date Contributor first makes Commercial Use of
+          the Covered Code.
+
+          (d)    Notwithstanding Section 2.2(b) above, no patent license is
+          granted: 1) for any code that Contributor has deleted from the
+          Contributor Version; 2)  separate from the Contributor Version;
+          3)  for infringements caused by: i) third party modifications of
+          Contributor Version or ii)  the combination of Modifications made
+          by that Contributor with other software  (except as part of the
+          Contributor Version) or other devices; or 4) under Patent Claims
+          infringed by Covered Code in the absence of Modifications made by
+          that Contributor.
+
+3. Distribution Obligations.
+
+     3.1. Application of License.
+     The Modifications which You create or to which You contribute are
+     governed by the terms of this License, including without limitation
+     Section 2.2. The Source Code version of Covered Code may be
+     distributed only under the terms of this License or a future version
+     of this License released under Section 6.1, and You must include a
+     copy of this License with every copy of the Source Code You
+     distribute. You may not offer or impose any terms on any Source Code
+     version that alters or restricts the applicable version of this
+     License or the recipients' rights hereunder. However, You may include
+     an additional document offering the additional rights described in
+     Section 3.5.
+
+     3.2. Availability of Source Code.
+     Any Modification which You create or to which You contribute must be
+     made available in Source Code form under the terms of this License
+     either on the same media as an Executable version or via an accepted
+     Electronic Distribution Mechanism to anyone to whom you made an
+     Executable version available; and if made available via Electronic
+     Distribution Mechanism, must remain available for at least twelve (12)
+     months after the date it initially became available, or at least six
+     (6) months after a subsequent version of that particular Modification
+     has been made available to such recipients. You are responsible for
+     ensuring that the Source Code version remains available even if the
+     Electronic Distribution Mechanism is maintained by a third party.
+
+     3.3. Description of Modifications.
+     You must cause all Covered Code to which You contribute to contain a
+     file documenting the changes You made to create that Covered Code and
+     the date of any change. You must include a prominent statement that
+     the Modification is derived, directly or indirectly, from Original
+     Code provided by the Initial Developer and including the name of the
+     Initial Developer in (a) the Source Code, and (b) in any notice in an
+     Executable version or related documentation in which You describe the
+     origin or ownership of the Covered Code.
+
+     3.4. Intellectual Property Matters
+          (a) Third Party Claims.
+          If Contributor has knowledge that a license under a third party's
+          intellectual property rights is required to exercise the rights
+          granted by such Contributor under Sections 2.1 or 2.2,
+          Contributor must include a text file with the Source Code
+          distribution titled "LEGAL" which describes the claim and the
+          party making the claim in sufficient detail that a recipient will
+          know whom to contact. If Contributor obtains such knowledge after
+          the Modification is made available as described in Section 3.2,
+          Contributor shall promptly modify the LEGAL file in all copies
+          Contributor makes available thereafter and shall take other steps
+          (such as notifying appropriate mailing lists or newsgroups)
+          reasonably calculated to inform those who received the Covered
+          Code that new knowledge has been obtained.
+
+          (b) Contributor APIs.
+          If Contributor's Modifications include an application programming
+          interface and Contributor has knowledge of patent licenses which
+          are reasonably necessary to implement that API, Contributor must
+          also include this information in the LEGAL file.
+
+               (c)    Representations.
+          Contributor represents that, except as disclosed pursuant to
+          Section 3.4(a) above, Contributor believes that Contributor's
+          Modifications are Contributor's original creation(s) and/or
+          Contributor has sufficient rights to grant the rights conveyed by
+          this License.
+
+     3.5. Required Notices.
+     You must duplicate the notice in Exhibit A in each file of the Source
+     Code.  If it is not possible to put such notice in a particular Source
+     Code file due to its structure, then You must include such notice in a
+     location (such as a relevant directory) where a user would be likely
+     to look for such a notice.  If You created one or more Modification(s)
+     You may add your name as a Contributor to the notice described in
+     Exhibit A.  You must also duplicate this License in any documentation
+     for the Source Code where You describe recipients' rights or ownership
+     rights relating to Covered Code.  You may choose to offer, and to
+     charge a fee for, warranty, support, indemnity or liability
+     obligations to one or more recipients of Covered Code. However, You
+     may do so only on Your own behalf, and not on behalf of the Initial
+     Developer or any Contributor. You must make it absolutely clear than
+     any such warranty, support, indemnity or liability obligation is
+     offered by You alone, and You hereby agree to indemnify the Initial
+     Developer and every Contributor for any liability incurred by the
+     Initial Developer or such Contributor as a result of warranty,
+     support, indemnity or liability terms You offer.
+
+     3.6. Distribution of Executable Versions.
+     You may distribute Covered Code in Executable form only if the
+     requirements of Section 3.1-3.5 have been met for that Covered Code,
+     and if You include a notice stating that the Source Code version of
+     the Covered Code is available under the terms of this License,
+     including a description of how and where You have fulfilled the
+     obligations of Section 3.2. The notice must be conspicuously included
+     in any notice in an Executable version, related documentation or
+     collateral in which You describe recipients' rights relating to the
+     Covered Code. You may distribute the Executable version of Covered
+     Code or ownership rights under a license of Your choice, which may
+     contain terms different from this License, provided that You are in
+     compliance with the terms of this License and that the license for the
+     Executable version does not attempt to limit or alter the recipient's
+     rights in the Source Code version from the rights set forth in this
+     License. If You distribute the Executable version under a different
+     license You must make it absolutely clear that any terms which differ
+     from this License are offered by You alone, not by the Initial
+     Developer or any Contributor. You hereby agree to indemnify the
+     Initial Developer and every Contributor for any liability incurred by
+     the Initial Developer or such Contributor as a result of any such
+     terms You offer.
+
+     3.7. Larger Works.
+     You may create a Larger Work by combining Covered Code with other code
+     not governed by the terms of this License and distribute the Larger
+     Work as a single product. In such a case, You must make sure the
+     requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+
+     If it is impossible for You to comply with any of the terms of this
+     License with respect to some or all of the Covered Code due to
+     statute, judicial order, or regulation then You must: (a) comply with
+     the terms of this License to the maximum extent possible; and (b)
+     describe the limitations and the code they affect. Such description
+     must be included in the LEGAL file described in Section 3.4 and must
+     be included with all distributions of the Source Code. Except to the
+     extent prohibited by statute or regulation, such description must be
+     sufficiently detailed for a recipient of ordinary skill to be able to
+     understand it.
+
+5. Application of this License.
+
+     This License applies to code to which the Initial Developer has
+     attached the notice in Exhibit A and to related Covered Code.
+
+6. Versions of the License.
+
+     6.1. New Versions.
+     Netscape Communications Corporation ("Netscape") may publish revised
+     and/or new versions of the License from time to time. Each version
+     will be given a distinguishing version number.
+
+     6.2. Effect of New Versions.
+     Once Covered Code has been published under a particular version of the
+     License, You may always continue to use it under the terms of that
+     version. You may also choose to use such Covered Code under the terms
+     of any subsequent version of the License published by Netscape. No one
+     other than Netscape has the right to modify the terms applicable to
+     Covered Code created under this License.
+
+     6.3. Derivative Works.
+     If You create or use a modified version of this License (which you may
+     only do in order to apply it to code which is not already Covered Code
+     governed by this License), You must (a) rename Your license so that
+     the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
+     "MPL", "NPL" or any confusingly similar phrase do not appear in your
+     license (except to note that your license differs from this License)
+     and (b) otherwise make it clear that Your version of the license
+     contains terms which differ from the Mozilla Public License and
+     Netscape Public License. (Filling in the name of the Initial
+     Developer, Original Code or Contributor in the notice described in
+     Exhibit A shall not of themselves be deemed to be modifications of
+     this License.)
+
+7. DISCLAIMER OF WARRANTY.
+
+     COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+     WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+     DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
+     THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
+     IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
+     YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
+     COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
+     OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
+     ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+
+     8.1.  This License and the rights granted hereunder will terminate
+     automatically if You fail to comply with terms herein and fail to cure
+     such breach within 30 days of becoming aware of the breach. All
+     sublicenses to the Covered Code which are properly granted shall
+     survive any termination of this License. Provisions which, by their
+     nature, must remain in effect beyond the termination of this License
+     shall survive.
+
+     8.2.  If You initiate litigation by asserting a patent infringement
+     claim (excluding declatory judgment actions) against Initial Developer
+     or a Contributor (the Initial Developer or Contributor against whom
+     You file such action is referred to as "Participant")  alleging that:
+
+     (a)  such Participant's Contributor Version directly or indirectly
+     infringes any patent, then any and all rights granted by such
+     Participant to You under Sections 2.1 and/or 2.2 of this License
+     shall, upon 60 days notice from Participant terminate prospectively,
+     unless if within 60 days after receipt of notice You either: (i)
+     agree in writing to pay Participant a mutually agreeable reasonable
+     royalty for Your past and future use of Modifications made by such
+     Participant, or (ii) withdraw Your litigation claim with respect to
+     the Contributor Version against such Participant.  If within 60 days
+     of notice, a reasonable royalty and payment arrangement are not
+     mutually agreed upon in writing by the parties or the litigation claim
+     is not withdrawn, the rights granted by Participant to You under
+     Sections 2.1 and/or 2.2 automatically terminate at the expiration of
+     the 60 day notice period specified above.
+
+     (b)  any software, hardware, or device, other than such Participant's
+     Contributor Version, directly or indirectly infringes any patent, then
+     any rights granted to You by such Participant under Sections 2.1(b)
+     and 2.2(b) are revoked effective as of the date You first made, used,
+     sold, distributed, or had made, Modifications made by that
+     Participant.
+
+     8.3.  If You assert a patent infringement claim against Participant
+     alleging that such Participant's Contributor Version directly or
+     indirectly infringes any patent where such claim is resolved (such as
+     by license or settlement) prior to the initiation of patent
+     infringement litigation, then the reasonable value of the licenses
+     granted by such Participant under Sections 2.1 or 2.2 shall be taken
+     into account in determining the amount or value of any payment or
+     license.
+
+     8.4.  In the event of termination under Sections 8.1 or 8.2 above,
+     all end user license agreements (excluding distributors and resellers)
+     which have been validly granted by You or any distributor hereunder
+     prior to termination shall survive termination.
+
+9. LIMITATION OF LIABILITY.
+
+     UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
+     (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
+     DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
+     OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
+     ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
+     CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
+     WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
+     COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
+     INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
+     LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
+     RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+     PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
+     EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
+     THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
+
+10. U.S. GOVERNMENT END USERS.
+
+     The Covered Code is a "commercial item," as that term is defined in
+     48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
+     software" and "commercial computer software documentation," as such
+     terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
+     C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
+     all U.S. Government End Users acquire Covered Code with only those
+     rights set forth herein.
+
+11. MISCELLANEOUS.
+
+     This License represents the complete agreement concerning subject
+     matter hereof. If any provision of this License is held to be
+     unenforceable, such provision shall be reformed only to the extent
+     necessary to make it enforceable. This License shall be governed by
+     California law provisions (except to the extent applicable law, if
+     any, provides otherwise), excluding its conflict-of-law provisions.
+     With respect to disputes in which at least one party is a citizen of,
+     or an entity chartered or registered to do business in the United
+     States of America, any litigation relating to this License shall be
+     subject to the jurisdiction of the Federal Courts of the Northern
+     District of California, with venue lying in Santa Clara County,
+     California, with the losing party responsible for costs, including
+     without limitation, court costs and reasonable attorneys' fees and
+     expenses. The application of the United Nations Convention on
+     Contracts for the International Sale of Goods is expressly excluded.
+     Any law or regulation which provides that the language of a contract
+     shall be construed against the drafter shall not apply to this
+     License.
+
+12. RESPONSIBILITY FOR CLAIMS.
+
+     As between Initial Developer and the Contributors, each party is
+     responsible for claims and damages arising, directly or indirectly,
+     out of its utilization of rights under this License and You agree to
+     work with Initial Developer and Contributors to distribute such
+     responsibility on an equitable basis. Nothing herein is intended or
+     shall be deemed to constitute any admission of liability.
+
+13. MULTIPLE-LICENSED CODE.
+
+     Initial Developer may designate portions of the Covered Code as
+     "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
+     Developer permits you to utilize portions of the Covered Code under
+     Your choice of the NPL or the alternative licenses, if any, specified
+     by the Initial Developer in the file described in Exhibit A.
+
+EXHIBIT A -Mozilla Public License.
+
+     ``The contents of this file are subject to the Mozilla Public License
+     Version 1.1 (the "License"); you may not use this file except in
+     compliance with the License. You may obtain a copy of the License at
+     http://www.mozilla.org/MPL/
+
+     Software distributed under the License is distributed on an "AS IS"
+     basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+     License for the specific language governing rights and limitations
+     under the License.
+
+     The Original Code is rabbitmq-jms-topic-exchange.
+
+     The Initial Developer of the Original Code is Pivotal Software, Inc.
+
+     All Rights Reserved.
+
+     Contributor(s): ______________________________________.''
+
+     [NOTE: The text of this Exhibit A may differ slightly from the text of
+     the notices in the Source Code files of the Original Code. You should
+     use the text of this Exhibit A rather than the text found in the
+     Original Code Source Code for Your Modifications.]
+
+
+
diff --git a/deps/rabbitmq_jms_topic_exchange/LICENSES.txt b/deps/rabbitmq_jms_topic_exchange/LICENSES.txt
new file mode 100644 (file)
index 0000000..43fc4b6
--- /dev/null
@@ -0,0 +1,865 @@
+open_source_license.txt\r
+\r
+JMS Client for Pivotal RabbitMQ 1.4.6 GA\r
+\r
+===========================================================================\r
+\r
+The following copyright statements and licenses apply to various open\r
+source software packages (or portions thereof) that are distributed with\r
+this Pivotal Software, Inc. Product.\r
+\r
+The Pivotal Product may also include other Pivotal components, which may\r
+contain additional open source software packages. One or more such\r
+open_source_licenses.txt files may therefore accompany this Pivotal\r
+Product.\r
+\r
+The Pivotal Product that includes this file does not necessarily use all\r
+the open source software packages referred to below and may also only\r
+use portions of a given package.\r
+\r
+\r
+=============== TABLE OF CONTENTS =============================\r
+\r
+\r
+The following is a listing of the open source components detailed in\r
+this document. This list is provided for your convenience; please read\r
+further if you wish to review the copyright notice(s) and the full text\r
+of the license associated with each component.\r
+\r
+\r
+\r
+\r
+SECTION 1: BSD-STYLE, MIT-STYLE, OR SIMILAR STYLE LICENSES\r
+\r
+   >>> slf4j-api-1.7.5\r
+\r
+\r
+\r
+SECTION 2: Apache License, V2.0\r
+\r
+   >>> geronimo-jms_1.1_spec-1.1.1\r
+\r
+\r
+\r
+SECTION 3: Mozilla Public License, V1.1\r
+\r
+   >>> amqp-client-3.5.6\r
+\r
+\r
+\r
+APPENDIX. Standard License Files\r
+\r
+   >>> Apache License, V2.0\r
+\r
+   >>> Mozilla Public License, V1.1\r
+\r
+\r
+\r
+--------------- SECTION 1:  BSD-STYLE, MIT-STYLE, OR SIMILAR STYLE LICENSES ----------\r
+\r
+BSD-STYLE, MIT-STYLE, OR SIMILAR STYLE LICENSES are applicable to the following component(s).\r
+\r
+\r
+>>> slf4j-api-1.7.5\r
+\r
+Copyright (c) 2004-2011 QOS.ch\r
+All rights reserved.\r
+\r
+Permission is hereby granted, free  of charge, to any person obtaining\r
+a  copy  of this  software  and  associated  documentation files  (the\r
+"Software"), to  deal in  the Software without  restriction, including\r
+without limitation  the rights to  use, copy, modify,  merge, publish,\r
+distribute,  sublicense, and/or sell  copies of  the Software,  and to\r
+permit persons to whom the Software  is furnished to do so, subject to\r
+the following conditions:\r
+\r
+The  above  copyright  notice  and  this permission  notice  shall  be\r
+included in all copies or substantial portions of the Software.\r
+\r
+THE  SOFTWARE IS  PROVIDED  "AS  IS", WITHOUT  WARRANTY  OF ANY  KIND,\r
+EXPRESS OR  IMPLIED, INCLUDING  BUT NOT LIMITED  TO THE  WARRANTIES OF\r
+MERCHANTABILITY,    FITNESS    FOR    A   PARTICULAR    PURPOSE    AND\r
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\r
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\r
+OF CONTRACT, TORT OR OTHERWISE,  ARISING FROM, OUT OF OR IN CONNECTION\r
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\r
+\r
+\r
+--------------- SECTION 2: Apache License, V2.0 ----------\r
+\r
+Apache License, V2.0 is applicable to the following component(s).\r
+\r
+\r
+>>> geronimo-jms_1.1_spec-1.1.1\r
+\r
+Apache Geronimo \r
+Copyright 2003-2008 The Apache Software Foundation\r
+\r
+This product includes software developed by\r
+The Apache Software Foundation (http://www.apache.org/).\r
+\r
+\r
+--------------- SECTION 3: Mozilla Public License, V1.1 ----------\r
+\r
+Mozilla Public License, V1.1 is applicable to the following component(s).\r
+\r
+\r
+>>> amqp-client-3.5.6\r
+\r
+//  The contents of this file are subject to the Mozilla Public License\r
+//  Version 1.1 (the "License"); you may not use this file except in\r
+//  compliance with the License. You may obtain a copy of the License\r
+//  at http://www.mozilla.org/MPL/\r
+//\r
+//  Software distributed under the License is distributed on an "AS IS"\r
+//  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See\r
+//  the License for the specific language governing rights and\r
+//  limitations under the License.\r
+//\r
+//  The Original Code is RabbitMQ.\r
+//\r
+//  The Initial Developer of the Original Code is GoPivotal, Inc.\r
+//  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.\r
+\r
+ADDITIONAL LICENSE INFORMATION:\r
+\r
+> Apache 2.0\r
+\r
+amqp-client-3.5.6-sources.jar\com\rabbitmq\tools\json\JSONWriter.java\r
+\r
+/*\r
+   Copyright (c) 2006-2007 Frank Carver\r
+   Copyright (c) 2007-2015 Pivotal Software, Inc. All Rights Reserved\r
+\r
+   Licensed under the Apache License, Version 2.0 (the "License");\r
+   you may not use this file except in compliance with the License.\r
+   You may obtain a copy of the License at\r
+\r
+       http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+   Unless required by applicable law or agreed to in writing, software\r
+   distributed under the License is distributed on an "AS IS" BASIS,\r
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+   See the License for the specific language governing permissions and\r
+   limitations under the License.\r
+\r
+\r
+\r
+> Public Domain\r
+\r
+amqp-client-3.5.6-sources.jar\com\rabbitmq\client\impl\VariableLinkedBlockingQueue.java\r
+\r
+/*\r
+ * Modifications Copyright 2015 Pivotal Software, Inc and licenced as per\r
+ * the rest of the RabbitMQ Java client.\r
+ */\r
+* Written by Doug Lea with assistance from members of JCP JSR-166\r
+ * Expert Group and released to the public domain, as explained at\r
+ * http://creativecommons.org/licenses/publicdomain\r
+ */\r
+\r
+\r
+=============== APPENDIX. Standard License Files ============== \r
+\r
+\r
+\r
+--------------- SECTION 1: Apache License, V2.0 -----------\r
+\r
+Apache License \r
+\r
+Version 2.0, January 2004 \r
+http://www.apache.org/licenses/ \r
+\r
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION \r
+\r
+1. Definitions.\r
+\r
+"License" shall mean the terms and conditions for use, reproduction,\r
+and distribution as defined by Sections 1 through 9 of this document.\r
+\r
+"Licensor" shall mean the copyright owner or entity authorized by the\r
+copyright owner that is granting the License.  \r
+\r
+"Legal Entity" shall mean the union of the acting entity and all other\r
+entities that control, are controlled by, or are under common control\r
+with that entity. For the purposes of this definition, "control" means\r
+(i) the power, direct or indirect, to cause the direction or management\r
+of such entity, whether by contract or otherwise, or (ii) ownership\r
+of fifty percent (50%) or more of the outstanding shares, or (iii)\r
+beneficial ownership of such entity.\r
+\r
+"You" (or "Your") shall mean an individual or Legal Entity exercising\r
+permissions granted by this License.  \r
+\r
+"Source" form shall mean the preferred form for making modifications,\r
+including but not limited to software source code, documentation source,\r
+and configuration files.\r
+\r
+"Object" form shall mean any form resulting from mechanical transformation\r
+or translation of a Source form, including but not limited to compiled\r
+object code, generated documentation, and conversions to other media\r
+types.  \r
+\r
+"Work" shall mean the work of authorship, whether in Source or\r
+Object form, made available under the License, as indicated by a copyright\r
+notice that is included in or attached to the work (an example is provided\r
+in the Appendix below).  \r
+\r
+"Derivative Works" shall mean any work, whether in Source or Object form,\r
+that is based on (or derived from) the Work and for which the editorial\r
+revisions, annotations, elaborations, or other modifications represent,\r
+as a whole, an original work of authorship. For the purposes of this\r
+License, Derivative Works shall not include works that remain separable\r
+from, or merely link (or bind by name) to the interfaces of, the Work\r
+and Derivative Works thereof.\r
+\r
+"Contribution" shall mean any work of authorship, including the\r
+original version of the Work and any modifications or additions to\r
+that Work or Derivative Works thereof, that is intentionally submitted\r
+to Licensor for inclusion in the Work by the copyright owner or by an\r
+individual or Legal Entity authorized to submit on behalf of the copyright\r
+owner. For the purposes of this definition, "submitted" means any form of\r
+electronic, verbal, or written communication sent to the Licensor or its\r
+representatives, including but not limited to communication on electronic\r
+mailing lists, source code control systems, and issue tracking systems\r
+that are managed by, or on behalf of, the Licensor for the purpose of\r
+discussing and improving the Work, but excluding communication that is\r
+conspicuously marked or otherwise designated in writing by the copyright\r
+owner as "Not a Contribution."\r
+\r
+"Contributor" shall mean Licensor and any individual or Legal Entity\r
+on behalf of whom a Contribution has been received by Licensor and\r
+subsequently incorporated within the Work.\r
+\r
+2. Grant of Copyright License.\r
+Subject to the terms and conditions of this License, each Contributor\r
+hereby grants to You a perpetual, worldwide, non-exclusive, no-charge,\r
+royalty-free, irrevocable copyright license to reproduce, prepare\r
+Derivative Works of, publicly display, publicly perform, sublicense, and\r
+distribute the Work and such Derivative Works in Source or Object form.\r
+\r
+3. Grant of Patent License.\r
+Subject to the terms and conditions of this License, each Contributor\r
+hereby grants to You a perpetual, worldwide, non-exclusive, no-charge,\r
+royalty- free, irrevocable (except as stated in this section) patent\r
+license to make, have made, use, offer to sell, sell, import, and\r
+otherwise transfer the Work, where such license applies only to those\r
+patent claims licensable by such Contributor that are necessarily\r
+infringed by their Contribution(s) alone or by combination of\r
+their Contribution(s) with the Work to which such Contribution(s)\r
+was submitted. If You institute patent litigation against any entity\r
+(including a cross-claim or counterclaim in a lawsuit) alleging that the\r
+Work or a Contribution incorporated within the Work constitutes direct\r
+or contributory patent infringement, then any patent licenses granted\r
+to You under this License for that Work shall terminate as of the date\r
+such litigation is filed.\r
+\r
+4. Redistribution.\r
+You may reproduce and distribute copies of the Work or Derivative Works\r
+thereof in any medium, with or without modifications, and in Source or\r
+Object form, provided that You meet the following conditions:\r
+\r
+  a. You must give any other recipients of the Work or Derivative Works\r
+     a copy of this License; and\r
+\r
+  b. You must cause any modified files to carry prominent notices stating\r
+     that You changed the files; and\r
+\r
+  c. You must retain, in the Source form of any Derivative Works that\r
+     You distribute, all copyright, patent, trademark, and attribution\r
+     notices from the Source form of the Work, excluding those notices\r
+     that do not pertain to any part of the Derivative Works; and\r
+\r
+  d. If the Work includes a "NOTICE" text file as part of its\r
+     distribution, then any Derivative Works that You distribute must\r
+     include a readable copy of the attribution notices contained\r
+     within such NOTICE file, excluding those notices that do not\r
+     pertain to any part of the Derivative Works, in at least one of\r
+     the following places: within a NOTICE text file distributed as part\r
+     of the Derivative Works; within the Source form or documentation,\r
+     if provided along with the Derivative Works; or, within a display\r
+     generated by the Derivative Works, if and wherever such third-party\r
+     notices normally appear. The contents of the NOTICE file are for\r
+     informational purposes only and do not modify the License. You\r
+     may add Your own attribution notices within Derivative Works that\r
+     You distribute, alongside or as an addendum to the NOTICE text\r
+     from the Work, provided that such additional attribution notices\r
+     cannot be construed as modifying the License.  You may add Your own\r
+     copyright statement to Your modifications and may provide additional\r
+     or different license terms and conditions for use, reproduction, or\r
+     distribution of Your modifications, or for any such Derivative Works\r
+     as a whole, provided Your use, reproduction, and distribution of the\r
+     Work otherwise complies with the conditions stated in this License.\r
+\r
+5. Submission of Contributions.\r
+Unless You explicitly state otherwise, any Contribution intentionally\r
+submitted for inclusion in the Work by You to the Licensor shall be\r
+under the terms and conditions of this License, without any additional\r
+terms or conditions.  Notwithstanding the above, nothing herein shall\r
+supersede or modify the terms of any separate license agreement you may\r
+have executed with Licensor regarding such Contributions.\r
+\r
+6. Trademarks.\r
+This License does not grant permission to use the trade names, trademarks,\r
+service marks, or product names of the Licensor, except as required for\r
+reasonable and customary use in describing the origin of the Work and\r
+reproducing the content of the NOTICE file.\r
+\r
+7. Disclaimer of Warranty.\r
+Unless required by applicable law or agreed to in writing, Licensor\r
+provides the Work (and each Contributor provides its Contributions) on\r
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\r
+express or implied, including, without limitation, any warranties or\r
+conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR\r
+A PARTICULAR PURPOSE. You are solely responsible for determining the\r
+appropriateness of using or redistributing the Work and assume any risks\r
+associated with Your exercise of permissions under this License.\r
+\r
+8. Limitation of Liability.\r
+In no event and under no legal theory, whether in tort (including\r
+negligence), contract, or otherwise, unless required by applicable law\r
+(such as deliberate and grossly negligent acts) or agreed to in writing,\r
+shall any Contributor be liable to You for damages, including any direct,\r
+indirect, special, incidental, or consequential damages of any character\r
+arising as a result of this License or out of the use or inability to\r
+use the Work (including but not limited to damages for loss of goodwill,\r
+work stoppage, computer failure or malfunction, or any and all other\r
+commercial damages or losses), even if such Contributor has been advised\r
+of the possibility of such damages.\r
+\r
+9. Accepting Warranty or Additional Liability.\r
+While redistributing the Work or Derivative Works thereof, You may\r
+choose to offer, and charge a fee for, acceptance of support, warranty,\r
+indemnity, or other liability obligations and/or rights consistent with\r
+this License. However, in accepting such obligations, You may act only\r
+on Your own behalf and on Your sole responsibility, not on behalf of\r
+any other Contributor, and only if You agree to indemnify, defend, and\r
+hold each Contributor harmless for any liability incurred by, or claims\r
+asserted against, such Contributor by reason of your accepting any such\r
+warranty or additional liability.\r
+\r
+END OF TERMS AND CONDITIONS \r
+\r
+\r
+\r
+--------------- SECTION 2: Mozilla Public License, V1.1 -----------\r
+\r
+Mozilla Public License 1.1 (MPL 1.1)\r
+\r
+1. Definitions.\r
+\r
+      1.0.1. "Commercial Use" means distribution or otherwise making the\r
+         Covered Code available to a third party.\r
+\r
+   1.1. "Contributor" means each entity that creates or contributes to\r
+      the creation of Modifications.\r
+\r
+   1.2. "Contributor Version" means the combination of the Original Code,\r
+      prior Modifications used by a Contributor, and the Modifications\r
+      made by that particular Contributor.\r
+\r
+   1.3. "Covered Code" means the Original Code or Modifications or the\r
+      combination of the Original Code and Modifications, in each case\r
+      including portions thereof.\r
+\r
+   1.4. "Electronic Distribution Mechanism" means a mechanism generally\r
+      accepted in the software development community for the electronic\r
+      transfer of data.\r
+\r
+   1.5. "Executable" means Covered Code in any form other than Source\r
+      Code.\r
+\r
+   1.6. "Initial Developer" means the individual or entity identified\r
+      as the Initial Developer in the Source Code notice required by\r
+      Exhibit A.\r
+\r
+   1.7. "Larger Work" means a work which combines Covered Code or portions\r
+      thereof with code not governed by the terms of this License.\r
+\r
+   1.8. "License" means this document.\r
+\r
+      1.8.1. "Licensable" means having the right to grant, to the maximum\r
+         extent possible, whether at the time of the initial grant or\r
+         subsequently acquired, any and all of the rights conveyed herein.\r
+\r
+   1.9. "Modifications" means any addition to or deletion from the\r
+      substance or structure of either the Original Code or any previous\r
+      Modifications. When Covered Code is released as a series of files,\r
+      a Modification is:\r
+\r
+         A. Any addition to or deletion from the contents of a file\r
+            containing Original Code or previous Modifications.\r
+\r
+         B. Any new file that contains any part of the Original Code or\r
+            previous Modifications.\r
+\r
+   1.10. "Original Code" means Source Code of computer software code\r
+      which is described in the Source Code notice required by Exhibit\r
+      A as Original Code, and which, at the time of its release under\r
+      this License is not already Covered Code governed by this License.\r
+\r
+      1.10.1. "Patent Claims" means any patent claim(s), now owned or\r
+         hereafter acquired, including without limitation,  method,\r
+         process, and apparatus claims, in any patent Licensable by\r
+         grantor.\r
+\r
+   1.11. "Source Code" means the preferred form of the Covered Code for\r
+      making modifications to it, including all modules it contains,\r
+      plus any associated interface definition files, scripts used to\r
+      control compilation and installation of an Executable, or source\r
+      code differential comparisons against either the Original Code or\r
+      another well known, available Covered Code of the Contributor's\r
+      choice. The Source Code can be in a compressed or archival form,\r
+      provided the appropriate decompression or de-archiving software\r
+      is widely available for no charge.\r
+\r
+   1.12. "You" (or "Your")  means an individual or a legal entity exercising\r
+      rights under, and complying with all of the terms of, this License\r
+      or a future version of this License issued under Section 6.1.\r
+      For legal entities, "You" includes any entity which controls, is\r
+      controlled by, or is under common control with You. For purposes\r
+      of this definition, "control" means (a) the power, direct or\r
+      indirect, to cause the direction or management of such entity,\r
+      whether by contract or otherwise, or (b) ownership of more than\r
+      fifty percent (50%) of the outstanding shares or beneficial\r
+      ownership of such entity.\r
+\r
+2. Source Code License.\r
+\r
+   2.1. The Initial Developer Grant.\r
+      The Initial Developer hereby grants You a world-wide, royalty-free,\r
+      non-exclusive license, subject to third party intellectual property\r
+      claims:\r
+\r
+      (a) under intellectual property rights (other than patent or\r
+          trademark) Licensable by Initial Developer to use, reproduce,\r
+          modify, display, perform, sublicense and distribute the Original\r
+          Code (or portions thereof) with or without Modifications,\r
+          and/or as part of a Larger Work; and\r
+\r
+      (b) under Patents Claims infringed by the making, using or selling\r
+          of Original Code, to make, have made, use, practice, sell, and\r
+          offer for sale, and/or otherwise dispose of the Original Code\r
+          (or portions thereof).\r
+\r
+      (c) the licenses granted in this Section 2.1(a) and (b) are\r
+          effective on the date Initial Developer first distributes\r
+          Original Code under the terms of this License.\r
+\r
+      (d) Notwithstanding Section 2.1(b) above, no patent license is\r
+          granted: 1) for code that You delete from the Original Code;\r
+          2) separate from the Original Code;  or 3) for infringements\r
+          caused by: i) the modification of the Original Code or ii) the\r
+          combination of the Original Code with other software or devices.\r
+\r
+   2.2. Contributor Grant.\r
+\r
+      Subject to third party intellectual property claims, each Contributor\r
+      hereby grants You a world-wide, royalty-free, non-exclusive license\r
+\r
+      (a) under intellectual property rights (other than patent or\r
+          trademark) Licensable by Contributor, to use, reproduce, modify,\r
+          display, perform, sublicense and distribute the Modifications\r
+          created by such Contributor (or portions thereof) either on\r
+          an unmodified basis, with other Modifications, as Covered Code\r
+          and/or as part of a Larger Work; and\r
+\r
+      (b) under Patent Claims infringed by the making, using, or selling\r
+          of  Modifications made by that Contributor either alone and/or\r
+          in combination with its Contributor Version (or portions of such\r
+          combination), to make, use, sell, offer for sale, have made,\r
+          and/or otherwise dispose of: 1) Modifications made by that\r
+          Contributor (or portions thereof); and 2) the combination of\r
+          Modifications made by that Contributor with its Contributor\r
+          Version (or portions of such combination).\r
+\r
+      (c) the licenses granted in Sections 2.2(a) and 2.2(b) are effective\r
+          on the date Contributor first makes Commercial Use of the\r
+          Covered Code.\r
+\r
+      (d) Notwithstanding Section 2.2(b) above, no patent license is\r
+          granted: 1) for any code that Contributor has deleted from\r
+          the Contributor Version; 2) separate from the Contributor\r
+          Version; 3) for infringements caused by: i) third party\r
+          modifications of Contributor Version or ii)  the combination\r
+          of Modifications made by that Contributor with other software\r
+          (except as part of the Contributor Version) or other devices;\r
+          or 4) under Patent Claims infringed by Covered Code in the\r
+          absence of Modifications made by that Contributor.\r
+\r
+3. Distribution Obligations.\r
+\r
+   3.1. Application of License.\r
+\r
+      The Modifications which You create or to which You contribute\r
+      are governed by the terms of this License, including without\r
+      limitation Section 2.2.  The Source Code version of Covered Code\r
+      may be distributed only under the terms of this License or a future\r
+      version of this License released under Section 6.1, and You must\r
+      include a copy of this License with every copy of the Source Code\r
+      You distribute. You may not offer or impose any terms on any Source\r
+      Code version that alters or restricts the applicable version of\r
+      this License or the recipients' rights hereunder. However, You\r
+      may include an additional document offering the additional rights\r
+      described in Section 3.5.\r
+\r
+   3.2. Availability of Source Code.\r
+\r
+      Any Modification which You create or to which You contribute must\r
+      be made available in Source Code form under the terms of this\r
+      License either on the same media as an Executable version or via\r
+      an accepted Electronic Distribution Mechanism to anyone to whom\r
+      you made an Executable version available; and if made available\r
+      via Electronic Distribution Mechanism, must remain available for\r
+      at least twelve (12) months after the date it initially became\r
+      available, or at least six (6) months after a subsequent version\r
+      of that particular Modification has been made available to such\r
+      recipients. You are responsible for ensuring that the Source Code\r
+      version remains available even if the Electronic Distribution\r
+      Mechanism is maintained by a third party.\r
+\r
+   3.3. Description of Modifications.\r
+\r
+      You must cause all Covered Code to which You contribute to contain\r
+      a file documenting the changes You made to create that Covered\r
+      Code and the date of any change. You must include a prominent\r
+      statement that the Modification is derived, directly or indirectly,\r
+      from Original Code provided by the Initial Developer and including\r
+      the name of the Initial Developer in (a) the Source Code, and (b)\r
+      in any notice in an Executable version or related documentation\r
+      in which You describe the origin or ownership of the Covered Code.\r
+\r
+   3.4. Intellectual Property Matters\r
+\r
+      (a) Third Party Claims.\r
+\r
+          If Contributor has knowledge that a license under a third\r
+          party's intellectual property rights is required to exercise\r
+          the rights granted by such Contributor under Sections 2.1 or\r
+          2.2, Contributor must include a text file with the Source Code\r
+          distribution titled "LEGAL" which describes the claim and the\r
+          party making the claim in sufficient detail that a recipient\r
+          will know whom to contact. If Contributor obtains such knowledge\r
+          after the Modification is made available as described in Section\r
+          3.2, Contributor shall promptly modify the LEGAL file in all\r
+          copies Contributor makes available thereafter and shall take\r
+          other steps (such as notifying appropriate mailing lists or\r
+          newsgroups) reasonably calculated to inform those who received\r
+          the Covered Code that new knowledge has been obtained.\r
+\r
+      (b) Contributor APIs.\r
+\r
+          If Contributor's Modifications include an application\r
+          programming interface and Contributor has knowledge of patent\r
+          licenses which are reasonably necessary to implement that\r
+          API, Contributor must also include this information in the\r
+          LEGAL file.\r
+\r
+      (c)   Representations.\r
+\r
+          Contributor represents that, except as disclosed pursuant to\r
+          Section 3.4(a) above, Contributor believes that Contributor's\r
+          Modifications are Contributor's original creation(s) and/or\r
+          Contributor has sufficient rights to grant the rights conveyed\r
+          by this License.\r
+\r
+   3.5. Required Notices.\r
+\r
+      You must duplicate the notice in Exhibit A in each file of the\r
+      Source Code.  If it is not possible to put such notice in a\r
+      particular Source Code file due to its structure, then You must\r
+      include such notice in a location (such as a relevant directory)\r
+      where a user would be likely to look for such a notice.  If You\r
+      created one or more Modification(s) You may add your name as a\r
+      Contributor to the notice described in Exhibit A.  You must also\r
+      duplicate this License in any documentation for the Source Code\r
+      where You describe recipients' rights or ownership rights relating\r
+      to Covered Code.  You may choose to offer, and to charge a fee for,\r
+      warranty, support, indemnity or liability obligations to one or\r
+      more recipients of Covered Code. However, You may do so only on\r
+      Your own behalf, and not on behalf of the Initial Developer or\r
+      any Contributor.\r
+\r
+      You must make it absolutely clear than any such warranty, support,\r
+      indemnity or liability obligation is offered by You alone, and\r
+      You hereby agree to indemnify the Initial Developer and every\r
+      Contributor for any liability incurred by the Initial Developer\r
+      or such Contributor as a result of warranty, support, indemnity\r
+      or liability terms You offer.\r
+\r
+   3.6. Distribution of Executable Versions.\r
+\r
+      You may distribute Covered Code in Executable form only if the\r
+      requirements of Section 3.1-3.5 have been met for that Covered Code,\r
+      and if You include a notice stating that the Source Code version\r
+      of the Covered Code is available under the terms of this License,\r
+      including a description of how and where You have fulfilled the\r
+      obligations of Section 3.2.  The notice must be conspicuously\r
+      included in any notice in an Executable version, related\r
+      documentation or collateral in which You describe recipients'\r
+      rights relating to the Covered Code. You may distribute the\r
+      Executable version of Covered Code or ownership rights under a\r
+      license of Your choice, which may contain terms different from\r
+      this License, provided that You are in compliance with the terms\r
+      of this License and that the license for the Executable version\r
+      does not attempt to limit or alter the recipient's rights in the\r
+      Source Code version from the rights set forth in this License.\r
+      If You distribute the Executable version under a different license\r
+      You must make it absolutely clear that any terms which differ\r
+      from this License are offered by You alone, not by the Initial\r
+      Developer or any Contributor.  You hereby agree to indemnify the\r
+      Initial Developer and every Contributor for any liability incurred\r
+      by the Initial Developer or such Contributor as a result of any\r
+      such terms You offer.\r
+\r
+   3.7. Larger Works.\r
+\r
+      You may create a Larger Work by combining Covered Code with other\r
+      code not governed by the terms of this License and distribute the\r
+      Larger Work as a single product. In such a case, You must make sure\r
+      the requirements of this License are fulfilled for the Covered Code.\r
+\r
+4. Inability to Comply Due to Statute or Regulation.\r
+\r
+   If it is impossible for You to comply with any of the terms of this\r
+   License with respect to some or all of the Covered Code due to statute,\r
+   judicial order, or regulation then You must: (a) comply with the terms\r
+   of this License to the maximum extent possible; and (b) describe the\r
+   limitations and the code they affect. Such description must be included\r
+   in the LEGAL file described in Section 3.4 and must be included with\r
+   all distributions of the Source Code. Except to the extent prohibited\r
+   by statute or regulation, such description must be sufficiently\r
+   detailed for a recipient of ordinary skill to be able to understand it.\r
+\r
+5. Application of this License.\r
+\r
+   This License applies to code to which the Initial Developer has\r
+   attached the notice in Exhibit A and to related Covered Code.\r
+\r
+6. Versions of the License.\r
+\r
+   6.1. New Versions.\r
+\r
+      Netscape Communications Corporation ("Netscape") may publish\r
+      revised and/or new versions of the License from time to time. Each\r
+      version will be given a distinguishing version number.\r
+\r
+   6.2. Effect of New Versions.\r
+\r
+      Once Covered Code has been published under a particular version of\r
+      the License, You may always continue to use it under the terms of\r
+      that version. You may also choose to use such Covered Code under\r
+      the terms of any subsequent version of the License published by\r
+      Netscape. No one other than Netscape has the right to modify the\r
+      terms applicable to Covered Code created under this License.\r
+\r
+   6.3. Derivative Works.\r
+\r
+      If You create or use a modified version of this License (which\r
+      you may only do in order to apply it to code which is not already\r
+      Covered Code governed by this License), You must (a) rename Your\r
+      license so that the phrases "Mozilla", "MOZILLAPL", "MOZPL",\r
+      "Netscape", "MPL", "NPL" or any confusingly similar phrase\r
+      do not appear in your license (except to note that your license\r
+      differs from this License) and (b) otherwise make it clear that\r
+      Your version of the license contains terms which differ from the\r
+      Mozilla Public License and Netscape Public License. (Filling in\r
+      the name of the Initial Developer, Original Code or Contributor\r
+      in the notice described in Exhibit A shall not of themselves be\r
+      deemed to be modifications of this License.)\r
+\r
+7. DISCLAIMER OF WARRANTY.\r
+\r
+   COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS"\r
+   BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED,\r
+   INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE\r
+   IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR\r
+   NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE\r
+   OF THE COVERED CODE IS WITH YOU. SHOULD ANY COVERED CODE PROVE\r
+   DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER\r
+   CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR\r
+   CORRECTION.  THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART\r
+   OF THIS LICENSE.  NO USE OF ANY COVERED CODE IS AUTHORIZED HEREUNDER\r
+   EXCEPT UNDER THIS DISCLAIMER.\r
+\r
+8. TERMINATION.\r
+\r
+   8.1.  This License and the rights granted hereunder will terminate\r
+      automatically if You fail to comply with terms herein and fail to\r
+      cure such breach within 30 days of becoming aware of the breach. All\r
+      sublicenses to the Covered Code which are properly granted shall\r
+      survive any termination of this License. Provisions which, by\r
+      their nature, must remain in effect beyond the termination of this\r
+      License shall survive.\r
+\r
+   8.2.  If You initiate litigation by asserting a patent infringement claim\r
+      (excluding declatory judgment actions) against Initial Developer or\r
+      a Contributor (the Initial Developer or Contributor against whom You\r
+      file such action is referred to as "Participant")  alleging that:\r
+\r
+      (a) such Participant's Contributor Version directly or indirectly\r
+          infringes any patent, then any and all rights granted by\r
+          such Participant to You under Sections 2.1 and/or 2.2 of this\r
+          License shall, upon 60 days notice from Participant terminate\r
+          prospectively, unless if within 60 days after receipt of\r
+          notice You either: (i)  agree in writing to pay Participant\r
+          a mutually agreeable reasonable royalty for Your past and\r
+          future use of Modifications made by such Participant, or (ii)\r
+          withdraw Your litigation claim with respect to the Contributor\r
+          Version against such Participant.  If within 60 days of notice,\r
+          a reasonable royalty and payment arrangement are not mutually\r
+          agreed upon in writing by the parties or the litigation claim\r
+          is not withdrawn, the rights granted by Participant to You\r
+          under Sections 2.1 and/or 2.2 automatically terminate at the\r
+          expiration of the 60 day notice period specified above.\r
+\r
+      (b) any software, hardware, or device, other than such Participant's\r
+          Contributor Version, directly or indirectly infringes any\r
+          patent, then any rights granted to You by such Participant\r
+          under Sections 2.1(b) and 2.2(b) are revoked effective as of\r
+          the date You first made, used, sold, distributed, or had made,\r
+          Modifications made by that Participant.\r
+\r
+   8.3.  If You assert a patent infringement claim against Participant\r
+      alleging that such Participant's Contributor Version directly\r
+      or indirectly infringes any patent where such claim is resolved\r
+      (such as by license or settlement) prior to the initiation of\r
+      patent infringement litigation, then the reasonable value of the\r
+      licenses granted by such Participant under Sections 2.1 or 2.2\r
+      shall be taken into account in determining the amount or value of\r
+      any payment or license.\r
+\r
+   8.4.  In the event of termination under Sections 8.1 or 8.2 above,  all\r
+      end user license agreements (excluding distributors and resellers)\r
+      which have been validly granted by You or any distributor hereunder\r
+      prior to termination shall survive termination.\r
+\r
+9. LIMITATION OF LIABILITY.\r
+\r
+   UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT\r
+   (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL\r
+   DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,\r
+   OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR\r
+   ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY\r
+   CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,\r
+   WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER\r
+   COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN\r
+   INFORMED OF THE POSSIBILITY OF SUCH DAMAGES.  THIS LIMITATION OF\r
+   LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY\r
+   RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE\r
+   LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE\r
+   EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES,\r
+   SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.\r
+\r
+10. U.S. GOVERNMENT END USERS.\r
+\r
+   The Covered Code is a "commercial item," as that term is defined\r
+   in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer\r
+   software" and "commercial computer software documentation," as\r
+   such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent\r
+   with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4\r
+   (June 1995), all U.S. Government End Users acquire Covered Code with\r
+   only those rights set forth herein.\r
+\r
+11. MISCELLANEOUS.\r
+\r
+   This License represents the complete agreement concerning subject\r
+   matter hereof. If any provision of this License is held to be\r
+   unenforceable, such provision shall be reformed only to the extent\r
+   necessary to make it enforceable.\r
+\r
+   This License shall be governed by California law provisions (except to\r
+   the extent applicable law, if any, provides otherwise), excluding its\r
+   conflict-of-law provisions. With respect to disputes in which at least\r
+   one party is a citizen of, or an entity chartered or registered to do\r
+   business in the United States of America, any litigation relating to\r
+   this License shall be subject to the jurisdiction of the Federal Courts\r
+   of the Northern District of California, with venue lying in Santa\r
+   Clara County, California, with the losing party responsible for costs,\r
+   including without limitation, court costs and reasonable attorneys'\r
+   fees and expenses. The application of the United Nations Convention on\r
+   Contracts for the International Sale of Goods is expressly excluded.\r
+   Any law or regulation which provides that the language of a contract\r
+   shall be construed against the drafter shall not apply to this License.\r
+\r
+12. RESPONSIBILITY FOR CLAIMS.\r
+\r
+   As between Initial Developer and the Contributors, each party is\r
+   responsible for claims and damages arising, directly or indirectly,\r
+   out of its utilization of rights under this License and You agree\r
+   to work with Initial Developer and Contributors to distribute such\r
+   responsibility on an equitable basis.  Nothing herein is intended or\r
+   shall be deemed to constitute any admission of liability.\r
+\r
+13. MULTIPLE-LICENSED CODE.\r
+\r
+   Initial Developer may designate portions of the Covered Code\r
+   as Multiple-Licensed.  Multiple-Licensed means that the Initial\r
+   Developer permits you to utilize portions of the Covered Code under\r
+   Your choice of the MPL or the alternative licenses, if any, specified\r
+   by the Initial Developer in the file described in Exhibit A.\r
+\r
+\r
+EXHIBIT A -Mozilla Public License.\r
+\r
+   ``The contents of this file are subject to the Mozilla Public License\r
+   Version 1.1 (the "License"); you may not use this file except in\r
+   compliance with the License. You may obtain a copy of the License at\r
+\r
+   http://www.mozilla.org/MPL/\r
+\r
+   Software distributed under the License is distributed on an "AS IS"\r
+   basis, WITHOUT WARRANTY OF\r
+\r
+   ANY KIND, either express or implied. See the License for the specific\r
+   language governing rights and limitations under the License.\r
+\r
+   The Original Code is ______________________________________.\r
+\r
+   The Initial Developer of the Original Code is ________________________.\r
+   Portions created by\r
+\r
+   ______________________ are Copyright (C) ______\r
+   _______________________.\r
+   All Rights Reserved.\r
+\r
+   Contributor(s): ______________________________________.\r
+\r
+   Alternatively, the contents of this file may be used under the terms of\r
+   the _____ license (the  [___] License), in which case the provisions of\r
+   [______] License are applicable  instead of those above.  If you wish\r
+   to allow use of your version of this file only under the terms of the\r
+   [____] License and not to allow others to use your version of this\r
+   file under the MPL, indicate your decision by deleting  the provisions\r
+   above and replace  them with the notice and other provisions required\r
+   by the [___] License.  If you do not delete the provisions above,\r
+   a recipient may use your version of this file under either the MPL\r
+   or the [___] License."\r
+\r
+   [NOTE: The text of this Exhibit A may differ slightly from the text\r
+   of the notices in the Source Code files of the Original Code. You\r
+   should use the text of this Exhibit A rather than the text found in\r
+   the Original Code Source Code for Your Modifications.]\r
+\r
+\r
+\r
+===========================================================================\r
+\r
+To the extent any open source components are licensed under the\r
+GPL and/or LGPL, or other similar licenses that require the\r
+source code and/or modifications to source code to be made\r
+available (as would be noted above), you may obtain a copy of\r
+the source code corresponding to the binaries for such open\r
+source components and modifications thereto, if any, (the\r
+"Source Files"), by downloading the Source Files from Pivotal's website at\r
+http://www.pivotal.io/open-source, or by sending a request, \r
+with your name and address to: Pivotal Software, Inc., 3496 Deer Creek Rd, \r
+Palo Alto, CA 94304, Attention: General Counsel. All such requests should \r
+clearly specify: OPEN SOURCE FILES REQUEST, Attention General Counsel. \r
+Pivotal shall mail a copy of the Source Files to you on a CD or equivalent physical medium. \r
+This offer to obtain a copy of the Source Files is valid for three\r
+years from the date you acquired this Software product. \r
+Alternatively, the Source Files may accompany the Pivotal product.\r
+\r
+\r
+[RABBITJMS146GASS110315]
\ No newline at end of file
diff --git a/deps/rabbitmq_jms_topic_exchange/Makefile b/deps/rabbitmq_jms_topic_exchange/Makefile
new file mode 100644 (file)
index 0000000..d25aaf3
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_jms_topic_exchange
+
+DEPS = rabbit_common rabbit
+TEST_DEPS = rabbitmq_ct_helpers amqp_client
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_jms_topic_exchange/README.md b/deps/rabbitmq_jms_topic_exchange/README.md
new file mode 100644 (file)
index 0000000..9d01c9a
--- /dev/null
@@ -0,0 +1,51 @@
+# RabbitMQ JMS Topic Exchange Plugin
+
+## Overview
+
+This plugin adds server-side support for RabbitMQ JMS client. All JMS-related
+projects are in the process of being open sourced by the RabbitMQ team
+and **should not be used unless the process is complete and announced**.
+
+This plugin is designed to work with the JMS Client for RabbitMQ. It
+supports JMS topic routing and selection based on JMS SQL selection
+rules.
+
+This implementation is based upon the Java Messaging Service
+Specification Version 1.1, see [The JMS
+Specs](http://www.oracle.com/technetwork/java/docs-136352.html) for a
+copy of that specification.
+
+## Design
+
+The plugin this generates is a user-written exchange type for RabbitMQ
+client use. The exchange type name is "`x_jms_topic`" but this is _not_
+a topic exchange. Instead it works together with a standard topic
+exchange to provide the JMS topic selection function.
+
+When JMS Selectors are used on a Topic Destination consumer, the
+destination (queue) is bound to an exchange of type `x_jms_topic`, with
+arguments that indicate what the selection criteria are. The
+`x_jms_topic` exchange is, in turn, bound to the standard Topic Exchange
+used by JMS messaging (this uses the RabbitMQ exchange-to-exchange
+binding extension to the AMQP 0-9-1 protocol).
+
+In this way, normal topic routing can occur, with the overhead of
+selection only applying when selection is used, and _after_ the routing
+and filtering implied by the topic name.
+
+## Building From Source
+
+Building is no different from [building other RabbitMQ plugins](http://www.rabbitmq.com/plugin-development.html).
+
+TL;DR:
+
+    git clone https://github.com/rabbitmq/rabbitmq-jms-topic-exchange.git
+    cd rabbitmq-jms-topic-exchange
+    make -j dist
+    ls plugins/*
+    
+## Copyright and License
+
+(c) Pivotal Software Inc., 2007-2016.
+
+See [LICENSE](./LICENSE) for license information.
diff --git a/deps/rabbitmq_jms_topic_exchange/erlang.mk b/deps/rabbitmq_jms_topic_exchange/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simply writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards `in` and `between` for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print: print a human-readable summary for package $(1) from the
+# pkg_<name>_* variables of the index above.  The "Pkg name:" line is
+# emitted only when the package key differs from its app name (core_eq).
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# `make search q=<term>`: print every package whose name or description
+# contains <term>, matched case-insensitively via core_lc.  With no `q`
+# given, every known package is printed.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Rebar-built dependencies are fetched into the same directory as ours.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name/dep_repo/dep_commit: resolve a dependency's app name, repository
+# URL and commit.  An explicit user-supplied dep_<name> definition takes
+# precedence over the built-in package index (pkg_<name>_*).  dep_repo also
+# rewrites git:// GitHub URLs to https.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# All immediate application directories under APPS_DIR, and the target
+# directories of every non-ignored build-time and runtime dependency.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Ensure APPS_DIR and DEPS_DIR are on ERL_LIBS (without duplicating them)
+# so the Erlang code path and -include_lib can find apps and deps.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# V=0 (default): print a terse " DEP <name>" line and hide the commands.
+# V=2: trace each shell command with `set -x`.
+# Any other V: dep_verbose expands empty, so commands echo normally.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# When invoked from inside an app build (IS_APP=1), `apps` is a no-op so
+# recursive makes do not loop.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+# Top-level invocation: start with a clean build log.
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+# apps.log records apps already built, so each one is made at most once
+# across recursive invocations.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# Setting SKIP_DEPS turns `deps` into a no-op.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+# Top-level invocation: start with a clean build log.
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# deps.log records deps already built, so each one is made at most once
+# across recursive invocations.  A dependency with no Makefile at all is
+# a hard error (autopatch should have generated one).
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch: decide how to patch dependency $(1) for an erlang.mk build:
+#   - ships erlang.mk          -> regenerate .app.src and replace its erlang.mk;
+#   - Makefile mentioning ../*.mk or rebar (directly or in *.mk includes)
+#                              -> full autopatch via dep_autopatch2;
+#   - other Makefile           -> only regenerate the .app file;
+#   - no Makefile              -> noop Makefile if no src/, else dep_autopatch2.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2: evaluate a rebar.config.script if present, regenerate the
+# .app.src, then either convert a rebar-based dep (fetching a pinned rebar
+# first) or generate a minimal erlang.mk Makefile for it.
+# NOTE(review): `test -o` below is deprecated/ambiguous per POSIX; kept
+# as-is because this is vendored upstream erlang.mk.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop: give a sourceless dependency a Makefile whose only
+# target does nothing, so the recursive `deps` build succeeds.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# The dependency's own erlang.mk is replaced with a one-line include of the
+# top-level erlang.mk (path computed relative to $(DEPS_DIR)/app), so every
+# dep builds with the same erlang.mk version.  Set NO_AUTOPATCH_ERLANG_MK
+# to keep each dep's bundled copy instead.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen: generate a minimal erlang.mk-based Makefile for a
+# dependency that has sources but no usable build system of its own.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: clone and build a pinned rebar commit into
+# $(ERLANG_MK_TMP)/rebar, once; used to interpret rebar-based dependencies.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar: set the dep's original Makefile aside (as
+# Makefile.orig.mk), run the Erlang-side rebar.config conversion that
+# writes a fresh erlang.mk Makefile, and drop any prebuilt .app file so it
+# is regenerated.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl: Erlang snippet (run via $(call erlang,...)) that
+# rewrites the {modules, [...]} entry of an autopatched dependency's
+# ebin/$1.app file to the list of modules found as .erl files (recursively)
+# under its src/ directory. No-op when the .app file does not exist.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluate a rebar-style
+# src/$1.app.src.script with empty bindings and write the resulting term
+# back over src/$1.app.src, so later steps only deal with the plain file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalize a dependency's application resource
+# file. Falls back to ebin/$1.app when src/$1.app.src is missing, empties
+# the modules list (rebuilt later by dep_autopatch_app.erl), turns a
+# {vsn, git} placeholder into the literal "git", ensures a registered key
+# exists, and deletes the input when it was the ebin/ copy.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch methods. Each dep_fetch_* macro is expanded into the fetch recipe by
+# dep_target below; $(1) is the dependency name.
+# git: clone without checking out a working tree (-n), then check out the
+# pinned commit/branch/tag recorded in the dep spec.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# git-submodule: the dependency already lives in .gitmodules; just init/update
+# the submodule in place under $(DEPS_DIR).
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# hg: clone without updating the working directory (-U), then update to the
+# pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# svn: a plain checkout of the repository URL into the deps directory.
+# (No revision pinning here: the URL itself selects the tree.)
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$1) $(DEPS_DIR)/$(call dep_name,$1);
+endef
+
+# cp: the "repository" is a local directory; copy it recursively into the
+# deps directory.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$1) $(DEPS_DIR)/$(call dep_name,$1);
+endef
+
+# dep_fetch_hex.erl: download the $(1)-$(2) package tarball from hex.pm's
+# S3 bucket over HTTPS, unpack the outer tar entirely in memory, then
+# extract the inner contents.tar.gz into the dependency's directory.
+# A non-200 response or malformed tarball fails via badmatch.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# $(1) is the dependency name; the package version is the second word of the
+# user-supplied dep_$(1) variable.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback recipe when no valid fetch method could be resolved for $(1).
+# Exit code 78 is EX_CONFIG from sysexits(3): a configuration error.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Legacy two-word form "dep_name = <repo> [<commit>]": always git, defaulting
+# to the master branch when no commit is given.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch-method name for dependency $(1):
+#  - dep_$(1) defined and its first word names a dep_fetch_* macro -> that word;
+#  - dep_$(1) defined otherwise -> "legacy" when building as a dep, else "fail";
+#  - no dep_$(1) but the name is in $(PACKAGES) -> the package index's method;
+#  - anything else -> "fail" (handled by dep_fetch_fail above).
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# dep_target: evaluated once per dependency (see the foreach below); generates
+# the rule that fetches $(DEPS_DIR)/<name>. It refuses dependency names that
+# clash with an application already present in $(APPS_DIR) (exit 17), runs
+# autoreconf/./configure when the dep ships autotools input, and autopatches
+# the dep's build files unless it is listed in $(NO_AUTOPATCH). The
+# amqp_client/rabbit special cases clone the rabbitmq-codegen/-server repos
+# that those source layouts expect instead of autopatching.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch rule (dep_target above) per build/runtime dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# At the top level only (not when recursing as IS_APP), propagate clean and
+# distclean into every application directory.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean removes the whole deps tree unless the user opted out.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin: include a plugin makefile shipped inside a dependency and
+# make the include depend on that dependency having been fetched. $(1) is the
+# makefile path under $(DEPS_DIR), $(2) the owning dependency directory.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# "dep/file.mk" entries are used as-is; a bare "dep" means dep/plugins.mk.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+# ErlyDTL plugin: compiles $(DTL_PATH)*.dtl templates into *$(DTL_SUFFIX).beam
+# modules in ebin/. DTL_FULL_PATH (when non-empty) derives module names from
+# the full template path instead of the basename.
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# erlydtl_compile.erl: compile each template listed in $(1). The module name
+# is the lowercased basename (or the slash-to-underscore path when
+# DTL_FULL_PATH is set) plus $(DTL_SUFFIX); a compile error fails the build
+# through the unmatched case clause (badmatch).
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Recompile only the templates newer than the target ($? = changed prereqs).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# compile_proto: generate .erl/.hrl from the .proto files in $(1) via
+# protobuffs, compile the generated sources into ebin/, then delete them.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# compile_proto.erl: run protobuffs_compile on each file in $(1); headers go
+# next to the project's include/, generated sources into ebin/ (compiled and
+# removed by compile_proto above).
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; note -Werror turns warnings into build failures.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# Convention: <tag>_verbose_0 prints a short status line (V=0, quiet),
+# <tag>_verbose_2 traces commands with set -x (V=2); V=1 echoes commands.
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# When ebin/test exists, a previous test build left test-compiled beams in
+# ebin/, so force a clean before building the app.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file: template for the generated ebin/$(PROJECT).app. $(1) is the
+# dependency id (only emitted when built as a dep), $(2) the module list.
+# The first form (no $(PROJECT_MOD) source file) is a library application
+# without a {mod, ...} callback; the second also registers the project's
+# supervisor and application callback module.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+# app-build is satisfied by the generated .app file; ":" is a no-op recipe.
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# compile_asn1: erlc +noobj emits .erl/.hrl/.asn1db next to the .asn1 input;
+# move generated sources to src/ and headers/databases to include/ so the
+# normal Erlang build picks them up.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile .mib -> priv/mibs/*.bin, then generate include/ headers from the
+# changed binaries ($? = prerequisites newer than the target).
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+# .xrl/.yrl inputs generate .erl sources in src/, which are then compiled as
+# part of ERL_FILES by the regular build.
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl: parse every module's -attributes to find behaviours, parse
+# transforms, -include/-include_lib headers and -imports, then write $(1)
+# (the $(PROJECT).d file) with per-source dependency rules plus a
+# COMPILE_FIRST list derived from a reverse topological sort of the
+# inter-module dependency digraph.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Regenerate the .d dependency file whenever a source, header or makefile
+# changes, unless NO_MAKEDEP is set and a .d file already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+	$(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# The timestamp file forces a touch of all sources the first time the
+# makefiles are seen to be newer than the last recorded change.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+		touch -c $(PROJECT).d; \
+	fi
+	@touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+# The leading '-' makes the include non-fatal on the first build,
+# before the .d file has been generated.
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+	$(verbose) mkdir -p ebin/
+
+# Compile a batch of .erl files into ebin/. When built as a dependency
+# (IS_DEP), -Werror is stripped so warnings do not fail the caller's build.
+define compile_erl
+	$(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+		-pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build the .app file: compile only out-of-date sources ($?), then either
+# generate the .app from scratch or patch the {modules, []} and {id, "git"}
+# placeholders in src/$(PROJECT).app.src.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+	$(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+	$(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+	$(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+	$(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+		> ebin/$(PROJECT).app
+else
+	$(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+		echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+		exit 1; \
+	fi
+	$(appsrc_verbose) cat src/$(PROJECT).app.src \
+		| sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+		| sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+		> ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove all build artifacts derived from .erl/.asn1/.mib/.xrl/.yrl sources.
+clean-app:
+	$(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+		$(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+		$(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+# Generate a fetch target for every documentation-only dependency.
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# With SKIP_DEPS set, doc-deps becomes a no-op; otherwise fetch and build
+# each documentation dependency in turn.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+# Generate a fetch target for every release-only dependency.
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+# Same SKIP_DEPS short-circuit as doc-deps, for release dependencies.
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+# Compile the test suites found under TEST_DIR with the test-specific
+# erlc options, leaving the beams next to the sources.
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+	$(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+		$(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases: no src/ at all (tests only); src/ present but ebin/ was last
+# built without TEST defined (full clean rebuild, then mark ebin/test); and
+# src/ present with ebin/test marker (incremental rebuild is safe).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+	$(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Normalize an ERLC_OPTS string so each option is a single word.
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Drop -Werror; for +option style flags, strip the leading '+' so the
+# remainder can be emitted as a bare Erlang term in erl_opts.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+	$(if $(findstring +,$1),\
+		$(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Render a rebar.config equivalent of this project's DEPS and ERLC_OPTS,
+# using hex package syntax or {git, Repo, Commit} tuples as appropriate.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+	$(if $(filter hex,$(call dep_fetch,$d)),\
+		{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+		{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export via the environment so the multi-line value survives the shell.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+	$(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the user guide (PDF and chunked HTML) only when its source exists;
+# otherwise the target is an empty no-op.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+	a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+	a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build man pages from doc/src/manual and install them gzipped under
+# MAN_INSTALL_PATH, one directory per configured man section.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+	for f in doc/src/manual/*.asciidoc ; do \
+		a2x -v -f manpage $$f ; \
+	done
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p doc/man$$s/ ; \
+		mv doc/src/manual/*.$$s doc/man$$s/ ; \
+		gzip doc/man$$s/*.$$s ; \
+	done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+		install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+	done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove all generated documentation output (HTML, PDF, man pages).
+distclean-asciidoc:
+	$(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Bootstrap targets:" \
+		"  bootstrap          Generate a skeleton of an OTP application" \
+		"  bootstrap-lib      Generate a skeleton of an OTP library" \
+		"  bootstrap-rel      Generate the files needed to build a release" \
+		"  new-app in=NAME    Create a new local OTP application NAME" \
+		"  new-lib in=NAME    Create a new local OTP library NAME" \
+		"  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+		"  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+		"  list-templates     List available templates"
+
+# Bootstrap templates.
+# NOTE: the bodies below are rendered verbatim into generated files;
+# $p is the project name and $n the module name at render time.
+
+# .app.src skeleton for an OTP application (has an application callback).
+define bs_appsrc
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]},
+	{mod, {$p_app, []}},
+	{env, []}
+]}.
+endef
+
+# .app.src skeleton for a library application (no mod/env entries).
+define bs_appsrc_lib
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile skeleton for applications living under APPS_DIR.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+	$p_sup:start_link().
+
+stop(_State) ->
+	ok.
+endef
+
+# relx release configuration skeleton.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+# Each tpl_* define is an Erlang module skeleton rendered into src/$(n).erl
+# by the 'new' target; $(n) is substituted with the module name.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+	supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+	Procs = [],
+	{ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+	{ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+	{reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+	{noreply, State}.
+
+handle_info(_Info, State) ->
+	{noreply, State}.
+
+terminate(_Reason, _State) ->
+	ok.
+
+code_change(_OldVsn, State, _Extra) ->
+	{ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+	{ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+	{ok, Req2} = cowboy_req:reply(200, Req),
+	{ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+	gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+	{ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+	{next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+	{next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+	{reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+	{reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+	{next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+	ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+	{ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+	{loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+	{loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+	{upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+	{[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+	{<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+	{upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+	Req2 = cowboy_req:compact(Req),
+	{ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+	{reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+	{reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+	{ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+	{ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+	ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+	socket :: inet:socket(),
+	transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+	Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+	{ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+	ok = ranch:accept_ack(Ref),
+	loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+	loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render template $(1) into file $(2), escaping newlines, percent signs
+# and single quotes so printf reproduces the template verbatim; tabs are
+# replaced with $(WS) (see below).
+define render_template
+	$(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace written in place of template tabs: SP spaces when
+# SP is set, otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Generate a full OTP application skeleton in the current directory.
+bootstrap:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(eval n := $(PROJECT)_sup)
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+	$(call render_template,bs_app,src/$(PROJECT)_app.erl)
+	$(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Generate a library skeleton (no application callback or supervisor).
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config and the rel/ configuration files for a release.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+	$(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+	$(error Error: rel/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_relx_config,relx.config)
+	$(verbose) mkdir rel/
+	$(call render_template,bs_sys_config,rel/sys.config)
+	$(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application named $(in) under APPS_DIR.
+new-app:
+ifndef in
+	$(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(eval n := $(in)_sup)
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+	$(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+	$(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library named $(in) under APPS_DIR.
+new-lib:
+ifndef in
+	$(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Generate a module from template t=TPL named n=NAME, optionally inside
+# application in=APP (recurses into that app's directory).
+new:
+ifeq ($(wildcard src/)$(in),)
+	$(error Error: src/ directory does not exist)
+endif
+ifndef t
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+	$(error Unknown template)
+endif
+ifndef n
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+	$(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# List all tpl_* templates known to this Makefile.
+list-templates:
+	$(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+# C_SRC_TYPE is either "shared" (NIF/driver) or "executable" (port program).
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+	CC = /mingw64/bin/gcc
+	export CC
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+	LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+	CC ?= gcc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+	CFLAGS += -fPIC
+	CXXFLAGS += -fPIC
+endif
+
+# ERTS/erl_interface paths come from $(C_SRC_ENV), generated below.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three cases: no c_src/ at all (nothing to do); c_src/Makefile present
+# (delegate to it); otherwise compile and link the sources ourselves.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+	$(MAKE) -C $(C_SRC_DIR)
+
+clean::
+	$(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link all objects; -shared is added only for the shared output type.
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+	$(verbose) mkdir -p priv/
+	$(link_verbose) $(CC) $(OBJECTS) \
+		$(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+		-o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+	$(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+	$(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# Probe the local Erlang installation for ERTS and erl_interface paths
+# and cache them in $(C_SRC_ENV), which is included below.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+	$(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+		io_lib:format( \
+			\"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+			\"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+			\"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+			[code:root_dir(), erlang:system_info(version), \
+			code:lib_dir(erl_interface, include), \
+			code:lib_dir(erl_interface, lib)])), \
+		halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+	$(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+# Skeletons for a minimal NIF: the C side ($n.c) and its Erlang loader
+# module (src/$n.erl). $n is the module name at render time.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+	/* Initialize private data. */
+	*priv_data = NULL;
+
+	loads++;
+
+	return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+	/* Convert the private data to the new version. */
+	*priv_data = *old_priv_data;
+
+	loads++;
+
+	return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+	if (loads == 1) {
+		/* Destroy the private data. */
+	}
+
+	loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+	if (enif_is_atom(env, argv[0])) {
+		return enif_make_tuple2(env,
+			enif_make_atom(env, "hello"),
+			argv[0]);
+	}
+
+	return enif_make_tuple2(env,
+		enif_make_atom(env, "error"),
+		enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+	{"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+	PrivDir = case code:priv_dir(?MODULE) of
+		{error, _} ->
+			AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+			filename:join(AppPath, "priv");
+		Path ->
+			Path
+	end,
+	erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+	erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Generate both halves of a NIF named n=NAME, optionally in app in=APP.
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+	$(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+	$(error Error: src/$n.erl already exists)
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+	$(verbose) mkdir -p $(C_SRC_DIR) src/
+	$(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+	$(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+# Space-separated list of OTP versions to test, e.g. "OTP-17.5.3 OTP-18.1".
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# Run the full test suite against one installed OTP version, with that
+# version's bin/ prepended to PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+	$(ci_verbose) \
+		PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+		CI_OTP_RELEASE="$(1)" \
+		CT_OPTS="-label $(1)" \
+		$(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# Build and install an OTP version via kerl, unless already installed.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+	$(KERL) build git $(OTP_GIT) $(1) $(1)
+	$(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+	$(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+	$(verbose) chmod +x $(KERL)
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Continuous Integration targets:" \
+		"  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+		"" \
+		"The CI_OTP variable must be defined with the Erlang versions" \
+		"that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+	$(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+# Auto-detect suite names from *_SUITE.erl files under TEST_DIR.
+ifneq ($(wildcard $(TEST_DIR)),)
+	CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+	CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Common_test targets:" \
+		"  ct          Run all the common_test suites for this project" \
+		"" \
+		"All your common_test suites have their associated targets." \
+		"A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+	-no_auto_compile \
+	-noinput \
+	-pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+	-dir $(TEST_DIR) \
+	-logdir $(CURDIR)/logs
+
+# When not inside an app (IS_APP unset), also recurse into APPS_DIR apps.
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+	$(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=group or t=group:case narrows a suite run to a group and/or case.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+# One ct-SUITE target per detected suite.
+define ct_suite_target
+ct-$(1): test-build
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+	$(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Dialyzer targets:" \
+		"  plt         Build a PLT file for this project" \
+		"  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Erlang snippet that keeps only -D/-I/-pa options (and their arguments)
+# from the plain arguments, so ERLC_OPTS can be forwarded to dialyzer.
+define filter_opts.erl
+	Opts = init:get_plain_arguments(),
+	{Filtered, _} = lists:foldl(fun
+		(O,                         {Os, true}) -> {[O|Os], false};
+		(O = "-D",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-I",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-pa",                 {Os, _})    -> {[O|Os], true};
+		(_,                         Acc)        -> Acc
+	end, {[], false}, Opts),
+	io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+	halt().
+endef
+
+$(DIALYZER_PLT): deps app
+	$(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+	$(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# The recipe below the endif belongs to whichever dialyze rule matched:
+# if the PLT already exists it is not rebuilt first.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+	$(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_jms_topic_exchange/include/rabbit_jms_topic_exchange.hrl b/deps/rabbitmq_jms_topic_exchange/include/rabbit_jms_topic_exchange.hrl
new file mode 100644 (file)
index 0000000..61c5ddd
--- /dev/null
@@ -0,0 +1,48 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is Pivotal Software, Inc.
+%% Copyright (c) 2012, 2013 Pivotal Software, Inc.  All rights reserved.
+%% -----------------------------------------------------------------------------
+
+%% JMS on Rabbit Topic Selector Exchange plugin definitions
+
+%% -----------------------------------------------------------------------------
+%% User-defined exchange type name
+-define(X_TYPE_NAME, <<"x-jms-topic">>).
+
+%% -----------------------------------------------------------------------------
+%% mnesia database records
+-define(JMS_TOPIC_TABLE, x_jms_topic_table).
+-define(JMS_TOPIC_RECORD, x_jms_topic_xs).
+
+%% Key is x_name -- the exchange name
+-record(?JMS_TOPIC_RECORD, {x_name, x_selection_policy = undefined, x_selector_funs}).
+%% fields:
+%%  x_selector_funs
+%%      a partial map (`dict`) of binding functions:
+%%          dict: RoutingKey x DestName -/-> BindingSelectorFun
+%%      (there is no default, but an empty map will be initially inserted)
+%%      where a BindingSelectorFun has the signature:
+%%          bsf : Headers -> boolean
+%%  x_selection_policy
+%%      not used, retained for backwards compatibility of db records.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% Name of arg on binding used to specify erlang term -- string type
+%%      private static final String RJMS_COMPILED_SELECTOR_ARG = "rjms_erlang_selector";
+%% in JMS Client.
+-define(RJMS_COMPILED_SELECTOR_ARG, <<"rjms_erlang_selector">>).
+%% -----------------------------------------------------------------------------
diff --git a/deps/rabbitmq_jms_topic_exchange/rabbitmq-components.mk b/deps/rabbitmq_jms_topic_exchange/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround is:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
diff --git a/deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl b/deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl
new file mode 100644 (file)
index 0000000..73a7992
--- /dev/null
@@ -0,0 +1,302 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is Pivotal Software, Inc.
+%% Copyright (c) 2012, 2013 Pivotal Software, Inc.  All rights reserved.
+%% -----------------------------------------------------------------------------
+
+%% JMS on Rabbit Selector Exchange plugin
+
+%% -----------------------------------------------------------------------------
+-module(rabbit_jms_topic_exchange).
+
+-behaviour(rabbit_exchange_type).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+-include("rabbit_jms_topic_exchange.hrl").
+
+%% Rabbit exchange type functions:
+-export([ description/0
+        , serialise_events/0
+        , route/2
+        , validate/1
+        , create/2
+        , delete/3
+        , validate_binding/2
+        , add_binding/3
+        , remove_bindings/3
+        , assert_args_equivalence/2
+        , policy_changed/2 ]).
+
+%% Initialisation of database function:
+-export([setup_db_schema/0]).
+
+%%----------------------------------------------------------------------------
+
+%% Register exchange type
+-rabbit_boot_step({ ?MODULE
+                  , [ {description, "exchange type JMS topic selector"}
+                    , {mfa, {rabbit_registry, register, [exchange, ?X_TYPE_NAME, ?MODULE]}}
+                    , {cleanup, {rabbit_registry, unregister, [exchange, ?X_TYPE_NAME]}}
+                    , {requires, rabbit_registry}
+                    , {enables, kernel_ready} ] }).
+
+%% Initialise database
+-rabbit_boot_step({ rabbit_jms_topic_exchange_mnesia
+                  , [ {description, "database exchange type JMS topic selector"}
+                    , {mfa, {?MODULE, setup_db_schema, []}}
+                    , {requires, database}
+                    , {enables, external_infrastructure} ] }).
+
+%%----------------------------------------------------------------------------
+
+% Initialise database table for all exchanges of type <<"x-jms-topic">>
+setup_db_schema() ->
+  case mnesia:create_table( ?JMS_TOPIC_TABLE
+                          , [ {attributes, record_info(fields, ?JMS_TOPIC_RECORD)}
+                            , {record_name, ?JMS_TOPIC_RECORD}
+                            , {type, set} ]
+                          ) of
+    {atomic, ok} -> ok;
+    {aborted, {already_exists, ?JMS_TOPIC_TABLE}} -> ok
+  end.
+
+%%----------------------------------------------------------------------------
+%% R E F E R E N C E   T Y P E   I N F O R M A T I O N
+
+%% -type(binding() ::
+%%         #binding{source      :: rabbit_exchange:name(),
+%%                  destination :: binding_destination(),
+%%                  key         :: rabbit_binding:key(),
+%%                  args        :: rabbit_framing:amqp_table()}).
+%%
+%% -type(exchange() ::
+%%         #exchange{name        :: rabbit_exchange:name(),
+%%                   type        :: rabbit_exchange:type(),
+%%                   durable     :: boolean(),
+%%                   auto_delete :: boolean(),
+%%                   arguments   :: rabbit_framing:amqp_table()}).
+%%
+%% -type(amqp_field_type() ::
+%%       'longstr' | 'signedint' | 'decimal' | 'timestamp' |
+%%       'table' | 'byte' | 'double' | 'float' | 'long' |
+%%       'short' | 'bool' | 'binary' | 'void' | 'array').
+
+%%----------------------------------------------------------------------------
+%% E X P O R T E D   E X C H A N G E   B E H A V I O U R
+
+% Exchange description
+description() -> [ {name, <<"jms-selector">>}
+                 , {description, <<"JMS selector exchange">>} ].
+
+% Binding event serialisation
+serialise_events() -> false.
+
+% Route messages
+route( #exchange{name = XName}
+     , #delivery{message = #basic_message{content = MessageContent, routing_keys = RKs}}
+     ) ->
+  BindingFuns = get_binding_funs_x(XName),
+  match_bindings(XName, RKs, MessageContent, BindingFuns).
+
+
+% Before exchange declaration
+validate(_X) -> ok.
+
+% After exchange declaration and recovery
+create(transaction, #exchange{name = XName}) ->
+  add_initial_record(XName);
+create(_Tx, _X) ->
+  ok.
+
+% Delete an exchange
+delete(transaction, #exchange{name = XName}, _Bs) ->
+  delete_state(XName),
+  ok;
+delete(_Tx, _X, _Bs) ->
+  ok.
+
+% Before add binding
+validate_binding(_X, _B) -> ok.
+
+% A new binding has been added or recovered
+add_binding( Tx
+           , #exchange{name = XName}
+           , #binding{key = BindingKey, destination = Dest, args = Args}
+           ) ->
+  Selector = get_string_arg(Args, ?RJMS_COMPILED_SELECTOR_ARG),
+  BindGen = generate_binding_fun(Selector),
+  case {Tx, BindGen} of
+    {transaction, {ok, BindFun}} ->
+      add_binding_fun(XName, {{BindingKey, Dest}, BindFun});
+    {none, {error, _}} ->
+      parsing_error(XName, Selector, Dest);
+    _ ->
+      ok
+  end,
+  ok.
+
+% Binding removal
+remove_bindings( transaction
+               , #exchange{name = XName}
+               , Bindings
+               ) ->
+  remove_binding_funs(XName, Bindings),
+  ok;
+remove_bindings(_Tx, _X, _Bs) ->
+  ok.
+
+% Exchange argument equivalence
+assert_args_equivalence(X, Args) ->
+  rabbit_exchange:assert_args_equivalence(X, Args).
+
+% Policy change notifications ignored
+policy_changed(_X1, _X2) -> ok.
+
+%%----------------------------------------------------------------------------
+%% P R I V A T E   F U N C T I O N S
+
+% Get a string argument from the args or arguments parameters
+get_string_arg(Args, ArgName) -> get_string_arg(Args, ArgName, error).
+
+get_string_arg(Args, ArgName, Default) ->
+  case rabbit_misc:table_lookup(Args, ArgName) of
+    {longstr, BinVal} -> binary_to_list(BinVal);
+    _ -> Default
+  end.
+
+% Match bindings for the current message
+match_bindings( XName, _RoutingKeys, MessageContent, BindingFuns) ->
+  MessageHeaders = get_headers(MessageContent),
+  rabbit_router:match_bindings( XName
+                              , fun(#binding{key = Key, destination = Dest}) ->
+                                  binding_fun_match({Key, Dest}, MessageHeaders, BindingFuns)
+                                end
+                              ).
+
+% Select binding function from Funs dictionary, apply it to Headers and return result (true|false)
+binding_fun_match(DictKey, Headers, FunsDict) ->
+  case dict:find(DictKey, FunsDict) of
+    {ok, Fun} when is_function(Fun, 1) -> Fun(Headers);
+    error                              -> false          % do not match if no function found
+  end.
+
+% get Headers from message content
+get_headers(Content) ->
+  case (Content#content.properties)#'P_basic'.headers of
+    undefined -> [];
+    H         -> rabbit_misc:sort_field_table(H)
+  end.
+
+% generate the function that checks the message against the selector
+generate_binding_fun(ERL) ->
+  case decode_term(ERL) of
+    {error, _}    -> error;
+    {ok, ErlTerm} -> check_fun(ErlTerm)
+  end.
+
+% build checking function from compiled expression
+check_fun(CompiledExp) ->
+  { ok,
+    fun(Headers) ->
+      selector_match(CompiledExp, Headers)
+    end
+  }.
+
+% get an erlang term from a string
+decode_term(Str) ->
+  try
+    {ok, Ts, _} = erl_scan:string(Str),
+    {ok, Term} = erl_parse:parse_term(Ts),
+    {ok, Term}
+  catch
+    Err -> {error, {invalid_erlang_term, Err}}
+  end.
+
+% Evaluate the selector and check against the Headers
+selector_match(Selector, Headers) ->
+  case sjx_evaluator:evaluate(Selector, Headers) of
+    true -> true;
+    _    -> false
+  end.
+
+% get binding funs from state (using dirty_reads)
+get_binding_funs_x(XName) ->
+  mnesia:async_dirty(
+    fun() ->
+      #?JMS_TOPIC_RECORD{x_selector_funs = BindingFuns} = read_state(XName),
+      BindingFuns
+    end,
+    []
+  ).
+
+add_initial_record(XName) ->
+  write_state_fun(XName, dict:new()).
+
+% add binding fun to binding fun dictionary
+add_binding_fun(XName, BindingKeyAndFun) ->
+  #?JMS_TOPIC_RECORD{x_selector_funs = BindingFuns} = read_state_for_update(XName),
+  write_state_fun(XName, put_item(BindingFuns, BindingKeyAndFun)).
+
+% remove binding funs from binding fun dictionary
+remove_binding_funs(XName, Bindings) ->
+  BindingKeys = [ {BindingKey, DestName} || #binding{key = BindingKey, destination = DestName} <- Bindings ],
+  #?JMS_TOPIC_RECORD{x_selector_funs = BindingFuns} = read_state_for_update(XName),
+  write_state_fun(XName, remove_items(BindingFuns, BindingKeys)).
+
+% add an item to the dictionary of binding functions
+put_item(Dict, {Key, Item}) -> dict:store(Key, Item, Dict).
+
+% remove a list of keyed items from the dictionary, by key
+remove_items(Dict, []) -> Dict;
+remove_items(Dict, [Key | Keys]) -> remove_items(dict:erase(Key, Dict), Keys).
+
+% delete all the state saved for this exchange
+delete_state(XName) ->
+  mnesia:delete(?JMS_TOPIC_TABLE, XName, write).
+
+% Basic read for update
+read_state_for_update(XName) -> read_state(XName, write).
+
+% Basic read
+read_state(XName) -> read_state(XName, read).
+
+% Lockable read
+read_state(XName, Lock) ->
+  case mnesia:read(?JMS_TOPIC_TABLE, XName, Lock) of
+    [Rec] -> Rec;
+    _     -> exchange_state_corrupt_error(XName)
+  end.
+
+% Basic write
+write_state_fun(XName, BFuns) ->
+  mnesia:write( ?JMS_TOPIC_TABLE
+              , #?JMS_TOPIC_RECORD{x_name = XName, x_selector_funs = BFuns}
+              , write ).
+
+%%----------------------------------------------------------------------------
+%% E R R O R S
+
+% state error
+exchange_state_corrupt_error(#resource{name = XName}) ->
+  rabbit_misc:protocol_error( internal_error
+                            , "exchange named '~s' has no saved state or incorrect saved state"
+                            , [XName] ).
+
+% parsing error
+parsing_error(#resource{name = XName}, S, #resource{name = DestName}) ->
+  rabbit_misc:protocol_error( precondition_failed
+                            , "cannot parse selector '~p' binding destination '~s' to exchange '~s'"
+                            , [S, DestName, XName] ).
+
+%%----------------------------------------------------------------------------
diff --git a/deps/rabbitmq_jms_topic_exchange/src/rabbitmq_jms_topic_exchange.app.src b/deps/rabbitmq_jms_topic_exchange/src/rabbitmq_jms_topic_exchange.app.src
new file mode 100644 (file)
index 0000000..8c3e151
--- /dev/null
@@ -0,0 +1,8 @@
+{ application, rabbitmq_jms_topic_exchange
+, [ {description, "RabbitMQ JMS topic selector exchange plugin"}
+  , {vsn, "3.6.6"}
+  , {modules, []}
+  , {registered, []}
+  , {applications, [kernel, stdlib, rabbit_common, rabbit, mnesia]}
+  ]
+}.
diff --git a/deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl b/deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl
new file mode 100644 (file)
index 0000000..e41d079
--- /dev/null
@@ -0,0 +1,178 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is Pivotal Software, Inc.
+%% Copyright (c) 2012, 2013 Pivotal Software, Inc.  All rights reserved.
+%% -----------------------------------------------------------------------------
+%% Derived from works which were:
+%% Copyright (c) 2002, 2012 Tim Watson (watson.timothy@gmail.com)
+%% Copyright (c) 2012, 2013 Steve Powell (Zteve.Powell@gmail.com)
+%% -----------------------------------------------------------------------------
+
+%% Evaluate an SQL expression for filtering purposes
+
+%% -----------------------------------------------------------------------------
+
+-module(sjx_evaluator).
+
+-export([evaluate/2]).
+%% Evaluation function
+%%
+%%   Given Headers (a list of keyed typed values), and a
+%%   parsed SQL string, evaluate the truth or falsity of the expression.
+%%
+%%   If an identifier is absent from Headers, or the types do not match the comparisons, the
+%%   expression will evaluate to false.
+
+-type itemname() :: binary().
+-type itemtype() ::
+      'longstr' | 'signedint' | 'byte' | 'double' | 'float' | 'long' | 'short' | 'bool'.
+-type itemvalue() :: any().
+
+-type tableitem() :: { itemname(), itemtype(), itemvalue() }.
+-type table() :: list(tableitem()).
+
+-type expression() :: any().
+
+-spec evaluate(expression(), table()) -> true | false | error.
+
+
+evaluate( true,                           _Headers ) -> true;
+evaluate( false,                          _Headers ) -> false;
+
+evaluate( {'not', Exp },                   Headers ) -> not3(evaluate(Exp, Headers));
+evaluate( {'ident', Ident },               Headers ) -> lookup_value(Headers, Ident);
+evaluate( {'is_null', Exp },               Headers ) -> val_of(Exp, Headers) =:= undefined;
+evaluate( {'not_null', Exp },              Headers ) -> val_of(Exp, Headers) =/= undefined;
+evaluate( { Op, Exp },                     Headers ) -> do_una_op(Op, evaluate(Exp, Headers));
+
+evaluate( {'and', Exp1, Exp2 },            Headers ) -> and3(evaluate(Exp1, Headers), evaluate(Exp2, Headers));
+evaluate( {'or', Exp1, Exp2 },             Headers ) -> or3(evaluate(Exp1, Headers), evaluate(Exp2, Headers));
+evaluate( {'like', LHS, Patt },            Headers ) -> isLike(val_of(LHS, Headers), Patt);
+evaluate( {'not_like', LHS, Patt },        Headers ) -> not3(isLike(val_of(LHS, Headers), Patt));
+evaluate( { Op, Exp, {range, From, To} },  Headers ) -> evaluate({ Op, Exp, From, To }, Headers);
+evaluate( {'between', Exp, From, To},           Hs ) -> between(evaluate(Exp, Hs), evaluate(From, Hs), evaluate(To, Hs));
+evaluate( {'not_between', Exp, From, To},       Hs ) -> not3(between(evaluate(Exp, Hs), evaluate(From, Hs), evaluate(To, Hs)));
+evaluate( { Op, LHS, RHS },                Headers ) -> do_bin_op(Op, evaluate(LHS, Headers), evaluate(RHS, Headers));
+
+evaluate( Value,                          _Headers ) -> Value.
+
+not3(true ) -> false;
+not3(false) -> true;
+not3(_    ) -> undefined.
+
+and3(true,  true ) -> true;
+and3(false, _    ) -> false;
+and3(_,     false) -> false;
+and3(_,     _    ) -> undefined.
+
+or3(false, false) -> false;
+or3(true,  _    ) -> true;
+or3(_,     true ) -> true;
+or3(_,     _    ) -> undefined.
+
+do_una_op(_, undefined)  -> undefined;
+do_una_op('-', E) -> -E;
+do_una_op('+', E) -> +E;
+do_una_op(_,   _) -> error.
+
+do_bin_op(_, undefined, _)  -> undefined;
+do_bin_op(_, _, undefined ) -> undefined;
+do_bin_op('=' , L, R) -> L == R;
+do_bin_op('<>', L, R) -> L /= R;
+do_bin_op('>' , L, R) -> L > R;
+do_bin_op('<' , L, R) -> L < R;
+do_bin_op('>=', L, R) -> L >= R;
+do_bin_op('<=', L, R) -> L =< R;
+do_bin_op('in', L, R) -> isIn(L, R);
+do_bin_op('not_in', L, R) -> not isIn(L, R);
+do_bin_op('+' , L, R) -> L + R;
+do_bin_op('-' , L, R) -> L - R;
+do_bin_op('*' , L, R) -> L * R;
+do_bin_op('/' , L, R) when R /= 0 -> L / R;
+do_bin_op('/' , L, R) when L > 0 andalso R == 0 -> plus_infinity;
+do_bin_op('/' , L, R) when L < 0 andalso R == 0 -> minus_infinity;
+do_bin_op('/' , L, R) when L == 0 andalso R == 0 -> nan;
+do_bin_op(_,_,_) -> error.
+
+isLike(undefined, _Patt) -> undefined;
+isLike(L, {regex, MP}) -> patt_match(L, MP);
+isLike(L, {Patt, Esc}) -> patt_match(L, pattern_of(Patt, Esc)).
+
+patt_match(L, MP) ->
+  BS = byte_size(L),
+  case re:run(L, MP, [{capture, first}]) of
+    {match, [{0, BS}]} -> true;
+    _                  -> false
+  end.
+
+isIn(_L, []   ) -> false;
+isIn( L, [L|_]) -> true;
+isIn( L, [_|R]) -> isIn(L,R).
+
+val_of({'ident', Ident}, Hs) -> lookup_value(Hs, Ident);
+val_of(Value,           _Hs) -> Value.
+
+between(E, F, T) when E =:= undefined orelse F =:= undefined orelse T =:= undefined -> undefined;
+between(Value, Lo, Hi) -> Lo =< Value andalso Value =< Hi.
+
+lookup_value(Table, Key) ->
+  case lists:keyfind(Key, 1, Table) of
+    {_, longstr,   Value} -> Value;
+    {_, signedint, Value} -> Value;
+    {_, float,     Value} -> Value;
+    {_, double,    Value} -> Value;
+    {_, byte,      Value} -> Value;
+    {_, short,     Value} -> Value;
+    {_, long,      Value} -> Value;
+    {_, bool,      Value} -> Value;
+    false                 -> undefined
+  end.
+
+pattern_of(S, Esc) -> compile_re(gen_re(binary_to_list(S), Esc)).
+
+gen_re(S, <<Ch>>   ) -> convert(S, [], Ch       );
+gen_re(S, no_escape) -> convert(S, [], no_escape);
+gen_re(_,_) -> error.
+
+convert([],               Acc, _Esc) -> lists:reverse(Acc);
+convert([Esc, Ch | Rest], Acc,  Esc) -> convert(Rest, [escape(Ch) | Acc], Esc);
+convert([$_ | Rest],      Acc,  Esc) -> convert(Rest, [$.         | Acc], Esc);
+convert([$% | Rest],      Acc,  Esc) -> convert(Rest, [".*"       | Acc], Esc);
+convert([Ch | Rest],      Acc,  Esc) -> convert(Rest, [escape(Ch) | Acc], Esc).
+
+escape($.)  -> "\\.";
+escape($*)  -> "\\*";
+escape($+)  -> "\\+";
+escape($?)  -> "\\?";
+escape($^)  -> "\\^";
+escape($=)  -> "\\=";
+escape($!)  -> "\\!";
+escape($:)  -> "\\:";
+escape($$)  -> "\\$";
+escape(${)  -> "\\{";
+escape($})  -> "\\}";
+escape($()  -> "\\(";
+escape($))  -> "\\)";
+escape($|)  -> "\\|";
+escape($[)  -> "\\[";
+escape($])  -> "\\]";
+escape($/)  -> "\\/";
+escape($\\) -> "\\\\";
+escape(Ch)  -> Ch.
+
+compile_re(error) -> error;
+compile_re(MatchMany) ->
+    case re:compile(MatchMany)
+    of  {ok, Rx} -> Rx;
+        _        -> error
+    end.
diff --git a/deps/rabbitmq_management/CODE_OF_CONDUCT.md b/deps/rabbitmq_management/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_management/CONTRIBUTING.md b/deps/rabbitmq_management/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
similarity index 89%
rename from rabbitmq-server/plugins-src/webmachine-wrapper/LICENSE-Apache-Basho
rename to deps/rabbitmq_management/LICENSE-APACHE2-ExplorerCanvas
index e454a52586f29b8ce8a6799163eac1f875e9ac01..d645695673349e3947e8e5ae42332d0ac3164cd7 100644 (file)
 
    END OF TERMS AND CONDITIONS
 
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/deps/rabbitmq_management/LICENSE-BSD-base64js b/deps/rabbitmq_management/LICENSE-BSD-base64js
new file mode 100644 (file)
index 0000000..7073704
--- /dev/null
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2010 Nick Galbreath
+ * http://code.google.com/p/stringencoders/source/browse/#svn/trunk/javascript
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+*/
similarity index 94%
rename from rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/LICENSE
rename to deps/rabbitmq_management/LICENSE-MIT-EJS10
index 1f6200918f7b8b90047ae33eac3c175507dd76b1..f3bdcd8887715f0e5b1421f97c3537ccdaea7882 100644 (file)
@@ -1,6 +1,7 @@
+EJS - Embedded JavaScript
+
+Copyright (c) 2007 Edward Benson 
 
-Copyright (c) 2010, Torbjorn Tornkvist
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
@@ -19,3 +20,4 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 
+
diff --git a/deps/rabbitmq_management/LICENSE-MIT-Flot b/deps/rabbitmq_management/LICENSE-MIT-Flot
new file mode 100644 (file)
index 0000000..67f4625
--- /dev/null
@@ -0,0 +1,22 @@
+Copyright (c) 2007-2013 IOLA and Ole Laursen
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/rabbitmq_management/LICENSE-MIT-Sammy060 b/deps/rabbitmq_management/LICENSE-MIT-Sammy060
new file mode 100644 (file)
index 0000000..3debf5a
--- /dev/null
@@ -0,0 +1,25 @@
+Copyright (c) 2008 Aaron Quint, Quirkey NYC, LLC
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+
+
diff --git a/deps/rabbitmq_management/LICENSE-MIT-jQuery164 b/deps/rabbitmq_management/LICENSE-MIT-jQuery164
new file mode 100644 (file)
index 0000000..5a87162
--- /dev/null
@@ -0,0 +1,21 @@
+Copyright (c) 2011 John Resig, http://jquery.com/
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/deps/rabbitmq_management/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_management/LICENSE-MPL-RabbitMQ
new file mode 100644 (file)
index 0000000..e163fcc
--- /dev/null
@@ -0,0 +1,455 @@
+                          MOZILLA PUBLIC LICENSE
+                                Version 1.1
+
+                              ---------------
+
+1. Definitions.
+
+     1.0.1. "Commercial Use" means distribution or otherwise making the
+     Covered Code available to a third party.
+
+     1.1. "Contributor" means each entity that creates or contributes to
+     the creation of Modifications.
+
+     1.2. "Contributor Version" means the combination of the Original
+     Code, prior Modifications used by a Contributor, and the Modifications
+     made by that particular Contributor.
+
+     1.3. "Covered Code" means the Original Code or Modifications or the
+     combination of the Original Code and Modifications, in each case
+     including portions thereof.
+
+     1.4. "Electronic Distribution Mechanism" means a mechanism generally
+     accepted in the software development community for the electronic
+     transfer of data.
+
+     1.5. "Executable" means Covered Code in any form other than Source
+     Code.
+
+     1.6. "Initial Developer" means the individual or entity identified
+     as the Initial Developer in the Source Code notice required by Exhibit
+     A.
+
+     1.7. "Larger Work" means a work which combines Covered Code or
+     portions thereof with code not governed by the terms of this License.
+
+     1.8. "License" means this document.
+
+     1.8.1. "Licensable" means having the right to grant, to the maximum
+     extent possible, whether at the time of the initial grant or
+     subsequently acquired, any and all of the rights conveyed herein.
+
+     1.9. "Modifications" means any addition to or deletion from the
+     substance or structure of either the Original Code or any previous
+     Modifications. When Covered Code is released as a series of files, a
+     Modification is:
+          A. Any addition to or deletion from the contents of a file
+          containing Original Code or previous Modifications.
+
+          B. Any new file that contains any part of the Original Code or
+          previous Modifications.
+
+     1.10. "Original Code" means Source Code of computer software code
+     which is described in the Source Code notice required by Exhibit A as
+     Original Code, and which, at the time of its release under this
+     License is not already Covered Code governed by this License.
+
+     1.10.1. "Patent Claims" means any patent claim(s), now owned or
+     hereafter acquired, including without limitation,  method, process,
+     and apparatus claims, in any patent Licensable by grantor.
+
+     1.11. "Source Code" means the preferred form of the Covered Code for
+     making modifications to it, including all modules it contains, plus
+     any associated interface definition files, scripts used to control
+     compilation and installation of an Executable, or source code
+     differential comparisons against either the Original Code or another
+     well known, available Covered Code of the Contributor's choice. The
+     Source Code can be in a compressed or archival form, provided the
+     appropriate decompression or de-archiving software is widely available
+     for no charge.
+
+     1.12. "You" (or "Your")  means an individual or a legal entity
+     exercising rights under, and complying with all of the terms of, this
+     License or a future version of this License issued under Section 6.1.
+     For legal entities, "You" includes any entity which controls, is
+     controlled by, or is under common control with You. For purposes of
+     this definition, "control" means (a) the power, direct or indirect,
+     to cause the direction or management of such entity, whether by
+     contract or otherwise, or (b) ownership of more than fifty percent
+     (50%) of the outstanding shares or beneficial ownership of such
+     entity.
+
+2. Source Code License.
+
+     2.1. The Initial Developer Grant.
+     The Initial Developer hereby grants You a world-wide, royalty-free,
+     non-exclusive license, subject to third party intellectual property
+     claims:
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Initial Developer to use, reproduce,
+          modify, display, perform, sublicense and distribute the Original
+          Code (or portions thereof) with or without Modifications, and/or
+          as part of a Larger Work; and
+
+          (b) under Patents Claims infringed by the making, using or
+          selling of Original Code, to make, have made, use, practice,
+          sell, and offer for sale, and/or otherwise dispose of the
+          Original Code (or portions thereof).
+
+          (c) the licenses granted in this Section 2.1(a) and (b) are
+          effective on the date Initial Developer first distributes
+          Original Code under the terms of this License.
+
+          (d) Notwithstanding Section 2.1(b) above, no patent license is
+          granted: 1) for code that You delete from the Original Code; 2)
+          separate from the Original Code;  or 3) for infringements caused
+          by: i) the modification of the Original Code or ii) the
+          combination of the Original Code with other software or devices.
+
+     2.2. Contributor Grant.
+     Subject to third party intellectual property claims, each Contributor
+     hereby grants You a world-wide, royalty-free, non-exclusive license
+
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Contributor, to use, reproduce, modify,
+          display, perform, sublicense and distribute the Modifications
+          created by such Contributor (or portions thereof) either on an
+          unmodified basis, with other Modifications, as Covered Code
+          and/or as part of a Larger Work; and
+
+          (b) under Patent Claims infringed by the making, using, or
+          selling of  Modifications made by that Contributor either alone
+          and/or in combination with its Contributor Version (or portions
+          of such combination), to make, use, sell, offer for sale, have
+          made, and/or otherwise dispose of: 1) Modifications made by that
+          Contributor (or portions thereof); and 2) the combination of
+          Modifications made by that Contributor with its Contributor
+          Version (or portions of such combination).
+
+          (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
+          effective on the date Contributor first makes Commercial Use of
+          the Covered Code.
+
+          (d)    Notwithstanding Section 2.2(b) above, no patent license is
+          granted: 1) for any code that Contributor has deleted from the
+          Contributor Version; 2)  separate from the Contributor Version;
+          3)  for infringements caused by: i) third party modifications of
+          Contributor Version or ii)  the combination of Modifications made
+          by that Contributor with other software  (except as part of the
+          Contributor Version) or other devices; or 4) under Patent Claims
+          infringed by Covered Code in the absence of Modifications made by
+          that Contributor.
+
+3. Distribution Obligations.
+
+     3.1. Application of License.
+     The Modifications which You create or to which You contribute are
+     governed by the terms of this License, including without limitation
+     Section 2.2. The Source Code version of Covered Code may be
+     distributed only under the terms of this License or a future version
+     of this License released under Section 6.1, and You must include a
+     copy of this License with every copy of the Source Code You
+     distribute. You may not offer or impose any terms on any Source Code
+     version that alters or restricts the applicable version of this
+     License or the recipients' rights hereunder. However, You may include
+     an additional document offering the additional rights described in
+     Section 3.5.
+
+     3.2. Availability of Source Code.
+     Any Modification which You create or to which You contribute must be
+     made available in Source Code form under the terms of this License
+     either on the same media as an Executable version or via an accepted
+     Electronic Distribution Mechanism to anyone to whom you made an
+     Executable version available; and if made available via Electronic
+     Distribution Mechanism, must remain available for at least twelve (12)
+     months after the date it initially became available, or at least six
+     (6) months after a subsequent version of that particular Modification
+     has been made available to such recipients. You are responsible for
+     ensuring that the Source Code version remains available even if the
+     Electronic Distribution Mechanism is maintained by a third party.
+
+     3.3. Description of Modifications.
+     You must cause all Covered Code to which You contribute to contain a
+     file documenting the changes You made to create that Covered Code and
+     the date of any change. You must include a prominent statement that
+     the Modification is derived, directly or indirectly, from Original
+     Code provided by the Initial Developer and including the name of the
+     Initial Developer in (a) the Source Code, and (b) in any notice in an
+     Executable version or related documentation in which You describe the
+     origin or ownership of the Covered Code.
+
+     3.4. Intellectual Property Matters
+          (a) Third Party Claims.
+          If Contributor has knowledge that a license under a third party's
+          intellectual property rights is required to exercise the rights
+          granted by such Contributor under Sections 2.1 or 2.2,
+          Contributor must include a text file with the Source Code
+          distribution titled "LEGAL" which describes the claim and the
+          party making the claim in sufficient detail that a recipient will
+          know whom to contact. If Contributor obtains such knowledge after
+          the Modification is made available as described in Section 3.2,
+          Contributor shall promptly modify the LEGAL file in all copies
+          Contributor makes available thereafter and shall take other steps
+          (such as notifying appropriate mailing lists or newsgroups)
+          reasonably calculated to inform those who received the Covered
+          Code that new knowledge has been obtained.
+
+          (b) Contributor APIs.
+          If Contributor's Modifications include an application programming
+          interface and Contributor has knowledge of patent licenses which
+          are reasonably necessary to implement that API, Contributor must
+          also include this information in the LEGAL file.
+
+               (c)    Representations.
+          Contributor represents that, except as disclosed pursuant to
+          Section 3.4(a) above, Contributor believes that Contributor's
+          Modifications are Contributor's original creation(s) and/or
+          Contributor has sufficient rights to grant the rights conveyed by
+          this License.
+
+     3.5. Required Notices.
+     You must duplicate the notice in Exhibit A in each file of the Source
+     Code.  If it is not possible to put such notice in a particular Source
+     Code file due to its structure, then You must include such notice in a
+     location (such as a relevant directory) where a user would be likely
+     to look for such a notice.  If You created one or more Modification(s)
+     You may add your name as a Contributor to the notice described in
+     Exhibit A.  You must also duplicate this License in any documentation
+     for the Source Code where You describe recipients' rights or ownership
+     rights relating to Covered Code.  You may choose to offer, and to
+     charge a fee for, warranty, support, indemnity or liability
+     obligations to one or more recipients of Covered Code. However, You
+     may do so only on Your own behalf, and not on behalf of the Initial
+     Developer or any Contributor. You must make it absolutely clear than
+     any such warranty, support, indemnity or liability obligation is
+     offered by You alone, and You hereby agree to indemnify the Initial
+     Developer and every Contributor for any liability incurred by the
+     Initial Developer or such Contributor as a result of warranty,
+     support, indemnity or liability terms You offer.
+
+     3.6. Distribution of Executable Versions.
+     You may distribute Covered Code in Executable form only if the
+     requirements of Section 3.1-3.5 have been met for that Covered Code,
+     and if You include a notice stating that the Source Code version of
+     the Covered Code is available under the terms of this License,
+     including a description of how and where You have fulfilled the
+     obligations of Section 3.2. The notice must be conspicuously included
+     in any notice in an Executable version, related documentation or
+     collateral in which You describe recipients' rights relating to the
+     Covered Code. You may distribute the Executable version of Covered
+     Code or ownership rights under a license of Your choice, which may
+     contain terms different from this License, provided that You are in
+     compliance with the terms of this License and that the license for the
+     Executable version does not attempt to limit or alter the recipient's
+     rights in the Source Code version from the rights set forth in this
+     License. If You distribute the Executable version under a different
+     license You must make it absolutely clear that any terms which differ
+     from this License are offered by You alone, not by the Initial
+     Developer or any Contributor. You hereby agree to indemnify the
+     Initial Developer and every Contributor for any liability incurred by
+     the Initial Developer or such Contributor as a result of any such
+     terms You offer.
+
+     3.7. Larger Works.
+     You may create a Larger Work by combining Covered Code with other code
+     not governed by the terms of this License and distribute the Larger
+     Work as a single product. In such a case, You must make sure the
+     requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+
+     If it is impossible for You to comply with any of the terms of this
+     License with respect to some or all of the Covered Code due to
+     statute, judicial order, or regulation then You must: (a) comply with
+     the terms of this License to the maximum extent possible; and (b)
+     describe the limitations and the code they affect. Such description
+     must be included in the LEGAL file described in Section 3.4 and must
+     be included with all distributions of the Source Code. Except to the
+     extent prohibited by statute or regulation, such description must be
+     sufficiently detailed for a recipient of ordinary skill to be able to
+     understand it.
+
+5. Application of this License.
+
+     This License applies to code to which the Initial Developer has
+     attached the notice in Exhibit A and to related Covered Code.
+
+6. Versions of the License.
+
+     6.1. New Versions.
+     Netscape Communications Corporation ("Netscape") may publish revised
+     and/or new versions of the License from time to time. Each version
+     will be given a distinguishing version number.
+
+     6.2. Effect of New Versions.
+     Once Covered Code has been published under a particular version of the
+     License, You may always continue to use it under the terms of that
+     version. You may also choose to use such Covered Code under the terms
+     of any subsequent version of the License published by Netscape. No one
+     other than Netscape has the right to modify the terms applicable to
+     Covered Code created under this License.
+
+     6.3. Derivative Works.
+     If You create or use a modified version of this License (which you may
+     only do in order to apply it to code which is not already Covered Code
+     governed by this License), You must (a) rename Your license so that
+     the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
+     "MPL", "NPL" or any confusingly similar phrase do not appear in your
+     license (except to note that your license differs from this License)
+     and (b) otherwise make it clear that Your version of the license
+     contains terms which differ from the Mozilla Public License and
+     Netscape Public License. (Filling in the name of the Initial
+     Developer, Original Code or Contributor in the notice described in
+     Exhibit A shall not of themselves be deemed to be modifications of
+     this License.)
+
+7. DISCLAIMER OF WARRANTY.
+
+     COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+     WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+     DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
+     THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
+     IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
+     YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
+     COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
+     OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
+     ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+
+     8.1.  This License and the rights granted hereunder will terminate
+     automatically if You fail to comply with terms herein and fail to cure
+     such breach within 30 days of becoming aware of the breach. All
+     sublicenses to the Covered Code which are properly granted shall
+     survive any termination of this License. Provisions which, by their
+     nature, must remain in effect beyond the termination of this License
+     shall survive.
+
+     8.2.  If You initiate litigation by asserting a patent infringement
+     claim (excluding declatory judgment actions) against Initial Developer
+     or a Contributor (the Initial Developer or Contributor against whom
+     You file such action is referred to as "Participant")  alleging that:
+
+     (a)  such Participant's Contributor Version directly or indirectly
+     infringes any patent, then any and all rights granted by such
+     Participant to You under Sections 2.1 and/or 2.2 of this License
+     shall, upon 60 days notice from Participant terminate prospectively,
+     unless if within 60 days after receipt of notice You either: (i)
+     agree in writing to pay Participant a mutually agreeable reasonable
+     royalty for Your past and future use of Modifications made by such
+     Participant, or (ii) withdraw Your litigation claim with respect to
+     the Contributor Version against such Participant.  If within 60 days
+     of notice, a reasonable royalty and payment arrangement are not
+     mutually agreed upon in writing by the parties or the litigation claim
+     is not withdrawn, the rights granted by Participant to You under
+     Sections 2.1 and/or 2.2 automatically terminate at the expiration of
+     the 60 day notice period specified above.
+
+     (b)  any software, hardware, or device, other than such Participant's
+     Contributor Version, directly or indirectly infringes any patent, then
+     any rights granted to You by such Participant under Sections 2.1(b)
+     and 2.2(b) are revoked effective as of the date You first made, used,
+     sold, distributed, or had made, Modifications made by that
+     Participant.
+
+     8.3.  If You assert a patent infringement claim against Participant
+     alleging that such Participant's Contributor Version directly or
+     indirectly infringes any patent where such claim is resolved (such as
+     by license or settlement) prior to the initiation of patent
+     infringement litigation, then the reasonable value of the licenses
+     granted by such Participant under Sections 2.1 or 2.2 shall be taken
+     into account in determining the amount or value of any payment or
+     license.
+
+     8.4.  In the event of termination under Sections 8.1 or 8.2 above,
+     all end user license agreements (excluding distributors and resellers)
+     which have been validly granted by You or any distributor hereunder
+     prior to termination shall survive termination.
+
+9. LIMITATION OF LIABILITY.
+
+     UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
+     (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
+     DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
+     OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
+     ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
+     CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
+     WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
+     COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
+     INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
+     LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
+     RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+     PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
+     EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
+     THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
+
+10. U.S. GOVERNMENT END USERS.
+
+     The Covered Code is a "commercial item," as that term is defined in
+     48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
+     software" and "commercial computer software documentation," as such
+     terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
+     C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
+     all U.S. Government End Users acquire Covered Code with only those
+     rights set forth herein.
+
+11. MISCELLANEOUS.
+
+     This License represents the complete agreement concerning subject
+     matter hereof. If any provision of this License is held to be
+     unenforceable, such provision shall be reformed only to the extent
+     necessary to make it enforceable. This License shall be governed by
+     California law provisions (except to the extent applicable law, if
+     any, provides otherwise), excluding its conflict-of-law provisions.
+     With respect to disputes in which at least one party is a citizen of,
+     or an entity chartered or registered to do business in the United
+     States of America, any litigation relating to this License shall be
+     subject to the jurisdiction of the Federal Courts of the Northern
+     District of California, with venue lying in Santa Clara County,
+     California, with the losing party responsible for costs, including
+     without limitation, court costs and reasonable attorneys' fees and
+     expenses. The application of the United Nations Convention on
+     Contracts for the International Sale of Goods is expressly excluded.
+     Any law or regulation which provides that the language of a contract
+     shall be construed against the drafter shall not apply to this
+     License.
+
+12. RESPONSIBILITY FOR CLAIMS.
+
+     As between Initial Developer and the Contributors, each party is
+     responsible for claims and damages arising, directly or indirectly,
+     out of its utilization of rights under this License and You agree to
+     work with Initial Developer and Contributors to distribute such
+     responsibility on an equitable basis. Nothing herein is intended or
+     shall be deemed to constitute any admission of liability.
+
+13. MULTIPLE-LICENSED CODE.
+
+     Initial Developer may designate portions of the Covered Code as
+     "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
+     Developer permits you to utilize portions of the Covered Code under
+     Your choice of the NPL or the alternative licenses, if any, specified
+     by the Initial Developer in the file described in Exhibit A.
+
+EXHIBIT A -Mozilla Public License.
+
+     ``The contents of this file are subject to the Mozilla Public License
+     Version 1.1 (the "License"); you may not use this file except in
+     compliance with the License. You may obtain a copy of the License at
+     http://www.mozilla.org/MPL/
+
+     Software distributed under the License is distributed on an "AS IS"
+     basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+     License for the specific language governing rights and limitations
+     under the License.
+
+     The Original Code is RabbitMQ Management Plugin.
+
+     The Initial Developer of the Original Code is GoPivotal, Inc.
+     Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.''
+
+     [NOTE: The text of this Exhibit A may differ slightly from the text of
+     the notices in the Source Code files of the Original Code. You should
+     use the text of this Exhibit A rather than the text found in the
+     Original Code Source Code for Your Modifications.]
diff --git a/deps/rabbitmq_management/Makefile b/deps/rabbitmq_management/Makefile
new file mode 100644 (file)
index 0000000..cc12c49
--- /dev/null
@@ -0,0 +1,31 @@
+PROJECT = rabbitmq_management
+
+DEPS = rabbit_common rabbit amqp_client webmachine rabbitmq_web_dispatch rabbitmq_management_agent
+TEST_DEPS = rabbitmq_ct_helpers
+
+dep_webmachine = git https://github.com/rabbitmq/webmachine.git 6b5210c0ed07159f43222255e05a90bbef6c8cbe
+dep_rabbitmq_web_dispatch = git https://github.com/rabbitmq/rabbitmq-web-dispatch.git stable
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-dist.mk \
+             rabbit_common/mk/rabbitmq-run.mk \
+             rabbit_common/mk/rabbitmq-tools.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
+
+# --------------------------------------------------------------------
+# Distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @echo bin/rabbitmqadmin
+
+prepare-dist::
+       $(verbose) sed 's/%%VSN%%/$(VSN)/' bin/rabbitmqadmin \
+               > $(EZ_DIR)/priv/www/cli/rabbitmqadmin
diff --git a/deps/rabbitmq_management/README.md b/deps/rabbitmq_management/README.md
new file mode 100644 (file)
index 0000000..fa8bbfe
--- /dev/null
@@ -0,0 +1,13 @@
+# RabbitMQ Management Plugin
+
+This plugin provides a management UI and HTTP API for RabbitMQ.
+This plugin is included in the RabbitMQ distribution. To enable
+it, use <a href="http://www.rabbitmq.com/man/rabbitmq-plugins.1.man.html">rabbitmq-plugins</a>.
+
+## Documentation
+
+[RabbitMQ management UI documentation](http://www.rabbitmq.com/management.html).
+
+## Continuous Integration
+
+[![Build Status](https://travis-ci.org/rabbitmq/rabbitmq-management.svg?branch=master)](https://travis-ci.org/rabbitmq/rabbitmq-management)
similarity index 87%
rename from rabbitmq-server/plugins-src/rabbitmq-management/bin/rabbitmqadmin
rename to deps/rabbitmq_management/bin/rabbitmqadmin
index f8f2da51aee06b95be04c73946dfdbc05de2bb78..0b329ad07b16854c1bb00657810ee1875bab0837 100755 (executable)
 #   The Original Code is RabbitMQ Management Plugin.
 #
 #   The Initial Developer of the Original Code is GoPivotal, Inc.
-#   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+#   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 
 import sys
 if sys.version_info[0] < 2 or (sys.version_info[0] == 2 and sys.version_info[1] < 6):
-    print("Sorry, rabbitmqadmin requires at least Python 2.6.")
+    print("Sorry, rabbitmqadmin requires at least Python 2.6 (2.7.9 when HTTPS is enabled).")
     sys.exit(1)
 
 from optparse import OptionParser, TitledHelpFormatter
@@ -26,6 +26,8 @@ import base64
 import json
 import os
 import socket
+import ssl
+import traceback
 
 if sys.version_info[0] == 2:
     from ConfigParser import ConfigParser, NoSectionError
@@ -252,6 +254,7 @@ def fmt_usage_stanza(root, verb):
 
 default_options = { "hostname"        : "localhost",
                     "port"            : "15672",
+                    "path_prefix"        : "",
                     "declare_vhost"   : "/",
                     "username"        : "guest",
                     "password"        : "guest",
@@ -291,6 +294,8 @@ def make_parser():
     add("-P", "--port", dest="port",
         help="connect to port PORT",
         metavar="PORT")
+    add("--path-prefix", dest="path_prefix",
+        help="use specific URI path prefix for the RabbitMQ HTTP API (default: blank string)")
     add("-V", "--vhost", dest="vhost",
         help="connect to vhost VHOST [default: all vhosts for list, '/' for declare]",
         metavar="VHOST")
@@ -308,6 +313,10 @@ def make_parser():
         help="PEM format key file for SSL")
     add("--ssl-cert-file", dest="ssl_cert_file",
         help="PEM format certificate file for SSL")
+    add("--ssl-ca-cert-file", dest="ssl_ca_cert_file",
+        help="PEM format CA certificate file for SSL")
+    add("--ssl-disable-hostname-verification", dest="ssl_disable_hostname_verification",
+        help="Disables peer hostname verification", default=False, action="store_true" )
     add("-f", "--format", dest="format",
         help="format for listing commands - one of [" + ", ".join(FORMATS.keys())  + "]")
     add("-S", "--sort", dest="sort", help="sort key for listing queries")
@@ -367,7 +376,10 @@ def make_configuration():
                              (options.node, options.config, error))
         else:
             for key, val in new_conf.items():
-                setattr(options, key, val)
+                if key == 'ssl':
+                    setattr(options, key, val == "True")
+                else:
+                    setattr(options, key, val)
 
     return (options, args)
 
@@ -421,23 +433,46 @@ class Management:
         self.args = args
 
     def get(self, path):
-        return self.http("GET", "/api%s" % path, "")
+        return self.http("GET", "%s/api%s" % (self.options.path_prefix, path), "")
 
     def put(self, path, body):
-        return self.http("PUT", "/api%s" % path, body)
+        return self.http("PUT", "%s/api%s" % (self.options.path_prefix, path), body)
 
     def post(self, path, body):
-        return self.http("POST", "/api%s" % path, body)
+        return self.http("POST", "%s/api%s" % (self.options.path_prefix, path), body)
 
     def delete(self, path):
-        return self.http("DELETE", "/api%s" % path, "")
+        return self.http("DELETE", "%s/api%s" % (self.options.path_prefix, path), "")
+
+    def __initialize_https_connection(self, hostname, port):
+        # Python 2.7.9+
+        if hasattr(ssl, 'create_default_context'):
+            return httplib.HTTPSConnection(hostname, port,
+                                           context = self.__initialize_tls_context())
+        # Python < 2.7.8, note: those versions still have SSLv3 enabled
+        #                       and other limitations. See rabbitmq/rabbitmq-management#225
+        else:
+            print("Warning: rabbitmqadmin requires Python 2.7.9+ when HTTPS is used.")
+            return httplib.HTTPSConnection(hostname, port,
+                                           cert_file = self.options.ssl_cert_file,
+                                           key_file  = self.options.ssl_key_file)
+
+    def __initialize_tls_context(self):
+        # Python 2.7.9+ only. SERVER_AUTH: this tool is a TLS *client* verifying the broker's cert.
+        ssl_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
+        ssl_ctx.options &= ~ssl.OP_NO_SSLv3
+        ssl_ctx.verify_mode = ssl.CERT_REQUIRED
+        ssl_ctx.check_hostname = not self.options.ssl_disable_hostname_verification
+        if self.options.ssl_cert_file:  # client cert is optional for HTTPS
+            ssl_ctx.load_cert_chain(self.options.ssl_cert_file, self.options.ssl_key_file)
+        if self.options.ssl_ca_cert_file:
+            ssl_ctx.load_verify_locations(self.options.ssl_ca_cert_file)
+        return ssl_ctx
 
     def http(self, method, path, body):
         if self.options.ssl:
-            conn = httplib.HTTPSConnection(self.options.hostname,
-                                           self.options.port,
-                                           self.options.ssl_key_file,
-                                           self.options.ssl_cert_file)
+            conn = self.__initialize_https_connection(self.options.hostname,
+                                                      self.options.port)
         else:
             conn = httplib.HTTPConnection(self.options.hostname,
                                           self.options.port)
@@ -449,6 +484,7 @@ class Management:
         try:
             conn.request(method, path, body, headers)
         except socket.error as e:
+            traceback.print_exc()  # print_exc takes no exception arg; it reads sys.exc_info()
             die("Could not connect: {0}".format(e))
         resp = conn.getresponse()
         if resp.status == 400:
@@ -525,7 +561,10 @@ class Management:
 
     def invoke_export(self):
         path = self.get_arg()
-        definitions = self.get("/definitions")
+        uri = "/definitions"
+        if self.options.vhost:
+            uri += "/%s" % quote_plus(self.options.vhost)
+        definitions = self.get(uri)
         f = open(path, 'w')
         f.write(definitions)
         f.close()
@@ -537,7 +576,10 @@ class Management:
         f = open(path, 'r')
         definitions = f.read()
         f.close()
-        self.post("/definitions", definitions)
+        uri = "/definitions"
+        if self.options.vhost:
+            uri += "/%s" % quote_plus(self.options.vhost)
+        self.post(uri, definitions)
         self.verbose("Imported definitions for %s from \"%s\""
                      % (self.options.hostname, path))
 
@@ -867,77 +909,77 @@ _rabbitmqadmin()
     fargs="--help --host --port --vhost --username --password --format --depth --sort --sort-reverse"
 
     case "${prev}" in
-       list)
-           COMPREPLY=( $(compgen -W '""" + " ".join(LISTABLE) + """' -- ${cur}) )
+        list)
+            COMPREPLY=( $(compgen -W '""" + " ".join(LISTABLE) + """' -- ${cur}) )
             return 0
             ;;
-       show)
-           COMPREPLY=( $(compgen -W '""" + " ".join(SHOWABLE) + """' -- ${cur}) )
+        show)
+            COMPREPLY=( $(compgen -W '""" + " ".join(SHOWABLE) + """' -- ${cur}) )
             return 0
             ;;
-       declare)
-           COMPREPLY=( $(compgen -W '""" + " ".join(DECLARABLE.keys()) + """' -- ${cur}) )
+        declare)
+            COMPREPLY=( $(compgen -W '""" + " ".join(DECLARABLE.keys()) + """' -- ${cur}) )
             return 0
             ;;
-       delete)
-           COMPREPLY=( $(compgen -W '""" + " ".join(DELETABLE.keys()) + """' -- ${cur}) )
+        delete)
+            COMPREPLY=( $(compgen -W '""" + " ".join(DELETABLE.keys()) + """' -- ${cur}) )
             return 0
             ;;
-       close)
-           COMPREPLY=( $(compgen -W '""" + " ".join(CLOSABLE.keys()) + """' -- ${cur}) )
+        close)
+            COMPREPLY=( $(compgen -W '""" + " ".join(CLOSABLE.keys()) + """' -- ${cur}) )
             return 0
             ;;
-       purge)
-           COMPREPLY=( $(compgen -W '""" + " ".join(PURGABLE.keys()) + """' -- ${cur}) )
+        purge)
+            COMPREPLY=( $(compgen -W '""" + " ".join(PURGABLE.keys()) + """' -- ${cur}) )
             return 0
             ;;
-       export)
-           COMPREPLY=( $(compgen -f ${cur}) )
+        export)
+            COMPREPLY=( $(compgen -f ${cur}) )
             return 0
             ;;
-       import)
-           COMPREPLY=( $(compgen -f ${cur}) )
+        import)
+            COMPREPLY=( $(compgen -f ${cur}) )
             return 0
             ;;
-       help)
+        help)
             opts="subcommands config"
-           COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
+            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
             return 0
             ;;
-       -H)
-           COMPREPLY=( $(compgen -A hostname ${cur}) )
+        -H)
+            COMPREPLY=( $(compgen -A hostname ${cur}) )
             return 0
             ;;
-       --host)
-           COMPREPLY=( $(compgen -A hostname ${cur}) )
+        --host)
+            COMPREPLY=( $(compgen -A hostname ${cur}) )
             return 0
             ;;
-       -V)
+        -V)
             opts="$(rabbitmqadmin -q -f bash list vhosts)"
-           COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
+            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
             return 0
             ;;
-       --vhost)
+        --vhost)
             opts="$(rabbitmqadmin -q -f bash list vhosts)"
-           COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
+            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
             return 0
             ;;
-       -u)
+        -u)
             opts="$(rabbitmqadmin -q -f bash list users)"
-           COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
+            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
             return 0
             ;;
-       --username)
+        --username)
             opts="$(rabbitmqadmin -q -f bash list users)"
-           COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
+            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
             return 0
             ;;
-       -f)
-           COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\"  -- ${cur}) )
+        -f)
+            COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\"  -- ${cur}) )
             return 0
             ;;
-       --format)
-           COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\"  -- ${cur}) )
+        --format)
+            COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\"  -- ${cur}) )
             return 0
             ;;
 
@@ -946,7 +988,7 @@ _rabbitmqadmin()
         key = l[0:len(l) - 1]
         script += "        " + key + """)
             opts="$(rabbitmqadmin -q -f bash list """ + l + """)"
-           COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
+            COMPREPLY=( $(compgen -W "${opts}"  -- ${cur}) )
             return 0
             ;;
 """
diff --git a/deps/rabbitmq_management/erlang.mk b/deps/rabbitmq_management/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simply writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards and for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang OAuth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = Zab protocol implemented in Erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+# Instantiate one fetch/build target per build-time and runtime dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every application directory. The IS_APP=1
+# guard stops an app's own Makefile from recursing back into the apps list.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+# distclean wipes the whole deps dir; SKIP_DEPS opts out entirely.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include a plugin file from a dependency and make it an order-only stub
+# target so the -include does not fail before the dep is fetched.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# DEP_PLUGINS entries are either "depname" (loads depname/plugins.mk)
+# or an explicit "depname/path/to/file.mk".
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL (Django template) compilation plugin.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH, subdirectory separators become underscores in the
+# module name; otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet evaluated via $(call erlang,...); compiles each .dtl file
+# into a <name>$(DTL_SUFFIX) module under ebin/. Crashes (badmatch) on a
+# compile error, which fails the make recipe.
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# $? restricts compilation to templates newer than the .app file.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin (uses the protobuffs application).
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate .erl/.hrl from .proto, compile the generated modules to ebin/,
+# then drop the intermediate .erl sources.
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; the trailing comment lists opt-in extras.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# V=0 prints a short tag, V=2 traces the shell commands (set -x).
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# ebin/test marks a test build; a fresh 'app' after a test build must
+# clean first so non-test ERLC_OPTS take effect.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# .app resource templates: library flavor (no mod/registered) when no
+# application callback module src/$(PROJECT_MOD).erl exists.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# erlc emits into asn1/; generated sources are moved under src/ and
+# include/ so the normal Erlang rules pick them up.
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile MIBs to priv/mibs/*.bin, then generate include/ .hrl files.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang snippet that scans every source file's attributes (behaviour,
+# parse_transform, include, include_lib, import) and writes $(PROJECT).d:
+# a makefile fragment with one "module:: deps" rule per file plus a
+# COMPILE_FIRST ordering derived from a topological sort of the
+# behaviour/parse_transform dependency graph.
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+# Regenerate $(PROJECT).d unless NO_MAKEDEP is set and the file exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# Dependencies are built without -Werror so upstream warnings do not
+# break downstream builds.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build the .app resource file. $? limits compilation to out-of-date
+# sources; MODULES is the quoted module list injected into the resource.
+# With no .app.src, the app_file template is rendered; otherwise the
+# modules/id placeholders in src/$(PROJECT).app.src are substituted.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+# Use POSIX [[:space:]] (not the GNU-only \s escape) so the check also
+# works with BSD grep; the sed expressions below already use it.
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{[[:space:]]*modules[[:space:]]*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove the dep file, compiled output, and every generated source
+# (leex/yecc .erl, MIB .hrl, ASN.1 .erl/.hrl/.asn1db).
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# SKIP_DEPS turns doc-deps into a no-op.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+# SKIP_DEPS turns rel-deps into a no-op.
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+# Test builds drop -Werror and define TEST for conditional compilation.
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases: no src/ at all; src/ not yet built for tests (must clean
+# so TEST_ERLC_OPTS apply); already a test build (ebin/test marker).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Keep only +flag options (drop -Werror etc.) and strip the leading '+'.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Rebar-compatible config: hex deps keep their version string, all other
+# deps become {Name, ".*", {git, Repo, Commit}} entries.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export through the environment so the multi-line value survives the
+# shell invocation in the recipe below.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# AsciiDoc plugin: builds the user guide (PDF + chunked HTML) and man
+# pages with a2x, and installs the gzipped man pages.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# Fix: install(1) takes the owner with -o and the group with -g. The
+# previous version passed `id -u` (uid) to -g and `id -g` (gid) to -o,
+# i.e. owner and group were swapped.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+# Rendered by render_template with $p = project name, $n = module name.
+# NOTE: nothing may be added inside the define bodies below; their exact
+# content is written verbatim into the generated files.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+# Each tpl_* define is an Erlang module skeleton rendered verbatim by the
+# 'new' target ($n = module name). Comments must stay outside the define
+# bodies or they would end up in the generated modules.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy 1.x style handlers below (init/3 etc.).
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render a template variable $(1) into file $(2), escaping %, ' and
+# newlines for printf, and replacing tabs with $(WS).
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS: whitespace written into generated files; SP=<n> selects n spaces,
+# default is a tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application skeleton under $(APPS_DIR)/$(in).
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Render template t=TPL as module n=NAME; with in=APP, delegate to that
+# application's own Makefile.
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# C source (NIF/port driver) build support.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+# Position-independent code everywhere except Windows (MSYS2).
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 78%
rename from rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt.hrl
rename to deps/rabbitmq_management/include/rabbit_mgmt.hrl
index 43cc67dcb8bfa3d7c80c6a4db21f98482a7f067e..a5f6209e02a92dc70a845d838ab96e4f9ad59ffb 100644 (file)
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
--record(context, {user, password = none}).
+-record(context, {user,
+                  password = none,
+                  impl}). % storage for a context of the resource handler
 -record(range, {first, last, incr}).
--record(stats, {diffs, base}).
 
 -define(AUTH_REALM, "Basic realm=\"RabbitMQ Management\"").
diff --git a/deps/rabbitmq_management/include/rabbit_mgmt_event_collector.hrl b/deps/rabbitmq_management/include/rabbit_mgmt_event_collector.hrl
new file mode 100644 (file)
index 0000000..816365c
--- /dev/null
@@ -0,0 +1,32 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-record(state, {
+          lookups,
+          interval,
+          event_refresh_ref,
+          rates_mode,
+          max_backlog}).
+
+-define(FINE_STATS_TYPES, [channel_queue_stats, channel_exchange_stats,
+                           channel_queue_exchange_stats]).
+
+-define(TABLES, [queue_stats, connection_stats, channel_stats,
+                 consumers_by_queue, consumers_by_channel,
+                 node_stats, node_node_stats,
+                 %% What the previous info item was for any given
+                 %% {queue/channel/connection}
+                 old_stats]).
diff --git a/deps/rabbitmq_management/include/rabbit_mgmt_metrics.hrl b/deps/rabbitmq_management/include/rabbit_mgmt_metrics.hrl
new file mode 100644 (file)
index 0000000..04ec4c1
--- /dev/null
@@ -0,0 +1,211 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-define(DELIVER_GET, [deliver, deliver_no_ack, get, get_no_ack]).
+-define(FINE_STATS, [publish, publish_in, publish_out,
+                     ack, deliver_get, confirm, return_unroutable, redeliver] ++
+            ?DELIVER_GET).
+
+%% Most come from channels as fine stats, but queues emit these directly.
+-define(QUEUE_MSG_RATES, [disk_reads, disk_writes]).
+
+-define(MSG_RATES, ?FINE_STATS ++ ?QUEUE_MSG_RATES).
+
+-define(MSG_RATES_DETAILS, [publish_details, publish_in_details,
+                            publish_out_details, ack_details,
+                            deliver_get_details, confirm_details,
+                            return_unroutable_details, redeliver_details,
+                            deliver_details, deliver_no_ack_details,
+                            get_details, get_no_ack_details,
+                            disk_reads_details, disk_writes_details] ++ ?MSG_RATES).
+
+-define(QUEUE_MSG_COUNTS, [messages, messages_ready, messages_unacknowledged]).
+
+-define(COARSE_NODE_STATS,
+        [mem_used, fd_used, sockets_used, proc_used, disk_free,
+         io_read_count,  io_read_bytes,  io_read_time,
+         io_write_count, io_write_bytes, io_write_time,
+         io_sync_count,  io_sync_time,
+         io_seek_count,  io_seek_time,
+         io_reopen_count, mnesia_ram_tx_count,  mnesia_disk_tx_count,
+         msg_store_read_count, msg_store_write_count,
+         queue_index_journal_write_count,
+         queue_index_write_count, queue_index_read_count,
+         gc_num, gc_bytes_reclaimed, context_switches,
+         io_file_handle_open_attempt_count, io_file_handle_open_attempt_time]).
+
+-define(COARSE_NODE_NODE_STATS, [send_bytes, recv_bytes]).
+
+%% Normally 0 and no history means "has never happened, don't
+%% report". But for these things we do want to report even at 0 with
+%% no history.
+-define(ALWAYS_REPORT_STATS,
+        [io_read_time, io_write_time,
+         io_sync_time, sockets_used | ?QUEUE_MSG_COUNTS]).
+
+-define(COARSE_CONN_STATS, [recv_oct, send_oct]).
+
+-define(PROCESS_STATS, [reductions]).
+
+-type(event_type() :: queue_stats | queue_exchange_stats | vhost_stats
+                    | channel_queue_stats | channel_stats
+                    | channel_exchange_stats | exchange_stats
+                    | node_stats | node_node_stats | connection_stats).
+-type(type() :: deliver_get | fine_stats | queue_msg_rates | queue_msg_counts
+              | coarse_node_stats | coarse_node_node_stats | coarse_conn_stats
+              | process_stats).
+
+-type(table_name() :: atom()).
+
+%% TODO remove unused tables
+%% Not all events generate all metrics, so some of the tables may be deleted
+-define(AGGR_TABLES, [aggr_queue_stats_fine_stats,
+                      aggr_queue_stats_deliver_get,
+                      aggr_queue_stats_queue_msg_counts,
+                      aggr_queue_stats_queue_msg_rates,
+                      aggr_queue_stats_process_stats,
+                      aggr_queue_exchange_stats_fine_stats,
+                      aggr_vhost_stats_deliver_get,
+                      aggr_vhost_stats_fine_stats,
+                      aggr_vhost_stats_queue_msg_rates,
+                      aggr_vhost_stats_queue_msg_counts,
+                      aggr_vhost_stats_coarse_conn_stats,
+                      aggr_channel_queue_stats_deliver_get,
+                      aggr_channel_queue_stats_fine_stats,
+                      aggr_channel_queue_stats_queue_msg_counts,
+                      aggr_channel_stats_deliver_get,
+                      aggr_channel_stats_fine_stats,
+                      aggr_channel_stats_queue_msg_counts,
+                      aggr_channel_stats_process_stats,
+                      aggr_channel_exchange_stats_deliver_get,
+                      aggr_channel_exchange_stats_fine_stats,
+                      aggr_exchange_stats_fine_stats,
+                      aggr_node_stats_coarse_node_stats,
+                      aggr_node_node_stats_coarse_node_node_stats,
+                      aggr_connection_stats_coarse_conn_stats,
+                      aggr_connection_stats_process_stats
+                     ]).
+
+-define(INDEX_TABLES, [aggr_queue_stats_fine_stats_index,
+                       aggr_queue_stats_deliver_get_index,
+                       aggr_queue_stats_queue_msg_counts_index,
+                       aggr_queue_stats_queue_msg_rates_index,
+                       aggr_queue_stats_process_stats_index,
+                       aggr_queue_exchange_stats_fine_stats_index,
+                       aggr_vhost_stats_deliver_get_index,
+                       aggr_vhost_stats_fine_stats_index,
+                       aggr_vhost_stats_queue_msg_rates_index,
+                       aggr_vhost_stats_queue_msg_counts_index,
+                       aggr_vhost_stats_coarse_conn_stats_index,
+                       aggr_channel_queue_stats_deliver_get_index,
+                       aggr_channel_queue_stats_fine_stats_index,
+                       aggr_channel_queue_stats_queue_msg_counts_index,
+                       aggr_channel_stats_deliver_get_index,
+                       aggr_channel_stats_fine_stats_index,
+                       aggr_channel_stats_queue_msg_counts_index,
+                       aggr_channel_stats_process_stats_index,
+                       aggr_channel_exchange_stats_deliver_get_index,
+                       aggr_channel_exchange_stats_fine_stats_index,
+                       aggr_exchange_stats_fine_stats_index,
+                       aggr_node_stats_coarse_node_stats_index,
+                       aggr_node_node_stats_coarse_node_node_stats_index,
+                       aggr_connection_stats_coarse_conn_stats_index,
+                       aggr_connection_stats_process_stats_index
+                      ]).
+
+-define(KEY_INDEX_TABLES,
+        [aggr_queue_stats_fine_stats_key_index,
+         aggr_queue_stats_deliver_get_key_index,
+         aggr_queue_stats_queue_msg_counts_key_index,
+         aggr_queue_stats_queue_msg_rates_key_index,
+         aggr_queue_stats_process_stats_key_index,
+         aggr_queue_exchange_stats_fine_stats_key_index,
+         aggr_vhost_stats_deliver_get_key_index,
+         aggr_vhost_stats_fine_stats_key_index,
+         aggr_vhost_stats_queue_msg_rates_key_index,
+         aggr_vhost_stats_queue_msg_counts_key_index,
+         aggr_vhost_stats_coarse_conn_stats_key_index,
+         aggr_channel_queue_stats_deliver_get_key_index,
+         aggr_channel_queue_stats_fine_stats_key_index,
+         aggr_channel_queue_stats_queue_msg_counts_key_index,
+         aggr_channel_stats_deliver_get_key_index,
+         aggr_channel_stats_fine_stats_key_index,
+         aggr_channel_stats_queue_msg_counts_key_index,
+         aggr_channel_stats_process_stats_key_index,
+         aggr_channel_exchange_stats_deliver_get_key_index,
+         aggr_channel_exchange_stats_fine_stats_key_index,
+         aggr_exchange_stats_fine_stats_key_index,
+         aggr_node_stats_coarse_node_stats_key_index,
+         aggr_node_node_stats_coarse_node_node_stats_key_index,
+         aggr_connection_stats_coarse_conn_stats_key_index,
+         aggr_connection_stats_process_stats_key_index
+        ]).
+
+-define(PROC_STATS_TABLES,
+        [channel_stats, connection_stats]).
+
+%% Records are only used to retrieve the field position and to facilitate
+%% keeping track of the data
+-record(deliver_get, {deliver,
+                      deliver_no_ack,
+                      get,
+                      get_no_ack}).
+-record(fine_stats, {publish,
+                     publish_in,
+                     publish_out,
+                     ack,
+                     deliver_get,
+                     confirm,
+                     return_unroutable,
+                     redeliver}).
+-record(queue_msg_rates, {disk_reads,
+                          disk_writes}).
+-record(queue_msg_counts, {messages,
+                           messages_ready,
+                           messages_unacknowledged}).
+-record(coarse_node_stats, {mem_used,
+                            fd_used,
+                            sockets_used,
+                            proc_used,
+                            disk_free,
+                            io_read_count,
+                            io_read_bytes,
+                            io_read_time,
+                            io_write_count,
+                            io_write_bytes,
+                            io_write_time,
+                            io_sync_count,
+                            io_sync_time,
+                            io_seek_count,
+                            io_seek_time,
+                            io_reopen_count,
+                            mnesia_ram_tx_count,
+                            mnesia_disk_tx_count,
+                            msg_store_read_count,
+                            msg_store_write_count,
+                            queue_index_journal_write_count,
+                            queue_index_write_count,
+                            queue_index_read_count,
+                            gc_num,
+                            gc_bytes_reclaimed,
+                            context_switches,
+                            io_file_handle_open_attempt_count,
+                            io_file_handle_open_attempt_time}).
+-record(coarse_node_node_stats, {send_bytes,
+                                 recv_bytes}).
+-record(coarse_conn_stats, {recv_oct,
+                            send_oct}).
+-record(process_stats, {reductions}).
similarity index 89%
rename from rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt_test.hrl
rename to deps/rabbitmq_management/include/rabbit_mgmt_test.hrl
index 3d73c7839d4bf01ddf318321f6f1be0a554f1619..dca669d75bd932f828ac3e0e66877904d710963a 100644 (file)
@@ -7,6 +7,5 @@
 -define(BAD_REQUEST, 400).
 -define(NOT_AUTHORISED, 401).
 %%-define(NOT_FOUND, 404). Defined for AMQP by amqp_client.hrl (as 404)
--define(PREFIX, "http://localhost:15672/api").
 %% httpc seems to get racy when using HTTP 1.1
 -define(HTTPC_OPTS, [{version, "HTTP/1.0"}]).
similarity index 88%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/api/index.html
rename to deps/rabbitmq_management/priv/www/api/index.html
index a509a697b1f2e468dd3c9caaad819ef0f3059e88..1cb9ed5a46d13d140af255b20ce4fceff0500920 100644 (file)
@@ -255,6 +255,44 @@ Content-Length: 0</pre>
           "file".
         </td>
       </tr>
+      <tr>
+        <td>X</td>
+        <td></td>
+        <td></td>
+        <td>X</td>
+        <td class="path">/api/definitions/<i>vhost</i><br/>
+        </td>
+        <td>
+          The server definitions for a given virtual host -
+          exchanges, queues, bindings and policies.
+          POST to upload an existing set of definitions. Note that:
+          <ul>
+            <li>
+              The definitions are merged. Anything already existing on
+              the server but not in the uploaded definitions is
+              untouched.
+            </li>
+            <li>
+              Conflicting definitions on immutable objects (exchanges,
+              queues and bindings) will cause an error.
+            </li>
+            <li>
+              Conflicting definitions on mutable objects will cause
+              the object in the server to be overwritten with the
+              object from the definitions.
+            </li>
+            <li>
+              In the event of an error you will be left with a
+              part-applied set of definitions.
+            </li>
+          </ul>
+          For convenience you may upload a file from a browser to this
+          URI (i.e. you can use <code>multipart/form-data</code> as
+          well as <code>application/json</code>) in which case the
+          definitions should be uploaded as a form field named
+          "file".
+        </td>
+      </tr>
       <tr>
         <td>X</td>
         <td></td>
@@ -263,6 +301,14 @@ Content-Length: 0</pre>
         <td class="path">/api/connections</td>
         <td>A list of all open connections.</td>
       </tr>
+      <tr>
+        <td>X</td>
+        <td></td>
+        <td></td>
+        <td></td>
+        <td class="path">/api/vhosts/<i>vhost</i>/connections</td>
+        <td>A list of all open connections in a specific vhost.</td>
+      </tr>
       <tr>
         <td>X</td>
         <td></td>
@@ -293,6 +339,14 @@ Content-Length: 0</pre>
         <td class="path">/api/channels</td>
         <td>A list of all open channels.</td>
       </tr>
+      <tr>
+        <td>X</td>
+        <td></td>
+        <td></td>
+        <td></td>
+        <td class="path">/api/vhosts/<i>vhost</i>/channels</td>
+        <td>A list of all open channels in a specific vhost.</td>
+      </tr>
       <tr>
         <td>X</td>
         <td></td>
@@ -724,6 +778,36 @@ or:
           repeatedly pinged).
         </td>
       </tr>
+      <tr>
+        <td>X</td>
+        <td></td>
+        <td></td>
+        <td></td>
+        <td class="path">/api/healthchecks/node</td>
+        <td>
+          Runs basic healthchecks in the current node. Checks that the rabbit
+          application is running, channels and queues can be listed successfully, and
+          that no alarms are in effect. If everything is working correctly, it
+          returns HTTP status 200 with body: <pre>{"status":"ok"}</pre> If
+          anything fails, it returns HTTP status 200 with body
+          <pre>{"status":"failed","reason":"string"}</pre>
+        </td>
+      </tr>
+      <tr>
+        <td>X</td>
+        <td></td>
+        <td></td>
+        <td></td>
+        <td class="path">/api/healthchecks/node/<i>node</i></td>
+        <td>
+          Runs basic healthchecks in the given node. Checks that the rabbit
+          application is running, list_channels and list_queues succeed, and
+          that no alarms are raised. If everything is working correctly, it
+          returns HTTP status 200 with body: <pre>{"status":"ok"}</pre> If
+          anything fails, it returns HTTP status 200 with body
+          <pre>{"status":"failed","reason":"string"}</pre>
+        </td>
+      </tr>
     </table>
   </body>
 </html>
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/main.css
rename to deps/rabbitmq_management/priv/www/css/main.css
index 74a321d07f58ec45597e6de98eedbca8e48602e3..b00cb095ad8755bfa9e6572d09bc824e36531eaa 100644 (file)
@@ -40,6 +40,9 @@ div.box, div.section, div.section-hidden { overflow: auto; width: 100%; }
 .right { float: right; }
 .clear { clear: both; }
 
+.shortinput { width: 50px; text-align: right; }
+
+.help:after { content: '(?)'; }
 .help, .popup-options-link { color: #888; cursor: pointer; }
 .help:hover, .popup-options-link:hover { color: #444; }
 
similarity index 90%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/index.html
rename to deps/rabbitmq_management/priv/www/index.html
index 24b64d78f630a269a5d27321fa36f66a69858905..7ca790e53473916da033811f308aa9a0c87c6e07 100644 (file)
@@ -1,3 +1,5 @@
+<!doctype html>
+<meta http-equiv="X-UA-Compatible" content="IE=edge" />
 <html>
   <head>
     <title>RabbitMQ Management</title>
@@ -5,7 +7,7 @@
     <script src="js/jquery-1.6.4.min.js" type="text/javascript"></script>
     <script src="js/jquery.flot.min.js" type="text/javascript"></script>
     <script src="js/jquery.flot.time.min.js" type="text/javascript"></script>
-    <script src="js/sammy-0.6.0.min.js" type="text/javascript"></script>
+    <script src="js/sammy.min.js" type="text/javascript"></script>
     <script src="js/json2.js" type="text/javascript"></script>
     <script src="js/base64.js" type="text/javascript"></script>
     <script src="js/global.js" type="text/javascript"></script>
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/charts.js
rename to deps/rabbitmq_management/priv/www/js/charts.js
index 0ec370fb3219881ab2cb2774c9144e441019136e..cccbc4ca11fab80ed2af27e1e645f6b94aaafa1b 100644 (file)
@@ -29,6 +29,11 @@ function data_rates(id, stats) {
     return rates_chart_or_text(id, stats, items, fmt_rate_bytes, fmt_rate_bytes_axis, true, 'Data rates');
 }
 
+function data_reductions(id, stats) {
+    var items = [['Reductions', 'reductions']];
+    return rates_chart_or_text(id, stats, items, fmt_rate, fmt_rate_axis, true, 'Reductions (per second)', 'process-reductions');
+}
+
 function rates_chart_or_text(id, stats, items, fmt, axis_fmt, chart_rates,
                              heading, heading_help) {
     var prefix = chart_h3(id, heading, heading_help);
@@ -243,7 +248,8 @@ function render_charts() {
     });
 }
 
-var chart_colors = {full: ['#edc240', '#afd8f8', '#cb4b4b', '#4da74d', '#9440ed', '#666666', '#aaaaaa'],
+var chart_colors = {full: ['#edc240', '#afd8f8', '#cb4b4b', '#4da74d', '#9440ed', '#666666', '#aaaaaa', 
+                           '#7c79c3', '#8e6767', '#67808e', '#e5e4ae', '#4b4a55', '#bba0c1'],
                     node: ['#6ae26a', '#e24545']};
 
 var chart_chrome = {
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/dispatcher.js
rename to deps/rabbitmq_management/priv/www/js/dispatcher.js
index 4c2d6700102aca3a0515606e2392fcb5c191fb14..8cec43d01d0f8a452ae33f9fd0c040f281146cfe 100644 (file)
@@ -7,7 +7,8 @@ dispatcher_add(function(sammy) {
     sammy.get('#/', function() {
             var reqs = {'overview': {path:    '/overview',
                                      options: {ranges: ['lengths-over',
-                                                        'msg-rates-over']}}};
+                                                        'msg-rates-over']}},
+                        'vhosts': '/vhosts'};
             if (user_monitor) {
                 reqs['nodes'] = '/nodes';
             }
@@ -31,9 +32,11 @@ dispatcher_add(function(sammy) {
                    'node', '');
             });
 
-    path('#/connections',
-         {'connections': {path: '/connections', options: {sort:true}}},
-        'connections');
+    sammy.get('#/connections', function() {
+            renderConnections();
+        });
+
+
     sammy.get('#/connections/:name', function() {
             var name = esc(this.params['name']);
             render({'connection': {path:    '/connections/' + name,
@@ -52,17 +55,22 @@ dispatcher_add(function(sammy) {
            return false;
         });
 
-    path('#/channels', {'channels': {path: '/channels', options: {sort:true}}},
-         'channels');
+    sammy.get('#/channels', function() {
+            renderChannels();
+        });
+
     sammy.get('#/channels/:name', function() {
             render({'channel': {path:   '/channels/' + esc(this.params['name']),
-                                options:{ranges:['msg-rates-ch']}}},
+                                options:{ranges:['data-rates-ch','msg-rates-ch']}}},
                    'channel', '#/channels');
         });
 
-    path('#/exchanges', {'exchanges':  {path:    '/exchanges',
-                                        options: {sort:true,vhost:true}},
-                         'vhosts': '/vhosts'}, 'exchanges');
+    
+    sammy.get('#/exchanges', function() {
+            renderExchanges()
+        });
+
+
     sammy.get('#/exchanges/:vhost/:name', function() {
             var path = '/exchanges/' + esc(this.params['vhost']) + '/' + esc(this.params['name']);
             render({'exchange': {path:    path,
@@ -86,13 +94,15 @@ dispatcher_add(function(sammy) {
             return false;
         });
 
-    path('#/queues', {'queues':  {path:    '/queues',
-                                  options: {sort:true,vhost:true}},
-                      'vhosts': '/vhosts'}, 'queues');
+    sammy.get('#/queues', function() {
+                          renderQueues();
+            });
+
+    
     sammy.get('#/queues/:vhost/:name', function() {
             var path = '/queues/' + esc(this.params['vhost']) + '/' + esc(this.params['name']);
             render({'queue': {path:    path,
-                              options: {ranges:['lengths-q', 'msg-rates-q']}},
+                              options: {ranges:['lengths-q', 'msg-rates-q', 'data-rates-q']}},
                     'bindings': path + '/bindings'}, 'queue', '#/queues');
         });
     sammy.put('#/queues', function() {
@@ -147,6 +157,7 @@ dispatcher_add(function(sammy) {
                     'users': '/users/'},
                 'vhost', '#/vhosts');
         });
+
     sammy.put('#/vhosts', function() {
             if (sync_put(this, '/vhosts/:name')) {
                 update_vhosts();
similarity index 83%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/formatters.js
rename to deps/rabbitmq_management/priv/www/js/formatters.js
index b3c5bc9089467b78f4957cc3136dc65cf03580b9..a6cff8110262e613659cca38a72913dcfca6016f 100644 (file)
@@ -661,11 +661,8 @@ function fmt_highlight_filter(text) {
     }
 }
 
-function filter_ui(items) {
-    current_truncate = (current_truncate == null) ?
-        parseInt(get_pref('truncate')) : current_truncate;
+function filter_ui_pg(items, truncate, appendselect) {
     var total = items.length;
-
     if (current_filter != '') {
         var items2 = [];
         for (var i in items) {
@@ -699,24 +696,85 @@ function filter_ui(items) {
     var selected = current_filter == '' ? (items_desc(items.length)) :
         (items.length + ' of ' + items_desc(total) + ' selected');
 
-    var truncate_input = '<input type="text" id="truncate" value="' +
-        current_truncate + '">';
+    
+    selected += appendselect;
 
+    res += '<p id="filter-truncate"><span class="updatable">' + selected +
+        '</span>' + truncate + '</p>';
+    res += '</div>';
+
+    return res;
+}
+
+
+function filter_ui(items) {
+    current_truncate = (current_truncate == null) ?
+        parseInt(get_pref('truncate')) : current_truncate;
+     var truncate_input = '<input type="text" id="truncate" value="' +
+        current_truncate + '">';
+     var selected = '';    
     if (items.length > current_truncate) {
         selected += '<span id="filter-warning-show"> ' +
             '(only showing first</span> ';
         items.length = current_truncate;
     }
     else {
-        selected += ' (show at most ';
+        selected += ', page size up to ';
     }
-    res += '<p id="filter-truncate"><span class="updatable">' + selected +
-        '</span>' + truncate_input + ')</p>';
-    res += '</div>';
+   return filter_ui_pg(items, truncate_input, selected);
 
+}
+
+function paginate_header_ui(pages, context){
+     var res = '<h2 class="updatable">' ;
+     res += ' All ' + context +' (' + pages.total_count + ((pages.filtered_count != pages.total_count) ?   ' Filtered: ' + pages.filtered_count  : '') +  ')';
+     res += '</h2>'
     return res;
 }
 
+function pagiante_ui(pages, context){
+    var res = paginate_header_ui(pages, context);
+    res += '<div class="hider">';
+    res += '<h3>Pagination</h3>';
+    res += '<div class="filter">';
+    res += '<table class="updatable">';
+    res += '<tr>'
+    res += '<th><label for="'+ context +'-page">Page </label> <select id="'+ context +'-page" class="pagination_class pagination_class_select"  >';
+    var page =  fmt_page_number_request(context, pages.page);
+    if (pages.page_count > 0 &&  page > pages.page_count){
+           page = pages.page_count;
+           update_pages(context, page);
+           return;
+      };
+        for (var i = 1; i <= pages.page_count; i++) { ;
+           if (i == page) {;
+    res +=   ' <option selected="selected" value="'+ i + '">' + i + '</option>';
+              } else { ;
+    res +=    '<option value="' + i + '"> ' + i + '</option>';
+             } };
+    res += '</select> </th>';
+    res += '<th><label for="'+ context +'-pageof">of </label>  ' + pages.page_count +'</th>';
+    res += '<th><span><label for="'+ context +'-name"> - Filter: </label> <input id="'+ context +'-name"  data-page-start="1"  class="pagination_class pagination_class_input" type="text"' ;
+    res +=   'value = ' + fmt_filter_name_request(context, "") + '>' ;
+    res +=   '</input></th></span>' ;
+
+    res += '<th> <input type="checkbox" data-page-start="1" class="pagination_class pagination_class_checkbox" id="'+ context +'-filter-regex-mode"' ;
+        
+    res += fmt_regex_request(context, "") + '></input> <label for="filter-regex-mode">Regex</label> <span class="help" id="filter-regex">(?)</span></th>' ;  
+    
+    res +=' </table>' ;
+    res += '<p id="filter-truncate"><span class="updatable">';
+    res += '<span><label for="'+ context +'-pagesize"> Displaying ' + pages.item_count + '  item'+ ((pages.item_count > 1) ? 's' : '' ) + ' , page size up to: </label> ';
+    res +=       ' <input id="'+ context +'-pagesize" data-page-start="1" class="pagination_class shortinput pagination_class_input" type="text" ';
+    res +=   'value = "' +  fmt_page_size_request(context, pages.page_size) +'"';
+    res +=   'onkeypress = "return isNumberKey(event)"> </input></span></p>' ;
+    res += '</tr>'
+    res += '</div>'
+    res += '</div>'
+    return res;
+}
+
+
 function maybe_truncate(items) {
     var maximum = 500;
     var str = '';
@@ -734,7 +792,7 @@ function fmt_sort(display, sort) {
     var prefix = '';
     if (current_sort == sort) {
         prefix = '<span class="arrow">' +
-            (current_sort_reverse ? '&#9650; ' : '&#9660; ') +
+            (current_sort_reverse ? '&#9660; ' : '&#9650; ') +
             '</span>';
     }
     return '<a class="sort" sort="' + sort + '">' + prefix + display + '</a>';
@@ -798,4 +856,43 @@ function properties_size(obj) {
         if (obj.hasOwnProperty(k)) count++;
     }
     return count;
+}   
+
+function frm_default_value(template, defaultValue){
+    var store_value = get_pref(template);
+    var result = (((store_value == null) 
+      || (store_value == undefined) 
+      || (store_value == '')) ? defaultValue : 
+    store_value);
+
+   return ((result == undefined) ? defaultValue : result);
+}
+
+function fmt_page_number_request(template, defaultPage){
+     if  ((defaultPage == undefined) || (defaultPage <= 0)) 
+         defaultPage = 1;
+    return frm_default_value(template + '_current_page_number', defaultPage);
+}
+function fmt_page_size_request(template, defaultPageSize){
+    if  ((defaultPageSize == undefined) || (defaultPageSize < 0))
+        defaultPageSize = 100;
+    result = frm_default_value(template + '_current_page_size', defaultPageSize);
+    if (result > 500) result = 500; // max
+    return result;
+}
+
+function fmt_filter_name_request(template, defaultName){
+    return frm_default_value(template + '_current_filter_name', defaultName);
+}
+
+function fmt_regex_request(template, defaultName){
+    result = frm_default_value(template + '_current_regex', defaultName);
+    return result;
+}
+
+function isNumberKey(evt){
+    var charCode = (evt.which) ? evt.which : event.keyCode
+    if (charCode > 31 && (charCode < 48 || charCode > 57))
+        return false;
+    return true;
 }
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/global.js
rename to deps/rabbitmq_management/priv/www/js/global.js
index f2de0d98d6fd5c43fa975205b80c7b38d96775b1..29bce6a2a0a7a2f51fd0668448778c7efef9ac15 100644 (file)
@@ -41,8 +41,8 @@ var NAVIGATION = {'Overview':    ['#/',            "management"],
                   'Admin':
                     [{'Users':         ['#/users',    "administrator"],
                       'Virtual Hosts': ['#/vhosts',   "administrator"],
-                      'Policies':      ['#/policies', "policymaker"]},
-                     "policymaker"]
+                      'Policies':      ['#/policies', "management"]},
+                     "management"]
                  };
 
 var CHART_PERIODS = {'60|5':       'Last minute',
@@ -126,7 +126,7 @@ var COLUMNS =
                      ['memory',             'Memory',             true],
                      ['disk_space',         'Disk space',         true]],
       'General': [['uptime',     'Uptime',     false],
-                  ['rates_mode', 'Rates mode', false],
+                  ['rates_mode', 'Rates mode', true],
                   ['info',       'Info',       true]]}};
 
 ///////////////////////////////////////////////////////////////////////////
@@ -138,7 +138,7 @@ var COLUMNS =
 // All these are to do with hiding UI elements if
 var rates_mode;                  // ...there are no fine stats
 var user_administrator;          // ...user is not an admin
-var user_policymaker;            // ...user is not a policymaker
+var is_user_policymaker;         // ...user is not a policymaker
 var user_monitor;                // ...user cannot monitor
 var nodes_interesting;           // ...we are not in a cluster
 var vhosts_interesting;          // ...there is only one vhost
@@ -166,7 +166,7 @@ function setup_global_vars() {
     rates_mode = overview.rates_mode;
     user_tags = expand_user_tags(user.tags.split(","));
     user_administrator = jQuery.inArray("administrator", user_tags) != -1;
-    user_policymaker = jQuery.inArray("policymaker", user_tags) != -1;
+    is_user_policymaker = jQuery.inArray("policymaker", user_tags) != -1;
     user_monitor = jQuery.inArray("monitoring", user_tags) != -1;
     replace_content('login-details',
                     '<p>User: <b>' + fmt_escape_html(user.name) + '</b></p>' +
@@ -243,6 +243,7 @@ var current_sort_reverse = false;
 
 var current_filter = '';
 var current_filter_regex_on = false;
+
 var current_filter_regex;
 var current_truncate;
 
@@ -261,3 +262,7 @@ var update_counter = 0;
 // Holds chart data in between writing the div in an ejs and rendering
 // the chart.
 var chart_data = {};
+
+// Timestamp of the last time a UI requested a page that no longer
+// exists because items were deleted between refreshes.
+var last_page_out_of_range_error = 0;
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/help.js
rename to deps/rabbitmq_management/priv/www/js/help.js
index f50c19a38636a574c7c9704f492801e575a1676c..603a48d34463cbab186e111ebb5d7f9526aff1f2 100644 (file)
@@ -55,9 +55,15 @@ HELP = {
     'export-definitions':
     'The definitions consist of users, virtual hosts, permissions, parameters, exchanges, queues and bindings. They do not include the contents of queues or the cluster name. Exclusive queues will not be exported.',
 
+    'export-definitions-vhost':
+    'The definitions exported for a single virtual host consist of exchanges, queues, bindings and policies.',
+
     'import-definitions':
       'The definitions that are imported will be merged with the current definitions. If an error occurs during import, any changes made will not be rolled back.',
 
+    'import-definitions-vhost':
+    'For a single virtual host, only exchanges, queues, bindings and policies are imported.',
+
     'exchange-rates-incoming':
       'The incoming rate is the rate at which messages are published directly to this exchange.',
 
@@ -322,6 +328,18 @@ HELP = {
         <dd>Rate at which queue index segment files are written. </dd>\
       </dl>',
 
+    'gc-operations':
+    'Rate at which garbage collection operations take place on this node.',
+
+    'gc-bytes':
+    'Rate at which memory is reclaimed by the garbage collector on this node.',
+
+    'context-switches-operations':
+    'Rate at which runtime context switching takes place on this node.',
+
+    'process-reductions':
+    'Rate at which reductions take place on this process.',
+
     'foo': 'foo' // No comma.
 };
 
similarity index 82%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/main.js
rename to deps/rabbitmq_management/priv/www/js/main.js
index 8118f6215fe6c8451340a8fc11ae524cba8a000c..03a32740c2391cdfdf5f40122f469de19983d826 100644 (file)
@@ -33,6 +33,19 @@ function login_route () {
     // we've changed url.
 }
 
+function login_route_with_path() {
+  var params = ('' + this.params['splat']).split('/');
+  var user = params.shift();
+  var pass = params.shift();
+  var userpass = '' + user + ':' + pass,
+        location = window.location.href,
+        hash = window.location.hash;
+    set_auth_pref(decodeURIComponent(userpass));
+    location = location.substr(0, location.length - hash.length) + '#/' + params.join('/');
+    check_login();
+    window.location.replace(location);
+}
+
 function start_app_login() {
     app = new Sammy.Application(function () {
         this.put('#/login', function() {
@@ -41,7 +54,8 @@ function start_app_login() {
             set_auth_pref(username + ':' + password);
             check_login();
         });
-        this.get('#/login/:username/:password', login_route)
+        this.get('#/login/:username/:password', login_route);
+        this.get(/\#\/login\/(.*)/, login_route_with_path);
     });
     app.run();
     if (get_pref('auth') != null) {
@@ -83,7 +97,12 @@ function start_app() {
     // Note for when we upgrade: HashLocationProxy has become
     // DefaultLocationProxy in later versions, but otherwise the issue
     // remains.
-    Sammy.HashLocationProxy._interval = null;
+
+    // updated to the version  0.7.6 this _interval = null is fixed
+    // just leave the history here.
+    //Sammy.HashLocationProxy._interval = null;
+
+
     app = new Sammy.Application(dispatcher);
     app.run();
     var url = this.location.toString();
@@ -134,10 +153,13 @@ function update_vhosts() {
 
 function setup_extensions() {
     var extensions = JSON.parse(sync_get('/extensions'));
-    extension_count = extensions.length;
+    extension_count = 0;
     for (var i in extensions) {
         var extension = extensions[i];
-        dynamic_load(extension.javascript);
+        if ($.isPlainObject(extension) && extension.hasOwnProperty("javascript")) {
+            dynamic_load(extension.javascript);
+            extension_count++;
+        }
     }
 }
 
@@ -225,28 +247,30 @@ function update() {
 }
 
 function partial_update() {
-    if ($('.updatable').length > 0) {
-        if (update_counter >= 200) {
-            update_counter = 0;
-            full_refresh();
-            return;
-        }
-        with_update(function(html) {
-            update_counter++;
-            replace_content('scratch', html);
-            var befores = $('#main .updatable');
-            var afters = $('#scratch .updatable');
-            if (befores.length != afters.length) {
-                throw("before/after mismatch");
-            }
-            for (var i = 0; i < befores.length; i++) {
-                $(befores[i]).empty().append($(afters[i]).contents());
+    if (!$(".pagination_class").is(":focus")) {
+        if ($('.updatable').length > 0) {
+            if (update_counter >= 200) {
+                update_counter = 0;
+                full_refresh();
+                return;
             }
-            replace_content('scratch', '');
-            postprocess_partial();
-            render_charts();
-        });
-    }
+            with_update(function(html) {
+                update_counter++;
+                replace_content('scratch', html);
+                var befores = $('#main .updatable');
+                var afters = $('#scratch .updatable');
+                if (befores.length != afters.length) {
+                    throw("before/after mismatch");
+                }
+                for (var i = 0; i < befores.length; i++) {
+                    $(befores[i]).empty().append($(afters[i]).contents());
+                }
+                replace_content('scratch', '');
+                postprocess_partial();
+                render_charts();
+            });
+        }
+  }
 }
 
 function update_navigation() {
@@ -389,12 +413,17 @@ function apply_state(reqs) {
         }
         var req2;
         if (options['vhost'] != undefined && current_vhost != '') {
-            req2 = req + '/' + esc(current_vhost);
+            var indexPage = req.indexOf("?page=");
+            if (indexPage >- 1) {
+                               pageUrl = req.substr(indexPage);
+                               req2 = req.substr(0,indexPage) + '/' + esc(current_vhost) + pageUrl;
+            } else
+
+              req2 = req + '/' + esc(current_vhost);
         }
         else {
             req2 = req;
         }
-
         var qs = [];
         if (options['sort'] != undefined && current_sort != null) {
             qs.push('sort=' + current_sort);
@@ -421,8 +450,22 @@ function apply_state(reqs) {
                 qs.push(prefix + '_incr=' + parseInt(range[1]));
             }
         }
+        /* Unknown options are used as query parameters as is. */
+        Object.keys(options).forEach(function (key) {
+          /* Skip known keys we already handled and undefined parameters. */
+          if (key == 'vhost' || key == 'sort' || key == 'ranges')
+            return;
+          if (!key || options[key] == undefined)
+            return;
+
+          qs.push(esc(key) + '=' + esc(options[key]));
+        });
         qs = qs.join('&');
-        if (qs != '') qs = '?' + qs;
+        if (qs != '')
+            if (req2.indexOf("?page=") >- 1)
+            qs = '&' + qs;
+             else
+            qs = '?' + qs;
 
         reqs2[k] = req2 + qs;
     }
@@ -445,6 +488,10 @@ function show_popup(type, text, mode) {
     }
 
     hide();
+    if ($(cssClass).length && type === 'help' &&
+        $(cssClass).text().indexOf(text.replace(/<[^>]*>/g, '')) != -1 ) {
+        return;
+    }
     $('h1').after(format('error-popup', {'type': type, 'text': text}));
     if (mode == 'fade') {
         $(cssClass).fadeIn(200);
@@ -458,6 +505,17 @@ function show_popup(type, text, mode) {
     });
 }
 
+
+
+
+   function submit_import(form) {
+       var idx = $("select[name='vhost-upload'] option:selected").index()
+       var vhost = ((idx <=0 ) ? "" : "/" + esc($("select[name='vhost-upload'] option:selected").val()));
+       form.action ="api/definitions" + vhost + '?auth=' + get_pref('auth');
+       form.submit();
+     };
+
+
 function postprocess() {
     $('form.confirm').submit(function() {
             return confirm("Are you sure? This object cannot be recovered " +
@@ -477,13 +535,17 @@ function postprocess() {
             }
         });
     $('#download-definitions').click(function() {
-            var path = 'api/definitions?download=' +
+            var idx = $("select[name='vhost-download'] option:selected").index()
+            var vhost = ((idx <=0 ) ? "" : "/" + esc($("select[name='vhost-download'] option:selected").val()));
+            var path = 'api/definitions' + vhost + '?download=' +
                 esc($('#download-filename').val()) +
                 '&auth=' + get_pref('auth');
             window.location = path;
             setTimeout('app.run()');
             return false;
         });
+
+
     $('.update-manual').click(function() {
             update_manual($(this).attr('for'), $(this).attr('query'));
         });
@@ -508,7 +570,7 @@ function postprocess() {
         }
     });
     $('.help').die().live('click', function() {
-        help($(this).attr('id'))
+        help($(this).attr('id'));
     });
     $('.popup-options-link').die().live('click', function() {
         var remove = $('.popup-owner').length == 1 &&
@@ -556,10 +618,96 @@ function postprocess() {
     if (! user_administrator) {
         $('.administrator-only').remove();
     }
+
     update_multifields();
 }
 
+
+function url_pagination_template(template, defaultPage, defaultPageSize){
+   return  '/' + template + '?page=' + fmt_page_number_request(template, defaultPage) +
+                       '&page_size=' +  fmt_page_size_request(template, defaultPageSize) +
+                       '&name=' + fmt_filter_name_request(template, "") +
+                       '&use_regex=' + ((fmt_regex_request(template,"") == "checked" ? 'true' : 'false'));
+
+}
+
+
+function stored_page_info(template, page_start){
+    var pageSize = $('#' + template+'-pagesize').val();
+    var filterName = $('#' + template+'-name').val();
+
+    store_pref(template + '_current_page_number', page_start);
+    if (filterName != null && filterName != undefined) {
+        store_pref(template + '_current_filter_name', filterName);
+    }
+    var regex_on =  $("#" + template + "-filter-regex-mode").is(':checked');
+
+    if (regex_on != null && regex_on != undefined) {
+        store_pref(template + '_current_regex', regex_on ? "checked" : " " );
+    }
+
+
+    if (pageSize != null && pageSize != undefined) {
+        store_pref(template + '_current_page_size', pageSize);
+    }
+
+}
+
+function update_pages(template, page_start){
+     stored_page_info(template, page_start);
+     switch (template) {
+         case 'queues' : renderQueues(); break;
+         case 'exchanges' : renderExchanges(); break;
+         case 'connections' : renderConnections(); break;
+         case 'channels' : renderChannels(); break;
+     }
+}
+
+
+function renderQueues() {
+    render({'queues':  {path: url_pagination_template('queues', 1, 100),
+                        options: {sort:true, vhost:true, pagination:true}},
+                        'vhosts': '/vhosts'}, 'queues', '#/queues');
+}
+
+function renderExchanges() {
+    render({'exchanges':  {path: url_pagination_template('exchanges', 1, 100),
+                          options: {sort:true, vhost:true, pagination:true}},
+                         'vhosts': '/vhosts'}, 'exchanges', '#/exchanges');
+}
+
+function renderConnections() {
+    render({'connections': {path:  url_pagination_template('connections', 1, 100),
+                            options: {sort:true}}},
+                            'connections', '#/connections');
+}
+
+function renderChannels() {
+    render({'channels': {path:  url_pagination_template('channels', 1, 100),
+                        options: {sort:true}}},
+                        'channels', '#/channels');
+}
+
+
+function update_pages_from_ui(sender) {
+    update_pages(current_template, !!$(sender).attr('data-page-start') ? $(sender).attr('data-page-start') : $(sender).val());
+}
+
 function postprocess_partial() {
+    $('.pagination_class_input').keypress(function(e) {
+        if (e.keyCode == 13) {
+            update_pages_from_ui(this);
+        }
+    });
+
+    $('.pagination_class_checkbox').click(function(e) {
+        update_pages_from_ui(this);
+    });
+
+    $('.pagination_class_select').change(function(e) {
+        update_pages_from_ui(this);
+    });
+
     setup_visibility();
     $('.sort').click(function() {
             var sort = $(this).attr('sort');
@@ -572,7 +720,6 @@ function postprocess_partial() {
             }
             update();
         });
-    $('.help').html('(?)');
     // TODO remove this hack when we get rid of "updatable"
     if ($('#filter-warning-show').length > 0) {
         $('#filter-truncate').addClass('filter-warning');
@@ -873,6 +1020,7 @@ function with_req(method, path, body, fun) {
     var req = xmlHttpRequest();
     req.open(method, 'api' + path, true );
     req.setRequestHeader('authorization', auth_header());
+    req.setRequestHeader('x-vhost', current_vhost);
     req.onreadystatechange = function () {
         if (req.readyState == 4) {
             var ix = jQuery.inArray(req, outstanding_reqs);
@@ -966,7 +1114,29 @@ function check_bad_response(req, full_page_404) {
     else if (req.status >= 400 && req.status <= 404) {
         var reason = JSON.parse(req.responseText).reason;
         if (typeof(reason) != 'string') reason = JSON.stringify(reason);
-        show_popup('warn', reason);
+
+        var error = JSON.parse(req.responseText).error;
+        if (typeof(error) != 'string') error = JSON.stringify(error);
+
+        if (error == 'bad_request' || error == 'not_found') {
+            show_popup('warn', reason);
+        } else if (error == 'page_out_of_range') {
+            var seconds = 60;
+            if (last_page_out_of_range_error > 0)
+                    seconds = (new Date().getTime() - last_page_out_of_range_error.getTime())/1000;
+            if (seconds > 3) {
+                 Sammy.log('server reports page is out of range, redirecting to page 1');
+                 var contexts = ["queues", "exchanges", "connections", "channels"];
+                 var matches = /api\/(.*)\?/.exec(req.responseURL);
+                 if (matches != null && matches.length > 1) {
+                     contexts.forEach(function(item) {
+                         if (matches[1].indexOf(item) == 0) {update_pages(item, 1)};
+                     });
+                 } else update_pages(current_template, 1);
+
+                 last_page_out_of_range_error = new Date();
+            }
+        }
     }
     else if (req.status == 408) {
         update_status('timeout');
@@ -1178,7 +1348,7 @@ function keys(obj) {
     return ks;
 }
 
-// Don't use the jQuery AJAX support, it seemss to have trouble reporting
+// Don't use the jQuery AJAX support, it seems to have trouble reporting
 // server-down type errors.
 function xmlHttpRequest() {
     var res;
similarity index 62%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.js
rename to deps/rabbitmq_management/priv/www/js/sammy.js
index 4fcd72b4c3f7aeea237fe3bba14bc10eb3ec12b3..b0ff981c0621703a8df0307a49e6c540a54094af 100644 (file)
@@ -1,25 +1,41 @@
 // name: sammy
-// version: 0.6.0pre
-
-(function($) {
+// version: 0.7.6
+
+// Sammy.js / http://sammyjs.org
+
+(function(factory){
+  // Support module loading scenarios
+  if (typeof define === 'function' && define.amd){
+    // AMD Anonymous Module
+    define(['jquery'], factory);
+  } else {
+    // No module loader (plain <script> tag) - put directly in global namespace
+    jQuery.sammy = window.Sammy = factory(jQuery);
+  }
+})(function($){
 
   var Sammy,
       PATH_REPLACER = "([^\/]+)",
       PATH_NAME_MATCHER = /:([\w\d]+)/g,
-      QUERY_STRING_MATCHER = /\?([^#]*)$/,
+      QUERY_STRING_MATCHER = /\?([^#]*)?$/,
       // mainly for making `arguments` an Array
       _makeArray = function(nonarray) { return Array.prototype.slice.call(nonarray); },
       // borrowed from jQuery
       _isFunction = function( obj ) { return Object.prototype.toString.call(obj) === "[object Function]"; },
       _isArray = function( obj ) { return Object.prototype.toString.call(obj) === "[object Array]"; },
-      _decode = decodeURIComponent,
+      _isRegExp = function( obj ) { return Object.prototype.toString.call(obj) === "[object RegExp]"; },
+      _decode = function( str ) { return decodeURIComponent((str || '').replace(/\+/g, ' ')); },
+      _encode = encodeURIComponent,
       _escapeHTML = function(s) {
-        return s.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;');
+        return String(s).replace(/&(?!\w+;)/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;');
       },
       _routeWrapper = function(verb) {
-        return function(path, callback) { return this.route.apply(this, [verb, path, callback]); };
+        return function() {
+          return this.route.apply(this, [verb].concat(Array.prototype.slice.call(arguments)));
+        };
       },
       _template_cache = {},
+      _has_history = !!(window.history && history.pushState),
       loggers = [];
 
 
@@ -40,7 +56,7 @@
   //      // returns the app at #main or a new app
   //      Sammy('#main')
   //
-  //      // equivilent to "new Sammy.Application", except appends to apps
+  //      // equivalent to "new Sammy.Application", except appends to apps
   //      Sammy();
   //      Sammy(function() { ... });
   //
@@ -61,7 +77,7 @@
           app.use(plugin);
         });
       }
-      // if the selector changes make sure the refrence in Sammy.apps changes
+      // if the selector changes make sure the reference in Sammy.apps changes
       if (app.element_selector != selector) {
         delete Sammy.apps[selector];
       }
@@ -70,7 +86,7 @@
     }
   };
 
-  Sammy.VERSION = '0.6.0';
+  Sammy.VERSION = '0.7.6';
 
   // Add to the global logger pool. Takes a function that accepts an
   // unknown number of arguments and should print them or send them somewhere
   };
 
   if (typeof window.console != 'undefined') {
-    if (_isFunction(console.log.apply)) {
+    if (typeof window.console.log === 'function' && _isFunction(window.console.log.apply)) {
       Sammy.addLogger(function() {
-        window.console.log.apply(console, arguments);
+        window.console.log.apply(window.console, arguments);
       });
     } else {
       Sammy.addLogger(function() {
     makeArray: _makeArray,
     isFunction: _isFunction,
     isArray: _isArray
-  })
+  });
 
   // Sammy.Object is the base for all other Sammy classes. It provides some useful
   // functionality, including cloning, iterating, etc.
     // Does not render functions.
     // For example. Given this Sammy.Object:
     //
-    //    var s = new Sammy.Object({first_name: 'Sammy', last_name: 'Davis Jr.'});
-    //    s.toHTML() //=> '<strong>first_name</strong> Sammy<br /><strong>last_name</strong> Davis Jr.<br />'
+    //     var s = new Sammy.Object({first_name: 'Sammy', last_name: 'Davis Jr.'});
+    //     s.toHTML()
+    //     //=> '<strong>first_name</strong> Sammy<br /><strong>last_name</strong> Davis Jr.<br />'
     //
     toHTML: function() {
       var display = "";
 
     // Checks if the object has a value at `key` and that the value is not empty
     has: function(key) {
-      return this[key] && $.trim(this[key].toString()) != '';
+      return this[key] && $.trim(this[key].toString()) !== '';
     },
 
     // convenience method to join as many arguments as you want
     }
   });
 
-  // The HashLocationProxy is the default location proxy for all Sammy applications.
+
+  // Return whether the event targets this window.
+  Sammy.targetIsThisWindow = function targetIsThisWindow(event, tagName) {
+    var targetElement = $(event.target).closest(tagName);
+    if (targetElement.length === 0) { return true; }
+
+    var targetWindow = targetElement.attr('target');
+    if (!targetWindow || targetWindow === window.name || targetWindow === '_self') { return true; }
+    if (targetWindow === '_blank') { return false; }
+    if (targetWindow === 'top' && window === window.top) { return true; }
+    return false;
+  };
+
+
+  // The DefaultLocationProxy is the default location proxy for all Sammy applications.
   // A location proxy is a prototype that conforms to a simple interface. The purpose
   // of a location proxy is to notify the Sammy.Application its bound to when the location
-  // or 'external state' changes. The HashLocationProxy considers the state to be
-  // changed when the 'hash' (window.location.hash / '#') changes. It does this in two
-  // different ways depending on what browser you are using. The newest browsers
-  // (IE, Safari > 4, FF >= 3.6) support a 'onhashchange' DOM event, thats fired whenever
-  // the location.hash changes. In this situation the HashLocationProxy just binds
-  // to this event and delegates it to the application. In the case of older browsers
-  // a poller is set up to track changes to the hash. Unlike Sammy 0.3 or earlier,
-  // the HashLocationProxy allows the poller to be a global object, eliminating the
-  // need for multiple pollers even when thier are multiple apps on the page.
-  Sammy.HashLocationProxy = function(app, run_interval_every) {
+  // or 'external state' changes.
+  //
+  // The `DefaultLocationProxy` watches for changes to the path of the current window and
+  // is also able to set the path based on changes in the application. It does this by
+  // using different methods depending on what is available in the current browser. In
+  // the latest and greatest browsers it used the HTML5 History API and the `pushState`
+  // `popState` events/methods. This allows you to use Sammy to serve a site behind normal
+  // URI paths as opposed to the older default of hash (#) based routing. Because the server
+  // can interpret the changed path on a refresh or re-entry, though, it requires additional
+  // support on the server side. If you'd like to force disable HTML5 history support, please
+  // use the `disable_push_state` setting on `Sammy.Application`. If pushState support
+  // is enabled, `DefaultLocationProxy` also binds to all links on the page. If a link is clicked
+  // that matches the current set of routes, the URL is changed using pushState instead of
+  // fully setting the location and the app is notified of the change.
+  //
+  // If the browser does not have support for HTML5 History, `DefaultLocationProxy` automatically
+  // falls back to the older hash based routing. The newest browsers (IE, Safari > 4, FF >= 3.6)
+  // support a 'onhashchange' DOM event, thats fired whenever the location.hash changes.
+  // In this situation the DefaultLocationProxy just binds to this event and delegates it to
+  // the application. In the case of older browsers a poller is set up to track changes to the
+  // hash.
+  Sammy.DefaultLocationProxy = function(app, run_interval_every) {
     this.app = app;
     // set is native to false and start the poller immediately
     this.is_native = false;
+    this.has_history = _has_history;
     this._startPolling(run_interval_every);
   };
 
-  Sammy.HashLocationProxy.prototype = {
-
+  Sammy.DefaultLocationProxy.fullPath = function(location_obj) {
+   // Bypass the `window.location.hash` attribute.  If a question mark
+    // appears in the hash IE6 will strip it and all of the following
+    // characters from `window.location.hash`.
+    var matches = location_obj.toString().match(/^[^#]*(#.+)$/);
+    var hash = matches ? matches[1] : '';
+    return [location_obj.pathname, location_obj.search, hash].join('');
+  };
+$.extend(Sammy.DefaultLocationProxy.prototype , {
     // bind the proxy events to the current app.
     bind: function() {
-      var proxy = this, app = this.app;
+      var proxy = this, app = this.app, lp = Sammy.DefaultLocationProxy;
       $(window).bind('hashchange.' + this.app.eventNamespace(), function(e, non_native) {
         // if we receive a native hash change event, set the proxy accordingly
         // and stop polling
         if (proxy.is_native === false && !non_native) {
-          Sammy.log('native hash change exists, using');
           proxy.is_native = true;
-          clearInterval(Sammy.HashLocationProxy._interval);
+          window.clearInterval(lp._interval);
+          lp._interval = null;
         }
         app.trigger('location-changed');
       });
-      if (!Sammy.HashLocationProxy._bindings) {
-        Sammy.HashLocationProxy._bindings = 0;
+      if (_has_history && !app.disable_push_state) {
+        // bind to popstate
+        $(window).bind('popstate.' + this.app.eventNamespace(), function(e) {
+          app.trigger('location-changed');
+        });
+        // bind to link clicks that have routes
+        $(document).delegate('a', 'click.history-' + this.app.eventNamespace(), function (e) {
+          if (e.isDefaultPrevented() || e.metaKey || e.ctrlKey) {
+            return;
+          }
+          var full_path = lp.fullPath(this),
+            // Get anchor's host name in a cross browser compatible way.
+            // IE looses hostname property when setting href in JS
+            // with a relative URL, e.g. a.setAttribute('href',"/whatever").
+            // Circumvent this problem by creating a new link with given URL and
+            // querying that for a hostname.
+            hostname = this.hostname ? this.hostname : function (a) {
+              var l = document.createElement("a");
+              l.href = a.href;
+              return l.hostname;
+            }(this);
+
+          if (hostname == window.location.hostname &&
+              app.lookupRoute('get', full_path) &&
+              Sammy.targetIsThisWindow(e, 'a')) {
+            e.preventDefault();
+            proxy.setLocation(full_path);
+            return false;
+          }
+        });
+      }
+      if (!lp._bindings) {
+        lp._bindings = 0;
       }
-      Sammy.HashLocationProxy._bindings++;
+      lp._bindings++;
     },
 
     // unbind the proxy events from the current app
     unbind: function() {
       $(window).unbind('hashchange.' + this.app.eventNamespace());
-      Sammy.HashLocationProxy._bindings--;
-      if (Sammy.HashLocationProxy._bindings <= 0) {
-        clearInterval(Sammy.HashLocationProxy._interval);
+      $(window).unbind('popstate.' + this.app.eventNamespace());
+      $(document).undelegate('a', 'click.history-' + this.app.eventNamespace());
+      Sammy.DefaultLocationProxy._bindings--;
+      if (Sammy.DefaultLocationProxy._bindings <= 0) {
+        window.clearInterval(Sammy.DefaultLocationProxy._interval);
+        Sammy.DefaultLocationProxy._interval = null;
       }
     },
 
     // get the current location from the hash.
     getLocation: function() {
-     // Bypass the `window.location.hash` attribute.  If a question mark
-      // appears in the hash IE6 will strip it and all of the following
-      // characters from `window.location.hash`.
-      var matches = window.location.toString().match(/^[^#]*(#.+)$/);
-      return matches ? matches[1] : '';
+      return Sammy.DefaultLocationProxy.fullPath(window.location);
     },
 
     // set the current location to `new_location`
     setLocation: function(new_location) {
-      return (window.location = new_location);
+      if (/^([^#\/]|$)/.test(new_location)) { // non-prefixed url
+        if (_has_history && !this.app.disable_push_state) {
+          new_location = '/' + new_location;
+        } else {
+          new_location = '#!/' + new_location;
+        }
+      }
+      if (new_location != this.getLocation()) {
+        // HTML5 History exists and new_location is a full path
+        if (_has_history && !this.app.disable_push_state && /^\//.test(new_location)) {
+          history.pushState({ path: new_location }, window.title, new_location);
+          this.app.trigger('location-changed');
+        } else {
+          return (window.location = new_location);
+        }
+      }
     },
 
     _startPolling: function(every) {
       // set up interval
       var proxy = this;
-      if (!Sammy.HashLocationProxy._interval) {
+      if (!Sammy.DefaultLocationProxy._interval) {
         if (!every) { every = 10; }
         var hashCheck = function() {
-          current_location = proxy.getLocation();
-          if (!Sammy.HashLocationProxy._last_location ||
-            current_location != Sammy.HashLocationProxy._last_location) {
-            setTimeout(function() {
+          var current_location = proxy.getLocation();
+          if (typeof Sammy.DefaultLocationProxy._last_location == 'undefined' ||
+            current_location != Sammy.DefaultLocationProxy._last_location) {
+            window.setTimeout(function() {
               $(window).trigger('hashchange', [true]);
-            }, 13);
+            }, 0);
           }
-          Sammy.HashLocationProxy._last_location = current_location;
+          Sammy.DefaultLocationProxy._last_location = current_location;
         };
         hashCheck();
-        Sammy.HashLocationProxy._interval = setInterval(hashCheck, every);
+        Sammy.DefaultLocationProxy._interval = window.setInterval(hashCheck, every);
       }
     }
-  };
+  });
 
 
   // Sammy.Application is the Base prototype for defining 'applications'.
     if (_isFunction(app_function)) {
       app_function.apply(this, [this]);
     }
-    // set the location proxy if not defined to the default (HashLocationProxy)
+    // set the location proxy if not defined to the default (DefaultLocationProxy)
     if (!this._location_proxy) {
-      this.setLocationProxy(new Sammy.HashLocationProxy(this, this.run_interval_every));
+      this.setLocationProxy(new Sammy.DefaultLocationProxy(this, this.run_interval_every));
     }
     if (this.debug) {
       this.bindToAllEvents(function(e, data) {
 
     // An array of the default events triggered by the
     // application during its lifecycle
-    APP_EVENTS: ['run','unload','lookup-route','run-route','route-found','event-context-before','event-context-after','changed','error','check-form-submission','redirect'],
+    APP_EVENTS: ['run', 'unload', 'lookup-route', 'run-route', 'route-found', 'event-context-before', 'event-context-after', 'changed', 'error', 'check-form-submission', 'redirect', 'location-changed'],
 
     _last_route: null,
     _location_proxy: null,
     // When set to true, logs all of the default events using `log()`
     debug: false,
 
-    // When set to true, and the error() handler is not overriden, will actually
+    // When set to true, and the error() handler is not overridden, will actually
     // raise JS errors in routes (500) and when routes can't be found (404)
     raise_errors: false,
 
     // The time in milliseconds that the URL is queried for changes
     run_interval_every: 50,
 
+    // if using the `DefaultLocationProxy` setting this to true will force the app to use
+    // traditional hash based routing as opposed to the new HTML5 PushState support
+    disable_push_state: false,
+
     // The default template engine to use when using `partial()` in an
     // `EventContext`. `template_engine` can either be a string that
     // corresponds to the name of a method/helper on EventContext or it can be a function
     // that takes two arguments, the content of the unrendered partial and an optional
-    // JS object that contains interpolation data. Template engine is only called/refered
+    // JS object that contains interpolation data. Template engine is only called/referred
     // to if the extension of the partial is null or unknown. See `partial()`
     // for more information
     template_engine: null,
     },
 
     // returns a jQuery object of the Applications bound element.
-    $element: function() {
-      return $(this.element_selector);
+    $element: function(selector) {
+      return selector ? $(this.element_selector).find(selector) : $(this.element_selector);
     },
 
     // `use()` is the entry point for including Sammy plugins.
     // Any additional arguments are passed to the app function sequentially.
     //
     // For much more detail about plugins, check out:
-    // http://code.quirkey.com/sammy/doc/plugins.html
+    // [http://sammyjs.org/docs/plugins](http://sammyjs.org/docs/plugins)
     //
     // ### Example
     //
     //      });
     //
     // If plugin is passed as a string it assumes your are trying to load
-    // Sammy."Plugin". This is the prefered way of loading core Sammy plugins
+    // Sammy."Plugin". This is the preferred way of loading core Sammy plugins
     // as it allows for better error-messaging.
     //
     // ### Example
     },
 
     // Sets the location proxy for the current app. By default this is set to
-    // a new `Sammy.HashLocationProxy` on initialization. However, you can set
+    // a new `Sammy.DefaultLocationProxy` on initialization. However, you can set
     // the location_proxy inside your app function to give your app a custom
-    // location mechanism. See `Sammy.HashLocationProxy` and `Sammy.DataLocationProxy`
+    // location mechanism. See `Sammy.DefaultLocationProxy` and `Sammy.DataLocationProxy`
     // for examples.
     //
     // `setLocationProxy()` takes an initialized location proxy.
       }
     },
 
+    // provide log() override for inside an app that includes the relevant application element_selector
+    log: function() {
+      Sammy.log.apply(Sammy, Array.prototype.concat.apply([this.element_selector],arguments));
+    },
+
+
     // `route()` is the main method for defining routes within an application.
-    // For great detail on routes, check out: http://code.quirkey.com/sammy/doc/routes.html
+    // For great detail on routes, check out:
+    // [http://sammyjs.org/docs/routes](http://sammyjs.org/docs/routes)
     //
     // This method also has aliases for each of the different verbs (eg. `get()`, `post()`, etc.)
     //
     //    the first argument is the path, the second is the callback and the verb
     //    is assumed to be 'any'.
     // * `path` A Regexp or a String representing the path to match to invoke this verb.
-    // * `callback` A Function that is called/evaluated whent the route is run see: `runRoute()`.
+    // * `callback` A Function that is called/evaluated when the route is run see: `runRoute()`.
     //    It is also possible to pass a string as the callback, which is looked up as the name
     //    of a method on the application.
     //
-    route: function(verb, path, callback) {
-      var app = this, param_names = [], add_route;
+    route: function(verb, path) {
+      var app = this, param_names = [], add_route, path_match, callback = Array.prototype.slice.call(arguments,2);
 
       // if the method signature is just (path, callback)
       // assume the verb is 'any'
-      if (!callback && _isFunction(path)) {
+      if (callback.length === 0 && _isFunction(path)) {
+        callback = [path];
         path = verb;
-        callback = path;
         verb = 'any';
       }
 
           param_names.push(path_match[1]);
         }
         // replace with the path replacement
-        path = new RegExp("^" + path.replace(PATH_NAME_MATCHER, PATH_REPLACER) + "$");
-      }
-      // lookup callback
-      if (typeof callback == 'string') {
-        callback = app[callback];
+        path = new RegExp(path.replace(PATH_NAME_MATCHER, PATH_REPLACER) + "$");
       }
+      // lookup callbacks
+      $.each(callback,function(i,cb){
+        if (typeof(cb) === 'string') {
+          callback[i] = app[cb];
+        }
+      });
 
       add_route = function(with_verb) {
         var r = {verb: with_verb, path: path, callback: callback, param_names: param_names};
     //
     // ### Example
     //
-    //    var app = $.sammy(function() {
+    //      var app = $.sammy(function() {
     //
-    //      this.mapRoutes([
-    //          ['get', '#/', function() { this.log('index'); }],
-    //          // strings in callbacks are looked up as methods on the app
-    //          ['post', '#/create', 'addUser'],
-    //          // No verb assumes 'any' as the verb
-    //          [/dowhatever/, function() { this.log(this.verb, this.path)}];
-    //        ]);
-    //    })
+    //        this.mapRoutes([
+    //            ['get', '#/', function() { this.log('index'); }],
+    //            // strings in callbacks are looked up as methods on the app
+    //            ['post', '#/create', 'addUser'],
+    //            // No verb assumes 'any' as the verb
+    //            [/dowhatever/, function() { this.log(this.verb, this.path)}];
+    //          ]);
+    //      });
     //
     mapRoutes: function(route_array) {
       var app = this;
       return ['sammy-app', this.namespace].join('-');
     },
 
-    // Works just like `jQuery.fn.bind()` with a couple noteable differences.
+    // Works just like `jQuery.fn.bind()` with a couple notable differences.
     //
     // * It binds all events to the application element
     // * All events are bound within the `eventNamespace()`
     // * Events are not actually bound until the application is started with `run()`
     // * callbacks are evaluated within the context of a Sammy.EventContext
     //
-    // See http://code.quirkey.com/sammy/docs/events.html for more info.
-    //
     bind: function(name, data, callback) {
       var app = this;
       // build the callback
     // that take a single argument `callback` which is the entire route
     // execution path wrapped up in a closure. This means you can decide whether
     // or not to proceed with execution by not invoking `callback` or,
-    // more usefuly wrapping callback inside the result of an asynchronous execution.
+    // more usefully wrapping callback inside the result of an asynchronous execution.
     //
     // ### Example
     //
       return this;
     },
 
+    // Adds a onComplete function to the application. onComplete functions are executed
+    // at the end of a chain of route callbacks, if they call next(). Unlike after,
+    // which is called as soon as the route is complete, onComplete is like a final next()
+    // for all routes, and is thus run asynchronously
+    //
+    // ### Example
+    //
+    //      app.get('/chain',function(context,next) {
+    //          console.log('chain1');
+    //          next();
+    //      },function(context,next) {
+    //          console.log('chain2');
+    //          next();
+    //      });
+    //
+    //      app.get('/link',function(context,next) {
+    //          console.log('link1');
+    //          next();
+    //      },function(context,next) {
+    //          console.log('link2');
+    //          next();
+    //      });
+    //
+    //      app.onComplete(function() {
+    //          console.log("Running finally");
+    //      });
+    //
+    // If you go to '/chain', you will get the following messages:
+    //
+    //      chain1
+    //      chain2
+    //      Running onComplete
+    //
+    //
+    // If you go to /link, you will get the following messages:
+    //
+    //      link1
+    //      link2
+    //      Running onComplete
+    //
+    //
+    // It really comes to play when doing asynchronous:
+    //
+    //      app.get('/chain',function(context,next) {
+    //        $.get('/my/url',function() {
+    //          console.log('chain1');
+    //          next();
+    //        });
+    //      },function(context,next) {
+    //        console.log('chain2');
+    //        next();
+    //      });
+    //
+    onComplete: function(callback) {
+      this._onComplete = callback;
+      return this;
+    },
+
     // Returns `true` if the current application is running.
     isRunning: function() {
       return this._running;
     //
     // ### Example
     //
-    //    var app = $.sammy(function() {
+    //     var app = $.sammy(function() {
     //
-    //      helpers({
-    //        upcase: function(text) {
-    //         return text.toString().toUpperCase();
-    //        }
-    //      });
+    //       helpers({
+    //         upcase: function(text) {
+    //          return text.toString().toUpperCase();
+    //         }
+    //       });
     //
-    //      get('#/', function() { with(this) {
-    //        // inside of this context I can use the helpers
-    //        $('#main').html(upcase($('#main').text());
-    //      }});
+    //       get('#/', function() { with(this) {
+    //         // inside of this context I can use the helpers
+    //         $('#main').html(upcase($('#main').text());
+    //       }});
     //
-    //    });
+    //     });
     //
     //
     // ### Arguments
     //
     // ### Example
     //
-    //    var app = $.sammy(function() { ... }); // your application
-    //    $(function() { // document.ready
+    //     var app = $.sammy(function() { ... }); // your application
+    //     $(function() { // document.ready
     //        app.run();
     //     });
     //
       this._running = true;
       // set last location
       this.last_location = null;
-      if (this.getLocation() == '' && typeof start_url != 'undefined') {
+      if (!(/\#(.+)/.test(this.getLocation())) && typeof start_url != 'undefined') {
         this.setLocation(start_url);
       }
       // check url
 
       // bind to submit to capture post/put/delete routes
       this.bind('submit', function(e) {
+        if ( !Sammy.targetIsThisWindow(e, 'form') ) { return true; }
         var returned = app._checkFormSubmission($(e.target).closest('form'));
         return (returned === false) ? e.preventDefault() : false;
       });
 
       // bind unload to body unload
-      $(window).bind('beforeunload', function() {
+      $(window).bind('unload', function() {
         app.unload();
       });
 
     },
 
     // The opposite of `run()`, un-binds all event listeners and intervals
-    // `run()` Automaticaly binds a `onunload` event to run this when
+    // `run()` Automatically binds a `onunload` event to run this when
     // the document is closed.
     unload: function() {
       if (!this.isRunning()) { return false; }
       return this;
     },
 
+    // Not only runs `unbind` but also destroys the app reference.
+    destroy: function() {
+      this.unload();
+      delete Sammy.apps[this.element_selector];
+      return this;
+    },
+
     // Will bind a single callback function to every event that is already
     // being listened to in the app. This includes all the `APP_EVENTS`
     // as well as any custom events defined with `bind()`.
       });
       // next, bind to listener names (only if they dont exist in APP_EVENTS)
       $.each(this.listeners.keys(true), function(i, name) {
-        if (app.APP_EVENTS.indexOf(name) == -1) {
+        if ($.inArray(name, app.APP_EVENTS) == -1) {
           app.bind(name, callback);
         }
       });
     // Given a verb and a String path, will return either a route object or false
     // if a matching route can be found within the current defined set.
     lookupRoute: function(verb, path) {
-      var app = this, routed = false;
-      this.trigger('lookup-route', {verb: verb, path: path});
+      var app = this, routed = false, i = 0, l, route;
       if (typeof this.routes[verb] != 'undefined') {
-        $.each(this.routes[verb], function(i, route) {
+        l = this.routes[verb].length;
+        for (; i < l; i++) {
+          route = this.routes[verb][i];
           if (app.routablePath(path).match(route.path)) {
             routed = route;
-            return false;
+            break;
           }
-        });
+        }
       }
       return routed;
     },
     // possible URL params and then invokes the route's callback within a new
     // `Sammy.EventContext`. If the route can not be found, it calls
     // `notFound()`. If `raise_errors` is set to `true` and
-    // the `error()` has not been overriden, it will throw an actual JS
+    // the `error()` has not been overridden, it will throw an actual JS
     // error.
     //
     // You probably will never have to call this directly.
           befores,
           before,
           callback_args,
+          path_params,
           final_returned;
 
-      this.log('runRoute', [verb, path].join(' '));
+      if (this.debug) {
+        this.log('runRoute', [verb, path].join(' '));
+      }
+
       this.trigger('run-route', {verb: verb, path: path, params: params});
       if (typeof params == 'undefined') { params = {}; }
 
         arounds = this.arounds.slice(0);
         befores = this.befores.slice(0);
         // set the callback args to the context + contents of the splat
-        callback_args = [context].concat(params.splat);
+        callback_args = [context];
+        if (params.splat) {
+          callback_args = callback_args.concat(params.splat);
+        }
         // wrap the route up with the before filters
         wrapped_route = function() {
-          var returned;
+          var returned, i, nextRoute;
           while (befores.length > 0) {
             before = befores.shift();
             // check the options
           }
           app.last_route = route;
           context.trigger('event-context-before', {context: context});
-          returned = route.callback.apply(context, callback_args);
+          // run multiple callbacks
+          if (typeof(route.callback) === "function") {
+            route.callback = [route.callback];
+          }
+          if (route.callback && route.callback.length) {
+            i = -1;
+            nextRoute = function() {
+              i++;
+              if (route.callback[i]) {
+                returned = route.callback[i].apply(context,callback_args);
+              } else if (app._onComplete && typeof(app._onComplete) === "function") {
+                app._onComplete(context);
+              }
+            };
+            callback_args.push(nextRoute);
+            nextRoute();
+          }
           context.trigger('event-context-after', {context: context});
           return returned;
         };
     //     // match against a path string
     //     app.contextMatchesOptions(context, '#/mypath'); //=> true
     //     app.contextMatchesOptions(context, '#/otherpath'); //=> false
-    //     // equivilent to
+    //     // equivalent to
     //     app.contextMatchesOptions(context, {only: {path:'#/mypath'}}); //=> true
     //     app.contextMatchesOptions(context, {only: {path:'#/otherpath'}}); //=> false
     //     // match against a path regexp
     //     // match all except a path
     //     app.contextMatchesOptions(context, {except: {path:'#/otherpath'}}); //=> true
     //     app.contextMatchesOptions(context, {except: {path:'#/mypath'}}); //=> false
+    //     // match all except a verb and a path
+    //     app.contextMatchesOptions(context, {except: {path:'#/otherpath', verb:'post'}}); //=> true
+    //     app.contextMatchesOptions(context, {except: {path:'#/mypath', verb:'post'}}); //=> true
+    //     app.contextMatchesOptions(context, {except: {path:'#/mypath', verb:'get'}}); //=> false
+    //     // match multiple paths
+    //     app.contextMatchesOptions(context, {path: ['#/mypath', '#/otherpath']}); //=> true
+    //     app.contextMatchesOptions(context, {path: ['#/otherpath', '#/thirdpath']}); //=> false
+    //     // equivalent to
+    //     app.contextMatchesOptions(context, {only: {path: ['#/mypath', '#/otherpath']}}); //=> true
+    //     app.contextMatchesOptions(context, {only: {path: ['#/otherpath', '#/thirdpath']}}); //=> false
+    //     // match all except multiple paths
+    //     app.contextMatchesOptions(context, {except: {path: ['#/mypath', '#/otherpath']}}); //=> false
+    //     app.contextMatchesOptions(context, {except: {path: ['#/otherpath', '#/thirdpath']}}); //=> true
+    //     // match all except multiple paths and verbs
+    //     app.contextMatchesOptions(context, {except: {path: ['#/mypath', '#/otherpath'], verb: ['get', 'post']}}); //=> false
+    //     app.contextMatchesOptions(context, {except: {path: ['#/otherpath', '#/thirdpath'], verb: ['get', 'post']}}); //=> true
     //
     contextMatchesOptions: function(context, match_options, positive) {
-      // empty options always match
       var options = match_options;
-      if (typeof options === 'undefined' || options == {}) {
-        return true;
+      // normalize options
+      if (typeof options === 'string' || _isRegExp(options)) {
+        options = {path: options};
       }
       if (typeof positive === 'undefined') {
         positive = true;
       }
-      // normalize options
-      if (typeof options === 'string' || _isFunction(options.test)) {
-        options = {path: options};
+      // empty options always match
+      if ($.isEmptyObject(options)) {
+        return true;
+      }
+      // Do we have to match against multiple paths?
+      if (_isArray(options.path)){
+        var results, numopt, opts, len;
+        results = [];
+        for (numopt = 0, len = options.path.length; numopt < len; numopt += 1) {
+          opts = $.extend({}, options, {path: options.path[numopt]});
+          results.push(this.contextMatchesOptions(context, opts));
+        }
+        var matched = $.inArray(true, results) > -1 ? true : false;
+        return positive ? matched : !matched;
       }
       if (options.only) {
         return this.contextMatchesOptions(context, options.only, true);
       }
       var path_matched = true, verb_matched = true;
       if (options.path) {
-        // wierd regexp test
-        if (_isFunction(options.path.test)) {
-          path_matched = options.path.test(context.path);
-        } else {
-          path_matched = (options.path.toString() === context.path);
+        if (!_isRegExp(options.path)) {
+          options.path = new RegExp(options.path.toString() + '$');
         }
+        path_matched = options.path.test(context.path);
       }
       if (options.verb) {
-        verb_matched = options.verb === context.verb;
+        if(typeof options.verb === 'string') {
+          verb_matched = options.verb === context.verb;
+        } else {
+          verb_matched = options.verb.indexOf(context.verb) > -1;
+        }
       }
       return positive ? (verb_matched && path_matched) : !(verb_matched && path_matched);
     },
 
 
     // Delegates to the `location_proxy` to get the current location.
-    // See `Sammy.HashLocationProxy` for more info on location proxies.
+    // See `Sammy.DefaultLocationProxy` for more info on location proxies.
     getLocation: function() {
       return this._location_proxy.getLocation();
     },
 
     // Delegates to the `location_proxy` to set the current location.
-    // See `Sammy.HashLocationProxy` for more info on location proxies.
+    // See `Sammy.DefaultLocationProxy` for more info on location proxies.
     //
     // ### Arguments
     //
     //
     // ### Example
     //
-    //    var app = $.sammy(function() {
+    //      var app = $.sammy(function() {
     //
-    //      // implements a 'fade out'/'fade in'
-    //      this.swap = function(content) {
-    //        this.$element().hide('slow').html(content).show('slow');
-    //      }
+    //        // implements a 'fade out'/'fade in'
+    //        this.swap = function(content, callback) {
+    //          var context = this;
+    //          context.$element().fadeOut('slow', function() {
+    //            context.$element().html(content);
+    //            context.$element().fadeIn('slow', function() {
+    //              if (callback) {
+    //                callback.apply();
+    //              }
+    //            });
+    //          });
+    //        };
     //
-    //      get('#/', function() {
-    //        this.partial('index.html.erb') // will fade out and in
     //      });
     //
-    //    });
-    //
-    swap: function(content) {
-      return this.$element().html(content);
+    swap: function(content, callback) {
+      var $el = this.$element().html(content);
+      if (_isFunction(callback)) { callback(content); }
+      return $el;
     },
 
     // a simple global cache for templates. Uses the same semantics as
     // `Sammy.Cache` and `Sammy.Storage` so can easily be replaced with
-    // a persistant storage that lasts beyond the current request.
+    // a persistent storage that lasts beyond the current request.
     templateCache: function(key, value) {
       if (typeof value != 'undefined') {
         return _template_cache[key] = value;
       }
     },
 
-    // This thows a '404 Not Found' error by invoking `error()`.
+    // clear the templateCache
+    clearTemplateCache: function() {
+      return (_template_cache = {});
+    },
+
+    // This throws a '404 Not Found' error by invoking `error()`.
     // Override this method or `error()` to provide custom
     // 404 behavior (i.e redirecting to / or showing a warning)
     notFound: function(verb, path) {
       // get current location
       location = this.getLocation();
       // compare to see if hash has changed
-      if (location != this.last_location) {
+      if (!this.last_location || this.last_location[0] != 'get' || this.last_location[1] != location) {
         // reset last location
-        this.last_location = location;
+        this.last_location = ['get', location];
         // lookup route for current hash
         returned = this.runRoute('get', location);
       }
     },
 
     _getFormVerb: function(form) {
-      var $form = $(form), verb;
+      var $form = $(form), verb, $_method;
       $_method = $form.find('input[name="_method"]');
       if ($_method.length > 0) { verb = $_method.val(); }
       if (!verb) { verb = $form[0].getAttribute('method'); }
+      if (!verb || verb === '') { verb = 'get'; }
       return $.trim(verb.toString().toLowerCase());
     },
 
       var $form, path, verb, params, returned;
       this.trigger('check-form-submission', {form: form});
       $form = $(form);
-      path  = $form.attr('action');
+      path  = $form.attr('action') || '';
       verb  = this._getFormVerb($form);
-      if (!verb || verb == '') { verb = 'get'; }
-      this.log('_checkFormSubmission', $form, path, verb);
+
+      if (this.debug) {
+        this.log('_checkFormSubmission', $form, path, verb);
+      }
+
       if (verb === 'get') {
-        this.setLocation(path + '?' + $form.serialize());
+        params = this._serializeFormParams($form);
+        if (params !== '') { path += '?' + params; }
+        this.setLocation(path);
         returned = false;
       } else {
         params = $.extend({}, this._parseFormParams($form));
         returned = this.runRoute(verb, path, params, form.get(0));
-      };
+      }
       return (typeof returned == 'undefined') ? false : returned;
     },
 
+    _serializeFormParams: function($form) {
+       var queryString = "",
+         fields = $form.serializeArray(),
+         i;
+       if (fields.length > 0) {
+         queryString = this._encodeFormPair(fields[0].name, fields[0].value);
+         for (i = 1; i < fields.length; i++) {
+           queryString = queryString + "&" + this._encodeFormPair(fields[i].name, fields[i].value);
+         }
+       }
+       return queryString;
+    },
+
+    _encodeFormPair: function(name, value){
+      return _encode(name) + "=" + _encode(value);
+    },
+
     _parseFormParams: function($form) {
       var params = {},
           form_fields = $form.serializeArray(),
       var params = {}, parts, pairs, pair, i;
 
       parts = path.match(QUERY_STRING_MATCHER);
-      if (parts) {
+      if (parts && parts[1]) {
         pairs = parts[1].split('&');
         for (i = 0; i < pairs.length; i++) {
           pair = pairs[i].split('=');
-          params = this._parseParamPair(params, _decode(pair[0]), _decode(pair[1]));
+          params = this._parseParamPair(params, _decode(pair[0]), _decode(pair[1] || ""));
         }
       }
       return params;
     },
 
     _parseParamPair: function(params, key, value) {
-      if (params[key]) {
+      if (typeof params[key] !== 'undefined') {
         if (_isArray(params[key])) {
           params[key].push(value);
         } else {
   });
 
   // `Sammy.RenderContext` is an object that makes sequential template loading,
-  // rendering and interpolation seamless even when dealing with asyncronous
+  // rendering and interpolation seamless even when dealing with asynchronous
   // operations.
   //
   // `RenderContext` objects are not usually created directly, rather they are
-  // instatiated from an `Sammy.EventContext` by using `render()`, `load()` or
+  // instantiated from an `Sammy.EventContext` by using `render()`, `load()` or
   // `partial()` which all return `RenderContext` objects.
   //
   // `RenderContext` methods always returns a modified `RenderContext`
   // The core magic is in the `then()` method which puts the callback passed as
   // an argument into a queue to be executed once the previous callback is complete.
   // All the methods of `RenderContext` are wrapped in `then()` which allows you
-  // to queue up methods by chaining, but maintaing a guarunteed execution order
+  // to queue up methods by chaining, but maintaining a guaranteed execution order
   // even with remote calls to fetch templates.
   //
   Sammy.RenderContext = function(event_context) {
     this.waiting          = false;
   };
 
-  $.extend(Sammy.RenderContext.prototype, {
+  Sammy.RenderContext.prototype = $.extend({}, Sammy.Object.prototype, {
 
     // The "core" of the `RenderContext` object, adds the `callback` to the
     // queue. If the context is `waiting` (meaning an async operation is happening)
     // is executed immediately.
     //
     // The value returned from the callback is stored in `content` for the
-    // subsiquent operation. If you return `false`, the queue will pause, and
+    // subsequent operation. If you return `false`, the queue will pause, and
     // the next callback in the queue will not be executed until `next()` is
-    // called. This allows for the guarunteed order of execution while working
+    // called. This allows for the guaranteed order of execution while working
     // with async operations.
     //
+    // If then() is passed a string instead of a function, the string is looked
+    // up as a helper method on the event context.
+    //
     // ### Example
     //
     //      this.get('#/', function() {
     //      });
     //
     then: function(callback) {
-      if (_isFunction(callback)) {
-        var context = this;
-        if (this.waiting) {
-          this.callbacks.push(callback);
+      if (!_isFunction(callback)) {
+        // if a string is passed to then, assume we want to call
+        // a helper on the event context in its context
+        if (typeof callback === 'string' && callback in this.event_context) {
+          var helper = this.event_context[callback];
+          callback = function(content) {
+            return helper.apply(this.event_context, [content]);
+          };
         } else {
-          this.wait();
-          setTimeout(function() {
-            var returned = callback.apply(context, [context.content, context.previous_content]);
-            if (returned !== false) {
-              context.next(returned);
-            }
-          }, 13);
+          return this;
         }
       }
+      var context = this;
+      if (this.waiting) {
+        this.callbacks.push(callback);
+      } else {
+        this.wait();
+        window.setTimeout(function() {
+          var returned = callback.apply(context, [context.content, context.previous_content]);
+          if (returned !== false) {
+            context.next(returned);
+          }
+        }, 0);
+      }
       return this;
     },
 
     },
 
     // Load a template into the context.
-    // The `location` can either be a string specifiying the remote path to the
+    // The `location` can either be a string specifying the remote path to the
     // file, a jQuery object, or a DOM element.
     //
     // No interpolation happens by default, the content is stored in
     load: function(location, options, callback) {
       var context = this;
       return this.then(function() {
-        var should_cache, cached;
+        var should_cache, cached, is_json, location_array;
         if (_isFunction(options)) {
           callback = options;
           options = {};
         }
         if (callback) { this.then(callback); }
         if (typeof location === 'string') {
-          // its a path
-          should_cache = !(options.cache === false);
+          // it's a path
+          is_json      = (location.match(/\.json(\?|$)/) || options.json);
+          should_cache = is_json ? options.cache === true : options.cache !== false;
+          context.next_engine = context.event_context.engineFor(location);
           delete options.cache;
+          delete options.json;
           if (options.engine) {
             context.next_engine = options.engine;
             delete options.engine;
           $.ajax($.extend({
             url: location,
             data: {},
+            dataType: is_json ? 'json' : 'text',
             type: 'get',
             success: function(data) {
               if (should_cache) {
           }, options));
           return false;
         } else {
-          // its a dom/jQuery
+          // it's a dom/jQuery
           if (location.nodeType) {
             return location.innerHTML;
           }
           if (location.selector) {
-            // its a jQuery
+            // it's a jQuery
             context.next_engine = location.attr('data-engine');
             if (options.clone === false) {
               return location.remove()[0].innerHTML.toString();
       });
     },
 
+    // Load partials
+    //
+    // ### Example
+    //
+    //      this.loadPartials({mypartial: '/path/to/partial'});
+    //
+    loadPartials: function(partials) {
+      var name;
+      if(partials) {
+        this.partials = this.partials || {};
+        for(name in partials) {
+          (function(context, name) {
+            context.load(partials[name])
+                   .then(function(template) {
+                     this.partials[name] = template;
+                   });
+          })(this, name);
+        }
+      }
+      return this;
+    },
+
     // `load()` a template and then `interpolate()` it with data.
     //
+    // can be called with multiple different signatures:
+    //
+    //      this.render(callback);
+    //      this.render('/location');
+    //      this.render('/location', {some: data});
+    //      this.render('/location', callback);
+    //      this.render('/location', {some: data}, callback);
+    //      this.render('/location', {some: data}, {my: partials});
+    //      this.render('/location', callback, {my: partials});
+    //      this.render('/location', {some: data}, callback, {my: partials});
+    //
     // ### Example
     //
     //      this.get('#/', function() {
     //        this.render('mytemplate.template', {name: 'test'});
     //      });
     //
-    render: function(location, data, callback) {
+    render: function(location, data, callback, partials) {
       if (_isFunction(location) && !data) {
+        // invoked as render(callback)
         return this.then(location);
       } else {
-        return this.load(location).interpolate(data, location).then(callback);
+        if(_isFunction(data)) {
+          // invoked as render(location, callback, [partials])
+          partials = callback;
+          callback = data;
+          data = null;
+        } else if(callback && !_isFunction(callback)) {
+          // invoked as render(location, data, partials)
+          partials = callback;
+          callback = null;
+        }
+
+        return this.loadPartials(partials)
+                   .load(location)
+                   .interpolate(data, location)
+                   .then(callback);
       }
     },
 
-    // itterates over an array, applying the callback for each item item. the
+    // `render()` the `location` with `data` and then `swap()` the
+    // app's `$element` with the rendered content.
+    partial: function(location, data, callback, partials) {
+      if (_isFunction(callback)) {
+        // invoked as partial(location, data, callback, [partials])
+        return this.render(location, data, partials).swap(callback);
+      } else if (_isFunction(data)) {
+        // invoked as partial(location, callback, [partials])
+        return this.render(location, {}, callback).swap(data);
+      } else {
+        // invoked as partial(location, data, [partials])
+        return this.render(location, data, callback).swap();
+      }
+    },
+
+    // defers the call of function to occur in order of the render queue.
+    // The function can accept any number of arguments as long as the last
+    // argument is a callback function. This is useful for putting arbitrary
+    // asynchronous functions into the queue. The content passed to the
+    // callback is passed as `content` to the next item in the queue.
+    //
+    // ### Example
+    //
+    //     this.send($.getJSON, '/app.json')
+    //         .then(function(json) {
+    //           $('#message).text(json['message']);
+    //          });
+    //
+    //
+    send: function() {
+      var context = this,
+          args = _makeArray(arguments),
+          fun  = args.shift();
+
+      if (_isArray(args[0])) { args = args[0]; }
+
+      return this.then(function(content) {
+        args.push(function(response) { context.next(response); });
+        context.wait();
+        fun.apply(fun, args);
+        return false;
+      });
+    },
+
+    // iterates over an array, applying the callback for each item item. the
     // callback takes the same style of arguments as `jQuery.each()` (index, item).
     // The return value of each callback is collected as a single string and stored
     // as `content` to be used in the next iteration of the `RenderContext`.
-    collect: function(array, callback) {
+    collect: function(array, callback, now) {
       var context = this;
-      return this.then(function() {
-        var contents = "";
+      var coll = function() {
+        if (_isFunction(array)) {
+          callback = array;
+          array = this.content;
+        }
+        var contents = [], doms = false;
         $.each(array, function(i, item) {
           var returned = callback.apply(context, [i, item]);
-          contents += returned;
+          if (returned.jquery && returned.length == 1) {
+            returned = returned[0];
+            doms = true;
+          }
+          contents.push(returned);
           return returned;
         });
-        return contents;
-      });
+        return doms ? contents : contents.join('');
+      };
+      return now ? coll() : this.then(coll);
     },
 
     // loads a template, and then interpolates it for each item in the `data`
-    // array.
+    // array. If a callback is passed, it will call the callback with each
+    // item in the array _after_ interpolation
     renderEach: function(location, name, data, callback) {
       if (_isArray(name)) {
         callback = data;
         data = name;
         name = null;
       }
-      if (!data && _isArray(this.content)) {
-        data = this.content;
-      }
-      return this.load(location).collect(data, function(i, value) {
-        var idata = {};
-        name ? (idata[name] = value) : (idata = value);
-        return this.event_context.interpolate(this.content, idata, location);
+      return this.load(location).then(function(content) {
+          var rctx = this;
+          if (!data) {
+            data = _isArray(this.previous_content) ? this.previous_content : [];
+          }
+          if (callback) {
+            $.each(data, function(i, value) {
+              var idata = {}, engine = this.next_engine || location;
+              if (name) {
+                idata[name] = value;
+              } else {
+                idata = value;
+              }
+              callback(value, rctx.event_context.interpolate(content, idata, engine));
+            });
+          } else {
+            return this.collect(data, function(i, value) {
+              var idata = {}, engine = this.next_engine || location;
+              if (name) {
+                idata[name] = value;
+              } else {
+                idata = value;
+              }
+              return this.event_context.interpolate(content, idata, engine);
+            }, true);
+          }
       });
     },
 
     interpolate: function(data, engine, retain) {
       var context = this;
       return this.then(function(content, prev) {
+        if (!data && prev) { data = prev; }
         if (this.next_engine) {
           engine = this.next_engine;
           this.next_engine = false;
         }
-        var rendered = context.event_context.interpolate(content, data, engine);
+        var rendered = context.event_context.interpolate(content, data, engine, this.partials);
         return retain ? prev + rendered : rendered;
       });
     },
 
-    // executes `EventContext#swap()` with the `content`
-    swap: function() {
+    // Swap the return contents ensuring order. See `Application#swap`
+    swap: function(callback) {
       return this.then(function(content) {
-        this.event_context.swap(content);
+        this.event_context.swap(content, callback);
+        return content;
       }).trigger('changed', {});
     },
 
     },
 
     // trigger the event in the order of the event context. Same semantics
-    // as `Sammy.EventContext#trigger()`. If data is ommitted, `content`
+    // as `Sammy.EventContext#trigger()`. If data is omitted, `content`
     // is sent as `{content: content}`
     trigger: function(name, data) {
       return this.then(function(content) {
         if (typeof data == 'undefined') { data = {content: content}; }
         this.event_context.trigger(name, data);
+        return content;
       });
     }
 
   //
   // ### Example
   //
-  //  $.sammy(function() {
-  //    // The context here is this Sammy.Application
-  //    this.get('#/:name', function() {
-  //      // The context here is a new Sammy.EventContext
-  //      if (this.params['name'] == 'sammy') {
-  //        this.partial('name.html.erb', {name: 'Sammy'});
-  //      } else {
-  //        this.redirect('#/somewhere-else')
-  //      }
-  //    });
-  //  });
+  //       $.sammy(function() {
+  //         // The context here is this Sammy.Application
+  //         this.get('#/:name', function() {
+  //           // The context here is a new Sammy.EventContext
+  //           if (this.params['name'] == 'sammy') {
+  //             this.partial('name.html.erb', {name: 'Sammy'});
+  //           } else {
+  //             this.redirect('#/somewhere-else')
+  //           }
+  //         });
+  //       });
   //
   // Initialize a new EventContext
   //
 
     // A shortcut to the app's `$element()`
     $element: function() {
-      return this.app.$element();
+      return this.app.$element(_makeArray(arguments).shift());
     },
 
     // Look up a templating engine within the current app and context.
     // `engine` can be one of the following:
     //
-    // * a function: should conform to `function(content, data) { return interploated; }`
+    // * a function: should conform to `function(content, data) { return interpolated; }`
     // * a template path: 'template.ejs', looks up the extension to match to
     //   the `ejs()` helper
-    // * a string referering to the helper: "mustache" => `mustache()`
+    // * a string referring to the helper: "mustache" => `mustache()`
     //
     // If no engine is found, use the app's default `template_engine`
     //
       // if path is actually an engine function just return it
       if (_isFunction(engine)) { return engine; }
       // lookup engine name by path extension
-      engine = engine.toString();
-      if ((engine_match = engine.match(/\.([^\.]+)$/))) {
+      engine = (engine || context.app.template_engine).toString();
+      if ((engine_match = engine.match(/\.([^\.\?\#]+)(\?|$)/))) {
         engine = engine_match[1];
       }
       // set the engine to the default template engine if no match is found
       if (engine && _isFunction(context[engine])) {
         return context[engine];
       }
+
       if (context.app.template_engine) {
         return this.engineFor(context.app.template_engine);
       }
 
     // using the template `engine` found with `engineFor()`, interpolate the
     // `data` into `content`
-    interpolate: function(content, data, engine) {
-      return this.engineFor(engine).apply(this, [content, data]);
+    interpolate: function(content, data, engine, partials) {
+      return this.engineFor(engine).apply(this, [content, data, partials]);
     },
 
     // Create and return a `Sammy.RenderContext` calling `render()` on it.
     //        .appendTo('ul');
     //      // appends the rendered content to $('ul')
     //
-    render: function(location, data, callback) {
-      return new Sammy.RenderContext(this).render(location, data, callback);
+    render: function(location, data, callback, partials) {
+      return new Sammy.RenderContext(this).render(location, data, callback, partials);
+    },
+
+    // Create and return a `Sammy.RenderContext` calling `renderEach()` on it.
+    // Loads the template and interpolates the data for each item,
+    // however does not actually place it in the DOM.
+    //
+    // `name` is an optional parameter (if it is an array, it is used as `data`,
+    // and the third parameter used as `callback`, if set).
+    //
+    // If `data` is not provided, content from the previous step in the chain
+    // (if it is an array) is used, and `name` is used as the key for each
+    // element of the array (useful for referencing in template).
+    //
+    // ### Example
+    //
+    //      // mytemplate.mustache <div class="name">{{name}}</div>
+    //      renderEach('mytemplate.mustache', [{name: 'quirkey'}, {name: 'endor'}])
+    //      // sets the `content` to <div class="name">quirkey</div><div class="name">endor</div>
+    //      renderEach('mytemplate.mustache', [{name: 'quirkey'}, {name: 'endor'}]).appendTo('ul');
+    //      // appends the rendered content to $('ul')
+    //
+    //      // names.json: ["quirkey", "endor"]
+    //      this.load('names.json').renderEach('mytemplate.mustache', 'name').appendTo('ul');
+    //      // uses the template to render each item in the JSON array
+    //
+    renderEach: function(location, name, data, callback) {
+      return new Sammy.RenderContext(this).renderEach(location, name, data, callback);
     },
 
     // create a new `Sammy.RenderContext` calling `load()` with `location` and
       return new Sammy.RenderContext(this).load(location, options, callback);
     },
 
-    // `render()` the the `location` with `data` and then `swap()` the
+    // create a new `Sammy.RenderContext` calling `loadPartials()` with `partials`.
+    loadPartials: function(partials) {
+      return new Sammy.RenderContext(this).loadPartials(partials);
+    },
+
+    // `render()` the `location` with `data` and then `swap()` the
     // app's `$element` with the rendered content.
-    partial: function(location, data) {
-      return this.render(location, data).swap();
+    partial: function(location, data, callback, partials) {
+      return new Sammy.RenderContext(this).partial(location, data, callback, partials);
+    },
+
+    // create a new `Sammy.RenderContext` calling `send()` with an arbitrary
+    // function
+    send: function() {
+      var rctx = new Sammy.RenderContext(this);
+      return rctx.send.apply(rctx, arguments);
     },
 
     // Changes the location of the current window. If `to` begins with
     // ### Example
     //
     //      redirect('#/other/route');
-    //      // equivilent to
+    //      // equivalent to
     //      redirect('#', 'other', 'route');
     //
     redirect: function() {
       var to, args = _makeArray(arguments),
-          current_location = this.app.getLocation();
-      if (args.length > 1) {
-        args.unshift('/');
-        to = this.join.apply(this, args);
+          current_location = this.app.getLocation(),
+          l = args.length;
+      if (l > 1) {
+        var i = 0, paths = [], pairs = [], params = {}, has_params = false;
+        for (; i < l; i++) {
+          if (typeof args[i] == 'string') {
+            paths.push(args[i]);
+          } else {
+            $.extend(params, args[i]);
+            has_params = true;
+          }
+        }
+        to = paths.join('/');
+        if (has_params) {
+          for (var k in params) {
+            pairs.push(this.app._encodeFormPair(k, params[k]));
+          }
+          to += '?' + pairs.join('&');
+        }
       } else {
         to = args[0];
       }
       this.trigger('redirect', {to: to});
-      this.app.last_location = this.path;
+      this.app.last_location = [this.verb, this.path];
       this.app.setLocation(to);
-      if (current_location == to) {
+      if (new RegExp(to).test(current_location)) {
         this.app.trigger('location-changed');
       }
     },
     },
 
     // A shortcut to app's `swap()`
-    swap: function(contents) {
-      return this.app.swap(contents);
+    swap: function(contents, callback) {
+      return this.app.swap(contents, callback);
     },
 
     // Raises a possible `notFound()` error for the current path.
       return this.app.notFound(this.verb, this.path);
     },
 
+    // Default JSON parsing uses jQuery's `parseJSON()`. Include `Sammy.JSON`
+    // plugin for the more conformant "crockford special".
+    json: function(string) {
+      return $.parseJSON(string);
+    },
+
     // //=> Sammy.EventContext: get #/ {}
     toString: function() {
       return "Sammy.EventContext: " + [this.verb, this.path, this.params].join(' ');
 
   });
 
-  // An alias to Sammy
-  $.sammy = window.Sammy = Sammy;
-
-})(jQuery);
+  return Sammy;
+});
diff --git a/deps/rabbitmq_management/priv/www/js/sammy.min.js b/deps/rabbitmq_management/priv/www/js/sammy.min.js
new file mode 100644 (file)
index 0000000..a08d509
--- /dev/null
@@ -0,0 +1,5 @@
+// -- Sammy.js -- /sammy.js
+// http://sammyjs.org
+// Version: 0.7.6
+// Built: 2014-08-26 10:45:34 +0300
+(function(factory){if(typeof define==="function"&&define.amd){define(["jquery"],factory)}else{jQuery.sammy=window.Sammy=factory(jQuery)}})(function($){var Sammy,PATH_REPLACER="([^/]+)",PATH_NAME_MATCHER=/:([\w\d]+)/g,QUERY_STRING_MATCHER=/\?([^#]*)?$/,_makeArray=function(nonarray){return Array.prototype.slice.call(nonarray)},_isFunction=function(obj){return Object.prototype.toString.call(obj)==="[object Function]"},_isArray=function(obj){return Object.prototype.toString.call(obj)==="[object Array]"},_isRegExp=function(obj){return Object.prototype.toString.call(obj)==="[object RegExp]"},_decode=function(str){return decodeURIComponent((str||"").replace(/\+/g," "))},_encode=encodeURIComponent,_escapeHTML=function(s){return String(s).replace(/&(?!\w+;)/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;")},_routeWrapper=function(verb){return function(){return this.route.apply(this,[verb].concat(Array.prototype.slice.call(arguments)))}},_template_cache={},_has_history=!!(window.history&&history.pushState),loggers=[];Sammy=function(){var args=_makeArray(arguments),app,selector;Sammy.apps=Sammy.apps||{};if(args.length===0||args[0]&&_isFunction(args[0])){return Sammy.apply(Sammy,["body"].concat(args))}else if(typeof(selector=args.shift())=="string"){app=Sammy.apps[selector]||new Sammy.Application;app.element_selector=selector;if(args.length>0){$.each(args,function(i,plugin){app.use(plugin)})}if(app.element_selector!=selector){delete Sammy.apps[selector]}Sammy.apps[app.element_selector]=app;return app}};Sammy.VERSION="0.7.6";Sammy.addLogger=function(logger){loggers.push(logger)};Sammy.log=function(){var args=_makeArray(arguments);args.unshift("["+Date()+"]");$.each(loggers,function(i,logger){logger.apply(Sammy,args)})};if(typeof window.console!="undefined"){if(typeof 
window.console.log==="function"&&_isFunction(window.console.log.apply)){Sammy.addLogger(function(){window.console.log.apply(window.console,arguments)})}else{Sammy.addLogger(function(){window.console.log(arguments)})}}else if(typeof console!="undefined"){Sammy.addLogger(function(){console.log.apply(console,arguments)})}$.extend(Sammy,{makeArray:_makeArray,isFunction:_isFunction,isArray:_isArray});Sammy.Object=function(obj){return $.extend(this,obj||{})};$.extend(Sammy.Object.prototype,{escapeHTML:_escapeHTML,h:_escapeHTML,toHash:function(){var json={};$.each(this,function(k,v){if(!_isFunction(v)){json[k]=v}});return json},toHTML:function(){var display="";$.each(this,function(k,v){if(!_isFunction(v)){display+="<strong>"+k+"</strong> "+v+"<br />"}});return display},keys:function(attributes_only){var keys=[];for(var property in this){if(!_isFunction(this[property])||!attributes_only){keys.push(property)}}return keys},has:function(key){return this[key]&&$.trim(this[key].toString())!==""},join:function(){var args=_makeArray(arguments);var delimiter=args.shift();return args.join(delimiter)},log:function(){Sammy.log.apply(Sammy,arguments)},toString:function(include_functions){var s=[];$.each(this,function(k,v){if(!_isFunction(v)||include_functions){s.push('"'+k+'": '+v.toString())}});return"Sammy.Object: {"+s.join(",")+"}"}});Sammy.targetIsThisWindow=function targetIsThisWindow(event,tagName){var targetElement=$(event.target).closest(tagName);if(targetElement.length===0){return true}var targetWindow=targetElement.attr("target");if(!targetWindow||targetWindow===window.name||targetWindow==="_self"){return true}if(targetWindow==="_blank"){return false}if(targetWindow==="top"&&window===window.top){return true}return false};Sammy.DefaultLocationProxy=function(app,run_interval_every){this.app=app;this.is_native=false;this.has_history=_has_history;this._startPolling(run_interval_every)};Sammy.DefaultLocationProxy.fullPath=function(location_obj){var 
matches=location_obj.toString().match(/^[^#]*(#.+)$/);var hash=matches?matches[1]:"";return[location_obj.pathname,location_obj.search,hash].join("")};$.extend(Sammy.DefaultLocationProxy.prototype,{bind:function(){var proxy=this,app=this.app,lp=Sammy.DefaultLocationProxy;$(window).bind("hashchange."+this.app.eventNamespace(),function(e,non_native){if(proxy.is_native===false&&!non_native){proxy.is_native=true;window.clearInterval(lp._interval);lp._interval=null}app.trigger("location-changed")});if(_has_history&&!app.disable_push_state){$(window).bind("popstate."+this.app.eventNamespace(),function(e){app.trigger("location-changed")});$(document).delegate("a","click.history-"+this.app.eventNamespace(),function(e){if(e.isDefaultPrevented()||e.metaKey||e.ctrlKey){return}var full_path=lp.fullPath(this),hostname=this.hostname?this.hostname:function(a){var l=document.createElement("a");l.href=a.href;return l.hostname}(this);if(hostname==window.location.hostname&&app.lookupRoute("get",full_path)&&Sammy.targetIsThisWindow(e,"a")){e.preventDefault();proxy.setLocation(full_path);return false}})}if(!lp._bindings){lp._bindings=0}lp._bindings++},unbind:function(){$(window).unbind("hashchange."+this.app.eventNamespace());$(window).unbind("popstate."+this.app.eventNamespace());$(document).undelegate("a","click.history-"+this.app.eventNamespace());Sammy.DefaultLocationProxy._bindings--;if(Sammy.DefaultLocationProxy._bindings<=0){window.clearInterval(Sammy.DefaultLocationProxy._interval);Sammy.DefaultLocationProxy._interval=null}},getLocation:function(){return 
Sammy.DefaultLocationProxy.fullPath(window.location)},setLocation:function(new_location){if(/^([^#\/]|$)/.test(new_location)){if(_has_history&&!this.app.disable_push_state){new_location="/"+new_location}else{new_location="#!/"+new_location}}if(new_location!=this.getLocation()){if(_has_history&&!this.app.disable_push_state&&/^\//.test(new_location)){history.pushState({path:new_location},window.title,new_location);this.app.trigger("location-changed")}else{return window.location=new_location}}},_startPolling:function(every){var proxy=this;if(!Sammy.DefaultLocationProxy._interval){if(!every){every=10}var hashCheck=function(){var current_location=proxy.getLocation();if(typeof Sammy.DefaultLocationProxy._last_location=="undefined"||current_location!=Sammy.DefaultLocationProxy._last_location){window.setTimeout(function(){$(window).trigger("hashchange",[true])},0)}Sammy.DefaultLocationProxy._last_location=current_location};hashCheck();Sammy.DefaultLocationProxy._interval=window.setInterval(hashCheck,every)}}});Sammy.Application=function(app_function){var app=this;this.routes={};this.listeners=new Sammy.Object({});this.arounds=[];this.befores=[];this.namespace=(new Date).getTime()+"-"+parseInt(Math.random()*1e3,10);this.context_prototype=function(){Sammy.EventContext.apply(this,arguments)};this.context_prototype.prototype=new Sammy.EventContext;if(_isFunction(app_function)){app_function.apply(this,[this])}if(!this._location_proxy){this.setLocationProxy(new 
Sammy.DefaultLocationProxy(this,this.run_interval_every))}if(this.debug){this.bindToAllEvents(function(e,data){app.log(app.toString(),e.cleaned_type,data||{})})}};Sammy.Application.prototype=$.extend({},Sammy.Object.prototype,{ROUTE_VERBS:["get","post","put","delete"],APP_EVENTS:["run","unload","lookup-route","run-route","route-found","event-context-before","event-context-after","changed","error","check-form-submission","redirect","location-changed"],_last_route:null,_location_proxy:null,_running:false,element_selector:"body",debug:false,raise_errors:false,run_interval_every:50,disable_push_state:false,template_engine:null,toString:function(){return"Sammy.Application:"+this.element_selector},$element:function(selector){return selector?$(this.element_selector).find(selector):$(this.element_selector)},use:function(){var args=_makeArray(arguments),plugin=args.shift(),plugin_name=plugin||"";try{args.unshift(this);if(typeof plugin=="string"){plugin_name="Sammy."+plugin;plugin=Sammy[plugin]}plugin.apply(this,args)}catch(e){if(typeof plugin==="undefined"){this.error("Plugin Error: called use() but plugin ("+plugin_name.toString()+") is not defined",e)}else if(!_isFunction(plugin)){this.error("Plugin Error: called use() but '"+plugin_name.toString()+"' is not a function",e)}else{this.error("Plugin Error",e)}}return this},setLocationProxy:function(new_proxy){var original_proxy=this._location_proxy;this._location_proxy=new_proxy;if(this.isRunning()){if(original_proxy){original_proxy.unbind()}this._location_proxy.bind()}},log:function(){Sammy.log.apply(Sammy,Array.prototype.concat.apply([this.element_selector],arguments))},route:function(verb,path){var 
app=this,param_names=[],add_route,path_match,callback=Array.prototype.slice.call(arguments,2);if(callback.length===0&&_isFunction(path)){callback=[path];path=verb;verb="any"}verb=verb.toLowerCase();if(path.constructor==String){PATH_NAME_MATCHER.lastIndex=0;while((path_match=PATH_NAME_MATCHER.exec(path))!==null){param_names.push(path_match[1])}path=new RegExp(path.replace(PATH_NAME_MATCHER,PATH_REPLACER)+"$")}$.each(callback,function(i,cb){if(typeof cb==="string"){callback[i]=app[cb]}});add_route=function(with_verb){var r={verb:with_verb,path:path,callback:callback,param_names:param_names};app.routes[with_verb]=app.routes[with_verb]||[];app.routes[with_verb].push(r)};if(verb==="any"){$.each(this.ROUTE_VERBS,function(i,v){add_route(v)})}else{add_route(verb)}return this},get:_routeWrapper("get"),post:_routeWrapper("post"),put:_routeWrapper("put"),del:_routeWrapper("delete"),any:_routeWrapper("any"),mapRoutes:function(route_array){var app=this;$.each(route_array,function(i,route_args){app.route.apply(app,route_args)});return this},eventNamespace:function(){return["sammy-app",this.namespace].join("-")},bind:function(name,data,callback){var app=this;if(typeof callback=="undefined"){callback=data}var listener_callback=function(){var e,context,data;e=arguments[0];data=arguments[1];if(data&&data.context){context=data.context;delete data.context}else{context=new app.context_prototype(app,"bind",e.type,data,e.target)}e.cleaned_type=e.type.replace(app.eventNamespace(),"");callback.apply(context,[e,data])};if(!this.listeners[name]){this.listeners[name]=[]}this.listeners[name].push(listener_callback);if(this.isRunning()){this._listen(name,listener_callback)}return this},trigger:function(name,data){this.$element().trigger([name,this.eventNamespace()].join("."),[data]);return this},refresh:function(){this.last_location=null;this.trigger("location-changed");return 
this},before:function(options,callback){if(_isFunction(options)){callback=options;options={}}this.befores.push([options,callback]);return this},after:function(callback){return this.bind("event-context-after",callback)},around:function(callback){this.arounds.push(callback);return this},onComplete:function(callback){this._onComplete=callback;return this},isRunning:function(){return this._running},helpers:function(extensions){$.extend(this.context_prototype.prototype,extensions);return this},helper:function(name,method){this.context_prototype.prototype[name]=method;return this},run:function(start_url){if(this.isRunning()){return false}var app=this;$.each(this.listeners.toHash(),function(name,callbacks){$.each(callbacks,function(i,listener_callback){app._listen(name,listener_callback)})});this.trigger("run",{start_url:start_url});this._running=true;this.last_location=null;if(!/\#(.+)/.test(this.getLocation())&&typeof start_url!="undefined"){this.setLocation(start_url)}this._checkLocation();this._location_proxy.bind();this.bind("location-changed",function(){app._checkLocation()});this.bind("submit",function(e){if(!Sammy.targetIsThisWindow(e,"form")){return true}var returned=app._checkFormSubmission($(e.target).closest("form"));return returned===false?e.preventDefault():false});$(window).bind("unload",function(){app.unload()});return this.trigger("changed")},unload:function(){if(!this.isRunning()){return false}var app=this;this.trigger("unload");this._location_proxy.unbind();this.$element().unbind("submit").removeClass(app.eventNamespace());$.each(this.listeners.toHash(),function(name,listeners){$.each(listeners,function(i,listener_callback){app._unlisten(name,listener_callback)})});this._running=false;return this},destroy:function(){this.unload();delete Sammy.apps[this.element_selector];return this},bindToAllEvents:function(callback){var 
app=this;$.each(this.APP_EVENTS,function(i,e){app.bind(e,callback)});$.each(this.listeners.keys(true),function(i,name){if($.inArray(name,app.APP_EVENTS)==-1){app.bind(name,callback)}});return this},routablePath:function(path){return path.replace(QUERY_STRING_MATCHER,"")},lookupRoute:function(verb,path){var app=this,routed=false,i=0,l,route;if(typeof this.routes[verb]!="undefined"){l=this.routes[verb].length;for(;i<l;i++){route=this.routes[verb][i];if(app.routablePath(path).match(route.path)){routed=route;break}}}return routed},runRoute:function(verb,path,params,target){var app=this,route=this.lookupRoute(verb,path),context,wrapped_route,arounds,around,befores,before,callback_args,path_params,final_returned;if(this.debug){this.log("runRoute",[verb,path].join(" "))}this.trigger("run-route",{verb:verb,path:path,params:params});if(typeof params=="undefined"){params={}}$.extend(params,this._parseQueryString(path));if(route){this.trigger("route-found",{route:route});if((path_params=route.path.exec(this.routablePath(path)))!==null){path_params.shift();$.each(path_params,function(i,param){if(route.param_names[i]){params[route.param_names[i]]=_decode(param)}else{if(!params.splat){params.splat=[]}params.splat.push(_decode(param))}})}context=new this.context_prototype(this,verb,path,params,target);arounds=this.arounds.slice(0);befores=this.befores.slice(0);callback_args=[context];if(params.splat){callback_args=callback_args.concat(params.splat)}wrapped_route=function(){var returned,i,nextRoute;while(befores.length>0){before=befores.shift();if(app.contextMatchesOptions(context,before[0])){returned=before[1].apply(context,[context]);if(returned===false){return false}}}app.last_route=route;context.trigger("event-context-before",{context:context});if(typeof route.callback==="function"){route.callback=[route.callback]}if(route.callback&&route.callback.length){i=-1;nextRoute=function(){i++;if(route.callback[i]){returned=route.callback[i].apply(context,callback_args)}else 
if(app._onComplete&&typeof(app._onComplete==="function")){app._onComplete(context)}};callback_args.push(nextRoute);nextRoute()}context.trigger("event-context-after",{context:context});return returned};$.each(arounds.reverse(),function(i,around){var last_wrapped_route=wrapped_route;wrapped_route=function(){return around.apply(context,[last_wrapped_route])}});try{final_returned=wrapped_route()}catch(e){this.error(["500 Error",verb,path].join(" "),e)}return final_returned}else{return this.notFound(verb,path)}},contextMatchesOptions:function(context,match_options,positive){var options=match_options;if(typeof options==="string"||_isRegExp(options)){options={path:options}}if(typeof positive==="undefined"){positive=true}if($.isEmptyObject(options)){return true}if(_isArray(options.path)){var results,numopt,opts,len;results=[];for(numopt=0,len=options.path.length;numopt<len;numopt+=1){opts=$.extend({},options,{path:options.path[numopt]});results.push(this.contextMatchesOptions(context,opts))}var matched=$.inArray(true,results)>-1?true:false;return positive?matched:!matched}if(options.only){return this.contextMatchesOptions(context,options.only,true)}else if(options.except){return this.contextMatchesOptions(context,options.except,false)}var path_matched=true,verb_matched=true;if(options.path){if(!_isRegExp(options.path)){options.path=new RegExp(options.path.toString()+"$")}path_matched=options.path.test(context.path)}if(options.verb){if(typeof options.verb==="string"){verb_matched=options.verb===context.verb}else{verb_matched=options.verb.indexOf(context.verb)>-1}}return positive?verb_matched&&path_matched:!(verb_matched&&path_matched)},getLocation:function(){return this._location_proxy.getLocation()},setLocation:function(new_location){return this._location_proxy.setLocation(new_location)},swap:function(content,callback){var $el=this.$element().html(content);if(_isFunction(callback)){callback(content)}return $el},templateCache:function(key,value){if(typeof 
value!="undefined"){return _template_cache[key]=value}else{return _template_cache[key]}},clearTemplateCache:function(){return _template_cache={}},notFound:function(verb,path){var ret=this.error(["404 Not Found",verb,path].join(" "));return verb==="get"?ret:true},error:function(message,original_error){if(!original_error){original_error=new Error}original_error.message=[message,original_error.message].join(" ");this.trigger("error",{message:original_error.message,error:original_error});if(this.raise_errors){throw original_error}else{this.log(original_error.message,original_error)}},_checkLocation:function(){var location,returned;location=this.getLocation();if(!this.last_location||this.last_location[0]!="get"||this.last_location[1]!=location){this.last_location=["get",location];returned=this.runRoute("get",location)}return returned},_getFormVerb:function(form){var $form=$(form),verb,$_method;$_method=$form.find('input[name="_method"]');if($_method.length>0){verb=$_method.val()}if(!verb){verb=$form[0].getAttribute("method")}if(!verb||verb===""){verb="get"}return $.trim(verb.toString().toLowerCase())},_checkFormSubmission:function(form){var $form,path,verb,params,returned;this.trigger("check-form-submission",{form:form});$form=$(form);path=$form.attr("action")||"";verb=this._getFormVerb($form);if(this.debug){this.log("_checkFormSubmission",$form,path,verb)}if(verb==="get"){params=this._serializeFormParams($form);if(params!==""){path+="?"+params}this.setLocation(path);returned=false}else{params=$.extend({},this._parseFormParams($form));returned=this.runRoute(verb,path,params,form.get(0))}return typeof returned=="undefined"?false:returned},_serializeFormParams:function($form){var queryString="",fields=$form.serializeArray(),i;if(fields.length>0){queryString=this._encodeFormPair(fields[0].name,fields[0].value);for(i=1;i<fields.length;i++){queryString=queryString+"&"+this._encodeFormPair(fields[i].name,fields[i].value)}}return 
queryString},_encodeFormPair:function(name,value){return _encode(name)+"="+_encode(value)},_parseFormParams:function($form){var params={},form_fields=$form.serializeArray(),i;for(i=0;i<form_fields.length;i++){params=this._parseParamPair(params,form_fields[i].name,form_fields[i].value)}return params},_parseQueryString:function(path){var params={},parts,pairs,pair,i;parts=path.match(QUERY_STRING_MATCHER);if(parts&&parts[1]){pairs=parts[1].split("&");for(i=0;i<pairs.length;i++){pair=pairs[i].split("=");params=this._parseParamPair(params,_decode(pair[0]),_decode(pair[1]||""))}}return params},_parseParamPair:function(params,key,value){if(typeof params[key]!=="undefined"){if(_isArray(params[key])){params[key].push(value)}else{params[key]=[params[key],value]}}else{params[key]=value}return params},_listen:function(name,callback){return this.$element().bind([name,this.eventNamespace()].join("."),callback)},_unlisten:function(name,callback){return this.$element().unbind([name,this.eventNamespace()].join("."),callback)}});Sammy.RenderContext=function(event_context){this.event_context=event_context;this.callbacks=[];this.previous_content=null;this.content=null;this.next_engine=false;this.waiting=false};Sammy.RenderContext.prototype=$.extend({},Sammy.Object.prototype,{then:function(callback){if(!_isFunction(callback)){if(typeof callback==="string"&&callback in this.event_context){var helper=this.event_context[callback];callback=function(content){return helper.apply(this.event_context,[content])}}else{return this}}var context=this;if(this.waiting){this.callbacks.push(callback)}else{this.wait();window.setTimeout(function(){var returned=callback.apply(context,[context.content,context.previous_content]);if(returned!==false){context.next(returned)}},0)}return this},wait:function(){this.waiting=true},next:function(content){this.waiting=false;if(typeof 
content!=="undefined"){this.previous_content=this.content;this.content=content}if(this.callbacks.length>0){this.then(this.callbacks.shift())}},load:function(location,options,callback){var context=this;return this.then(function(){var should_cache,cached,is_json,location_array;if(_isFunction(options)){callback=options;options={}}else{options=$.extend({},options)}if(callback){this.then(callback)}if(typeof location==="string"){is_json=location.match(/\.json(\?|$)/)||options.json;should_cache=is_json?options.cache===true:options.cache!==false;context.next_engine=context.event_context.engineFor(location);delete options.cache;delete options.json;if(options.engine){context.next_engine=options.engine;delete options.engine}if(should_cache&&(cached=this.event_context.app.templateCache(location))){return cached}this.wait();$.ajax($.extend({url:location,data:{},dataType:is_json?"json":"text",type:"get",success:function(data){if(should_cache){context.event_context.app.templateCache(location,data)}context.next(data)}},options));return false}else{if(location.nodeType){return location.innerHTML}if(location.selector){context.next_engine=location.attr("data-engine");if(options.clone===false){return location.remove()[0].innerHTML.toString()}else{return location[0].innerHTML.toString()}}}})},loadPartials:function(partials){var name;if(partials){this.partials=this.partials||{};for(name in partials){(function(context,name){context.load(partials[name]).then(function(template){this.partials[name]=template})})(this,name)}}return this},render:function(location,data,callback,partials){if(_isFunction(location)&&!data){return this.then(location)}else{if(_isFunction(data)){partials=callback;callback=data;data=null}else if(callback&&!_isFunction(callback)){partials=callback;callback=null}return this.loadPartials(partials).load(location).interpolate(data,location).then(callback)}},partial:function(location,data,callback,partials){if(_isFunction(callback)){return 
this.render(location,data,partials).swap(callback)}else if(_isFunction(data)){return this.render(location,{},callback).swap(data)}else{return this.render(location,data,callback).swap()}},send:function(){var context=this,args=_makeArray(arguments),fun=args.shift();if(_isArray(args[0])){args=args[0]}return this.then(function(content){args.push(function(response){context.next(response)});context.wait();fun.apply(fun,args);return false})},collect:function(array,callback,now){var context=this;var coll=function(){if(_isFunction(array)){callback=array;array=this.content}var contents=[],doms=false;$.each(array,function(i,item){var returned=callback.apply(context,[i,item]);if(returned.jquery&&returned.length==1){returned=returned[0];doms=true}contents.push(returned);return returned});return doms?contents:contents.join("")};return now?coll():this.then(coll)},renderEach:function(location,name,data,callback){if(_isArray(name)){callback=data;data=name;name=null}return this.load(location).then(function(content){var rctx=this;if(!data){data=_isArray(this.previous_content)?this.previous_content:[]}if(callback){$.each(data,function(i,value){var idata={},engine=this.next_engine||location;if(name){idata[name]=value}else{idata=value}callback(value,rctx.event_context.interpolate(content,idata,engine))})}else{return this.collect(data,function(i,value){var idata={},engine=this.next_engine||location;if(name){idata[name]=value}else{idata=value}return this.event_context.interpolate(content,idata,engine)},true)}})},interpolate:function(data,engine,retain){var context=this;return this.then(function(content,prev){if(!data&&prev){data=prev}if(this.next_engine){engine=this.next_engine;this.next_engine=false}var rendered=context.event_context.interpolate(content,data,engine,this.partials);return retain?prev+rendered:rendered})},swap:function(callback){return this.then(function(content){this.event_context.swap(content,callback);return 
content}).trigger("changed",{})},appendTo:function(selector){return this.then(function(content){$(selector).append(content)}).trigger("changed",{})},prependTo:function(selector){return this.then(function(content){$(selector).prepend(content)}).trigger("changed",{})},replace:function(selector){return this.then(function(content){$(selector).html(content)}).trigger("changed",{})},trigger:function(name,data){return this.then(function(content){if(typeof data=="undefined"){data={content:content}}this.event_context.trigger(name,data);return content})}});Sammy.EventContext=function(app,verb,path,params,target){this.app=app;this.verb=verb;this.path=path;this.params=new Sammy.Object(params);this.target=target};Sammy.EventContext.prototype=$.extend({},Sammy.Object.prototype,{$element:function(){return this.app.$element(_makeArray(arguments).shift())},engineFor:function(engine){var context=this,engine_match;if(_isFunction(engine)){return engine}engine=(engine||context.app.template_engine).toString();if(engine_match=engine.match(/\.([^\.\?\#]+)(\?|$)/)){engine=engine_match[1]}if(engine&&_isFunction(context[engine])){return context[engine]}if(context.app.template_engine){return this.engineFor(context.app.template_engine)}return function(content,data){return content}},interpolate:function(content,data,engine,partials){return this.engineFor(engine).apply(this,[content,data,partials])},render:function(location,data,callback,partials){return new Sammy.RenderContext(this).render(location,data,callback,partials)},renderEach:function(location,name,data,callback){return new Sammy.RenderContext(this).renderEach(location,name,data,callback)},load:function(location,options,callback){return new Sammy.RenderContext(this).load(location,options,callback)},loadPartials:function(partials){return new Sammy.RenderContext(this).loadPartials(partials)},partial:function(location,data,callback,partials){return new Sammy.RenderContext(this).partial(location,data,callback,partials)},send:function(){var 
rctx=new Sammy.RenderContext(this);return rctx.send.apply(rctx,arguments)},redirect:function(){var to,args=_makeArray(arguments),current_location=this.app.getLocation(),l=args.length;if(l>1){var i=0,paths=[],pairs=[],params={},has_params=false;for(;i<l;i++){if(typeof args[i]=="string"){paths.push(args[i])}else{$.extend(params,args[i]);has_params=true}}to=paths.join("/");if(has_params){for(var k in params){pairs.push(this.app._encodeFormPair(k,params[k]))}to+="?"+pairs.join("&")}}else{to=args[0]}this.trigger("redirect",{to:to});this.app.last_location=[this.verb,this.path];this.app.setLocation(to);if(new RegExp(to).test(current_location)){this.app.trigger("location-changed")}},trigger:function(name,data){if(typeof data=="undefined"){data={}}if(!data.context){data.context=this}return this.app.trigger(name,data)},eventNamespace:function(){return this.app.eventNamespace()},swap:function(contents,callback){return this.app.swap(contents,callback)},notFound:function(){return this.app.notFound(this.verb,this.path)},json:function(string){return $.parseJSON(string)},toString:function(){return"Sammy.EventContext: "+[this.verb,this.path,this.params].join(" ")}});return Sammy});
\ No newline at end of file
similarity index 64%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channel.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/channel.ejs
index 1e9d18e7f98a9878669d67f939b34e7df45ffe9a..b5e3c8cdb9be1cd9a5b692ff51e9ca92ec751bf1 100644 (file)
 </div>
 </div>
 <% } %>
+
+<% if(channel.reductions || channel.garbage_collection) { %>
+<div class="section-hidden">
+<h2>Runtime Metrics (Advanced)</h2>
+ <div class="hider updatable">
+ <%= data_reductions('reductions-rates-conn', channel) %>
+ <table class="facts">
+    <% if (channel.garbage_collection.min_bin_vheap_size) { %>
+        <tr>
+        <th>Minimum binary virtual heap size in words (min_bin_vheap_size)</th>
+        <td><%= channel.garbage_collection.min_bin_vheap_size %></td>
+        </tr>
+    <% } %>
+
+    <% if (channel.garbage_collection.min_heap_size) { %>
+        <tr>
+        <th>Minimum heap size in words (min_heap_size)</th>
+        <td><%= channel.garbage_collection.min_heap_size %></td>
+        </tr>
+    <% } %>
+
+    <% if (channel.garbage_collection.fullsweep_after) { %>
+        <tr>
+        <th>Maximum generational collections before fullsweep (fullsweep_after)</th>
+        <td><%= channel.garbage_collection.fullsweep_after %></td>
+        </tr>
+    <% } %>
+
+    <% if (channel.garbage_collection.minor_gcs) { %>
+        <tr>
+        <th>Number of minor GCs (minor_gcs)</th>
+        <td><%= channel.garbage_collection.minor_gcs %></td>
+        </tr>
+    <% } %>
+ </table>
+ </div>
+</div>
+
+<% } %>
+
diff --git a/deps/rabbitmq_management/priv/www/js/tmpl/channels.ejs b/deps/rabbitmq_management/priv/www/js/tmpl/channels.ejs
new file mode 100644 (file)
index 0000000..f7cca2b
--- /dev/null
@@ -0,0 +1,7 @@
+<h1>Channels</h1>
+<div class="section">
+ <%= pagiante_ui(channels, 'channels') %>
+</div> 
+<div class="updatable">
+  <%= format('channels-list', {'channels': channels.items, 'mode': 'standalone'}) %>
+</div>
similarity index 66%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/connection.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/connection.ejs
index ae1ce9b08011d7a822706f650bb8f37341fb2651..eb79fb9fdbd66f6a51096447162f86c9d032886d 100644 (file)
@@ -1,4 +1,4 @@
-<h1>Connection <b><%= fmt_string(connection.name) %></b><%= fmt_maybe_vhost(connection.vhost) %></h1>
+<h1>Connection <%= fmt_string(connection.name) %> <%= fmt_maybe_vhost(connection.vhost) %></h1>
 
 <div class="section">
 <h2>Overview</h2>
   <td><%= fmt_node(connection.node) %></td>
 </tr>
 <% } %>
+
+<% if (connection.client_properties.connection_name) { %>
+<tr>
+  <th>Client-provided name</th>
+  <td><%= fmt_string(connection.client_properties.connection_name) %></td>
+</tr>
+<% } %>
+
 <tr>
  <th>Username</th>
  <td><%= fmt_string(connection.user) %></td>
@@ -60,6 +68,7 @@
  <td><%= connection.channel_max %> channels</td>
 </tr>
 </table>
+
 <% } %>
 
 </div>
 </div>
 <% } %>
 
+<% if(connection.reductions || connection.garbage_collection) { %>
+<div class="section-hidden">
+<h2>Runtime Metrics (Advanced)</h2>
+ <div class="hider updatable">
+ <%= data_reductions('reductions-rates-conn', connection) %>
+ <table class="facts">
+    <% if (connection.garbage_collection.min_bin_vheap_size) { %>
+        <tr>
+        <th>Minimum binary virtual heap size in words (min_bin_vheap_size)</th>
+        <td><%= connection.garbage_collection.min_bin_vheap_size %></td>
+        </tr>
+    <% } %>
+
+    <% if (connection.garbage_collection.min_heap_size) { %>
+        <tr>
+        <th>Minimum heap size in words (min_heap_size)</th>
+        <td><%= connection.garbage_collection.min_heap_size %></td>
+        </tr>
+    <% } %>
+
+    <% if (connection.garbage_collection.fullsweep_after) { %>
+        <tr>
+        <th>Maximum generational collections before fullsweep (fullsweep_after)</th>
+        <td><%= connection.garbage_collection.fullsweep_after %></td>
+        </tr>
+    <% } %>
+
+    <% if (connection.garbage_collection.minor_gcs) { %>
+        <tr>
+        <th>Number of minor GCs (minor_gcs)</th>
+        <td><%= connection.garbage_collection.minor_gcs %></td>
+        </tr>
+    <% } %>
+ </table>
+ </div>
+</div>
+
+<% } %>
+
+
 <div class="section-hidden">
   <h2>Close this connection</h2>
   <div class="hider">
similarity index 89%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/connections.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/connections.ejs
index 317328185e3bdb6ac183470de4b5fa3f6ded934d..7b71051623ab8abd64462975a3d433bf337877af 100644 (file)
@@ -1,7 +1,9 @@
 <h1>Connections</h1>
-<%= filter_ui(connections) %>
+<div class="section">
+ <%= pagiante_ui(connections, 'connections') %>
+</div> 
 <div class="updatable">
-<% if (connections.length > 0) { %>
+<% if (connections.items.length > 0) { %>
 <table class="list">
  <thead>
   <tr>
@@ -14,7 +16,7 @@
 <% if (vhosts_interesting) { %>
     <th><%= fmt_sort('Virtual host', 'vhost') %></th>
 <% } %>
-    <th><%= fmt_sort('Name',           'name') %></th>
+    <th><%= fmt_sort('Name',           'client_properties.connection_name;name') %></th>
 <% if (nodes_interesting) { %>
     <th><%= fmt_sort('Node',           'node') %></th>
 <% } %>
  </thead>
  <tbody>
 <%
- for (var i = 0; i < connections.length; i++) {
-    var connection = connections[i];
+ for (var i = 0; i < connections.items.length; i++) {
+    var connection = connections.items[i];
 %>
   <tr<%= alt_rows(i)%>>
 <% if (vhosts_interesting) { %>
     <td><%= fmt_string(connection.vhost) %></td>
 <% } %>
+<% if(connection.client_properties.connection_name) { %>
+    <td><%= link_conn(connection.name) %>
+        <%= fmt_string(short_conn(connection.client_properties.connection_name)) %>
+    </td>
+<% } else { %>
     <td><%= link_conn(connection.name) %></td>
+<% } %>
 <% if (nodes_interesting) { %>
     <td><%= fmt_node(connection.node) %></td>
 <% } %>
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/exchange.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/exchange.ejs
index 4eb2496554f29b4a7aa8ab021cdd34b9f2575928..cad9ba6adf2743ade921863d36945946eb547dc7 100644 (file)
@@ -56,7 +56,7 @@
   <h3>Default exchange</h3>
   <p>
     The default exchange is implicitly bound to every queue, with a
-    routing key equal to the queue name. It it not possible to
+    routing key equal to the queue name. It is not possible to
     explicitly bind to, or unbind from the default exchange. It also
     cannot be deleted.
   </p>
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/exchanges.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/exchanges.ejs
index 58589d5d6fa99272e9c717da3467a035a037998f..b7760ee468a045886d483b6ea5cccc3de57c9d3d 100644 (file)
@@ -1,10 +1,9 @@
 <h1>Exchanges</h1>
 <div class="section">
-  <h2>All exchanges</h2>
-  <div class="hider">
-<%= filter_ui(exchanges) %>
-  <div class="updatable">
-<% if (exchanges.length > 0) { %>
+   <%= pagiante_ui(exchanges, 'exchanges') %>
+</div> 
+<div class="updatable">
+<% if (exchanges.items.length > 0) { %>
 <table class="list">
  <thead>
   <tr>
@@ -37,8 +36,8 @@
  </thead>
  <tbody>
 <%
-  for (var i = 0; i < exchanges.length; i++) {
-    var exchange = exchanges[i];
+  for (var i = 0; i < exchanges.items.length; i++) {
+    var exchange = exchanges.items[i];
 %>
   <tr<%= alt_rows(i, exchange.arguments)%>>
 <% if (vhosts_interesting) { %>
similarity index 91%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/node.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/node.ejs
index adde0cfca32f0e0778a4e7eb07bf63e4a5a3979f..8cd71bb1683022367e0f06c8044728a7568e4ee0 100644 (file)
        ['Write', 'io_write_count'],
        ['Seek', 'io_seek_count'],
        ['Sync', 'io_sync_count'],
-       ['Reopen', 'io_reopen_count']],
+       ['File handle reopen', 'io_reopen_count'],
+       ['File handle open attempt', 'io_file_handle_open_attempt_count']],
       fmt_rate, fmt_rate_axis, true, 'I/O operations', 'io-operations') %>
 
   <%= rates_chart_or_text('persister-io-stats-bytes', node,
       [['Read', 'io_read_avg_time'],
        ['Write', 'io_write_avg_time'],
        ['Seek', 'io_seek_avg_time'],
-       ['Sync', 'io_sync_avg_time']],
+       ['Sync', 'io_sync_avg_time'],
+       ['File handle open attempt', 'io_file_handle_open_attempt_avg_time']],
       fmt_ms, fmt_ms, false, 'I/O average time per operation') %>
 </div>
 </div>
   </table>
   </div>
 
+  <%= rates_chart_or_text('advanced-gc-stats-count', node,
+      [['GC', 'gc_num']],
+      fmt_rate, fmt_rate_axis, true, 'GC operations', 'gc-operations') %>
+
+  <%= rates_chart_or_text('advanced-gc-bytes-stats-count', node,
+      [['GC bytes reclaimed', 'gc_bytes_reclaimed']],
+      fmt_rate, fmt_rate_axis, true, 'GC bytes reclaimed', 'gc-bytes') %>
+
+  <%= rates_chart_or_text('advanced-context-switches-stats-count', node,
+      [['Context switches', 'context_switches']],
+      fmt_rate, fmt_rate_axis, true, 'Context switch operations', 'context-switches-operations') %>
+
 <h3>All applications</h3>
 <table class="list">
     <tr>
similarity index 88%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/overview.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/overview.ejs
index 86ff6ac17f395236049da17a45b56fb5d3218986..42bac235d7aa2843144f0a61c31bd1ce045d78ec 100644 (file)
 <div class="section-hidden administrator-only">
 <h2>Import / export definitions</h2>
 <div class="hider">
-  <form action="api/definitions" method="post" enctype="multipart/form-data">
+  <form  method="post" enctype="multipart/form-data">
     <table class="two-col-layout">
       <tr>
         <td>
           </p>
         </td>
       </tr>
+      <tr>
+        <td>
+            <% if (vhosts_interesting) { %>
+          <label>Virtual host:</label>
+            <select name="vhost-download">
+              <option value="all">All</option>
+              <% for (var i = 0; i < vhosts.length; i++) { %>
+              <option value="<%= fmt_string(vhosts[i].name) %>"><%= fmt_string(vhosts[i].name) %></option>
+              <% } %>
+            </select> <span class="help" id="export-definitions-vhost"></span>
+<% } else { %>
+            <input type="hidden" name="vhost" value="all"/>
+<% } %>
+        </td>
+        <td>
+         <% if (vhosts_interesting) { %>
+          <label>Virtual host:</label>
+            <select name="vhost-upload">
+              <option value="all">All</option>
+              <% for (var i = 0; i < vhosts.length; i++) { %>
+              <option value="<%= fmt_string(vhosts[i].name) %>"><%= fmt_string(vhosts[i].name) %></option>
+              <% } %>
+            </select> <span class="help" id="import-definitions-vhost"></span>
+
+<% } else { %>
+            <input type="hidden" name="vhost" value="all"/>
+<% } %>
+        </td>
+      </tr>
       <tr>
         <td>
           <p>
         </td>
         <td>
           <p>
-            <input type="hidden" name="redirect" value="../#/import-succeeded"/>
-            <input type="submit" value="Upload broker definitions"/>
+            <input type="hidden" name="redirect" value="../../#/import-succeeded"/>
+            <input type="submit" value="Upload broker definitions"  onclick="submit_import($(this).closest('form')[0])" />
             <span class="help" id="import-definitions"></span>
           </p>
         </td>
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/policies.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/policies.ejs
index 9e4e3c2c3f263cd3a16aa2b3c20721e1f34faff4..1816f7a6a7b0451679a4f896c33ae4fc0adbb892 100644 (file)
 <% if (vhosts_interesting) { %>
      <td><%= fmt_string(policy.vhost) %></td>
 <% } %>
+<% if (is_user_policymaker) { %>
      <td><%= link_policy(policy.vhost, policy.name) %></td>
+<% } else { %>
+     <td><%= fmt_string(policy.name) %></td>
+<% } %>
      <td><%= fmt_string(policy.pattern) %></td>
      <td><%= fmt_string(policy['apply-to']) %></td>
      <td><%= fmt_table_short(policy.definition) %></td>
@@ -42,6 +46,7 @@
   </div>
   </div>
 </div>
+<% if (is_user_policymaker) { %>
 
 <div class="section-hidden">
   <h2>Add / update a policy</h2>
     </form>
   </div>
 </div>
+<% } %>
similarity index 89%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/queue.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/queue.ejs
index 46fcd4426c81ca59c371f5945524086c71032fa3..23cad6584dfb2378306fec71cb1b599268152786 100644 (file)
   </div>
 </div>
 
-<% if (user_policymaker) { %>
+<% if (is_user_policymaker) { %>
 <div class="section-hidden">
   <h2>Move messages</h2>
   <div class="hider">
     </form>
   </div>
 </div>
+
+<% if(queue.reductions || queue.garbage_collection) { %>
+<div class="section-hidden">
+<h2>Runtime Metrics (Advanced)</h2>
+ <div class="hider updatable">
+ <%= data_reductions('reductions-rates-queue', queue) %>
+ <table class="facts">
+    <% if (queue.garbage_collection.min_bin_vheap_size) { %>
+        <tr>
+        <th>Minimum binary virtual heap size in words (min_bin_vheap_size)</th>
+        <td><%= queue.garbage_collection.min_bin_vheap_size %></td>
+        </tr>
+    <% } %>
+
+    <% if (queue.garbage_collection.min_heap_size) { %>
+        <tr>
+        <th>Minimum heap size in words (min_heap_size)</th>
+        <td><%= queue.garbage_collection.min_heap_size %></td>
+        </tr>
+    <% } %>
+
+    <% if (queue.garbage_collection.fullsweep_after) { %>
+        <tr>
+        <th>Maximum generational collections before fullsweep (fullsweep_after)</th>
+        <td><%= queue.garbage_collection.fullsweep_after %></td>
+        </tr>
+    <% } %>
+
+    <% if (queue.garbage_collection.minor_gcs) { %>
+        <tr>
+        <th>Number of minor GCs (minor_gcs)</th>
+        <td><%= queue.garbage_collection.minor_gcs %></td>
+        </tr>
+    <% } %>
+ </table>
+ </div>
+</div>
+
+<% } %>
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/queues.ejs
rename to deps/rabbitmq_management/priv/www/js/tmpl/queues.ejs
index 0c3fed87aa7ce47d433c8a3f7bc554a38be58ca3..f0a78f000adbe70ba5b18d02d84d6c2ff435f964 100644 (file)
@@ -1,10 +1,9 @@
 <h1>Queues</h1>
 <div class="section">
-  <h2>All queues</h2>
-  <div class="hider">
-<%= filter_ui(queues) %>
-  <div class="updatable">
-<% if (queues.length > 0) { %>
+  <%= pagiante_ui(queues, 'queues') %>
+</div> 
+<div class="updatable">
+<% if (queues.items.length > 0) { %>
 <table class="list">
  <thead>
   <tr>
@@ -90,8 +89,8 @@
  </thead>
  <tbody>
 <%
-  for (var i = 0; i < queues.length; i++) {
-    var queue = queues[i];
+  for (var i = 0; i < queues.items.length; i++) {
+    var queue = queues.items[i];
 %>
   <tr<%= alt_rows(i, queue.arguments) %>>
 <% if (vhosts_interesting) { %>
diff --git a/deps/rabbitmq_management/rabbitmq-components.mk b/deps/rabbitmq_management/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_app.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_app.erl
index b5f4b6b9504f19c6a4c06503c6186e8290203dd2..bde338bcd62b7a969c0a07e213c9f9d87bb6ec28 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_app).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_channel_stats_collector.erl b/deps/rabbitmq_management/src/rabbit_mgmt_channel_stats_collector.erl
new file mode 100644 (file)
index 0000000..6bd222f
--- /dev/null
@@ -0,0 +1,125 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_channel_stats_collector).
+
+-include("rabbit_mgmt.hrl").
+-include("rabbit_mgmt_metrics.hrl").
+-include("rabbit_mgmt_event_collector.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-behaviour(gen_server2).
+
+-export([start_link/0]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+         code_change/3, handle_pre_hibernate/1]).
+
+-export([prioritise_cast/3]).
+
+-import(rabbit_misc, [pget/3]).
+-import(rabbit_mgmt_db, [pget/2, id_name/1, id/2, lookup_element/2]).
+
+prioritise_cast({event, #event{type = channel_stats}}, Len,
+                #state{max_backlog = MaxBacklog} = _State)
+  when Len > MaxBacklog ->
+    drop;
+prioritise_cast(_Msg, _Len, _State) ->
+    0.
+
+%% See the comment on rabbit_mgmt_db for the explanation of
+%% events and stats.
+
+%% Although this gen_server could process all types of events through the
+%% handle_cast, rabbit_mgmt_db_handler (in the management agent) forwards
+%% only the non-priority events channel_stats
+%%----------------------------------------------------------------------------
+%% API
+%%----------------------------------------------------------------------------
+
+start_link() ->
+    case gen_server2:start_link({global, ?MODULE}, ?MODULE, [], []) of
+        {ok, Pid} -> register(?MODULE, Pid), %% [1]
+                     {ok, Pid};
+        Else      -> Else
+    end.
+%% [1] For debugging it's helpful to locally register the name too
+%% since that shows up in places global names don't.
+
+%%----------------------------------------------------------------------------
+%% Internal, gen_server2 callbacks
+%%----------------------------------------------------------------------------
+
+init([]) ->
+    {ok, Interval} = application:get_env(rabbit, collect_statistics_interval),
+    {ok, RatesMode} = application:get_env(rabbitmq_management, rates_mode),
+    {ok, MaxBacklog} = application:get_env(rabbitmq_management,
+                                           stats_event_max_backlog),
+    process_flag(priority, high),
+    rabbit_log:info("Statistics channel stats collector started.~n"),
+    {ok, reset_lookups(
+           #state{interval               = Interval,
+                  rates_mode             = RatesMode,
+                  max_backlog            = MaxBacklog}), hibernate,
+     {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
+
+%% Used in rabbit_mgmt_test_db where we need guarantees events have
+%% been handled before querying
+handle_call({event, Event = #event{reference = none}}, _From, State) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    reply(ok, State);
+
+handle_call(_Request, _From, State) ->
+    reply(not_understood, State).
+
+%% Only handle events that are real.
+handle_cast({event, Event = #event{reference = none}}, State) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    noreply(State);
+
+handle_cast({event, Event = #event{reference = Ref}},
+            State = #state{event_refresh_ref = Ref}) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    noreply(State);
+
+handle_cast(_Request, State) ->
+    noreply(State).
+
+handle_info(_Info, State) ->
+    noreply(State).
+
+terminate(_Arg, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+reply(Reply, NewState) -> {reply, Reply, NewState, hibernate}.
+noreply(NewState) -> {noreply, NewState, hibernate}.
+
+reset_lookups(State) ->
+    State#state{lookups = [{exchange, fun rabbit_exchange:lookup/1},
+                           {queue,    fun rabbit_amqqueue:lookup/1}]}.
+
+handle_pre_hibernate(State) ->
+    %% rabbit_event can end up holding on to some memory after a busy
+    %% workout, but it's not a gen_server so we can't make it
+    %% hibernate. The best we can do is forcibly GC it here (if
+    %% rabbit_mgmt_db is hibernating the odds are rabbit_event is
+    %% quiescing in some way too).
+    rpc:multicall(
+      rabbit_mnesia:cluster_nodes(running), rabbit_mgmt_db_handler, gc, []),
+    {hibernate, State}.
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_cors.erl b/deps/rabbitmq_management/src/rabbit_mgmt_cors.erl
new file mode 100644 (file)
index 0000000..5226241
--- /dev/null
@@ -0,0 +1,88 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Plugin.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+%% Useful documentation about CORS:
+%% * https://tools.ietf.org/html/rfc6454
+%% * https://www.w3.org/TR/cors/
+%% * https://staticapps.org/articles/cross-domain-requests-with-cors/
+-module(rabbit_mgmt_cors).
+
+-export([set_headers/2]).
+
+%% We don't set access-control-max-age because we currently have
+%% no way to know which headers apply to the whole resource. We
+%% only know for the next request.
+%%
+%% NOTE(review): the comment above appears stale — handle_options/2
+%% below does set access-control-max-age for preflight requests.
+set_headers(ReqData, Module) ->
+    %% Always append "origin" to the Vary header so caches keep
+    %% per-origin copies of the response.
+    ReqData1 = case wrq:get_resp_header("vary", ReqData) of
+        undefined -> wrq:set_resp_header("vary", "origin", ReqData);
+        VaryValue -> wrq:set_resp_header("vary", VaryValue ++ ", origin", ReqData)
+    end,
+    case match_origin(ReqData1) of
+        false ->
+            ReqData1;
+        Origin ->
+            %% Preflight (OPTIONS) requests additionally get max-age,
+            %% allow-methods and allow-headers via handle_options/2.
+            ReqData2 = case wrq:method(ReqData1) of
+                'OPTIONS' -> handle_options(ReqData1, Module);
+                _         -> ReqData1
+            end,
+            wrq:set_resp_headers([
+                {"access-control-allow-origin",      Origin},
+                {"access-control-allow-credentials", "true"}
+            ], ReqData2)
+    end.
+
+%% Set max-age from configuration (default: 30 minutes).
+%% Set allow-methods from what is defined in Module:allowed_methods/2.
+%% Set allow-headers to the same as the request (accept all headers).
+handle_options(ReqData, Module) ->
+    MaxAge = application:get_env(rabbitmq_management, cors_max_age, 1800),
+    {Methods, _, _} = Module:allowed_methods(undefined, undefined),
+    AllowMethods = string:join([atom_to_list(M) || M <- Methods], ", "),
+    ReqHeaders = wrq:get_req_header("access-control-request-headers", ReqData),
+    %% MaxAge is only 'undefined' if cors_max_age was explicitly set so;
+    %% the get_env/3 default above is 1800 otherwise.
+    %% NOTE(review): in that case an empty list [] is placed as an element
+    %% of the header list given to wrq:set_resp_headers/2 — confirm wrq
+    %% tolerates a non-tuple element there.
+    MaxAgeHd = case MaxAge of
+        undefined -> [];
+        _ -> {"access-control-max-age", integer_to_list(MaxAge)}
+    end,
+    MaybeAllowHeaders = case ReqHeaders of
+        undefined -> [];
+        _ -> [{"access-control-allow-headers", ReqHeaders}]
+    end,
+    wrq:set_resp_headers([MaxAgeHd,
+        {"access-control-allow-methods", AllowMethods}
+        |MaybeAllowHeaders], ReqData).
+
+%% If the origin header is missing or "null", we disable CORS.
+%% Otherwise, we only enable it if the origin is found in the
+%% cors_allow_origins configuration variable, or if "*" is (it
+%% allows all origins).
+%%
+%% Returns the origin string to echo back, or 'false' to disable CORS.
+match_origin(ReqData) ->
+    case wrq:get_req_header("origin", ReqData) of
+        undefined -> false;
+        "null" -> false;
+        Origin ->
+            AllowedOrigins = application:get_env(rabbitmq_management,
+                cors_allow_origins, []),
+            case lists:member(Origin, AllowedOrigins) of
+                true ->
+                    Origin;
+                false ->
+                    %% Maybe the configuration explicitly allows "*".
+                    case lists:member("*", AllowedOrigins) of
+                        true  -> Origin;
+                        false -> false
+                    end
+            end
+    end.
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_db.erl b/deps/rabbitmq_management/src/rabbit_mgmt_db.erl
new file mode 100644 (file)
index 0000000..8692bca
--- /dev/null
@@ -0,0 +1,702 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Plugin.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_db).
+
+-include("rabbit_mgmt.hrl").
+-include("rabbit_mgmt_metrics.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-behaviour(gen_server2).
+
+-export([start_link/0]).
+-export([pget/2, id_name/1, id/2, lookup_element/2]).
+
+-export([augment_exchanges/3, augment_queues/3,
+         augment_nodes/2, augment_vhosts/2,
+         get_channel/2, get_connection/2,
+         get_all_channels/1, get_all_connections/1,
+         get_all_consumers/0, get_all_consumers/1,
+         get_overview/2, get_overview/1]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+         code_change/3, handle_pre_hibernate/1,
+         format_message_queue/2]).
+
+-import(rabbit_misc, [pget/3]).
+
+%% The management database listens to events broadcast via the
+%% rabbit_event mechanism, and responds to queries from the various
+%% rabbit_mgmt_wm_* modules. It handles several kinds of events, and
+%% slices and dices them in various ways.
+%%
+%% There are three types of events coming in: created (when an object
+%% is created, containing immutable facts about it), stats (emitted on
+%% a timer, with mutable facts about the object), and deleted (just
+%% containing the object's ID). In this context "objects" means
+%% connections, channels, exchanges, queues, consumers, vhosts and
+%% nodes. Note that we do not care about users, permissions, bindings,
+%% parameters or policies.
+%%
+%% Connections and channels are identified by pids. Queues and
+%% exchanges are identified by names (which are #resource{}s). VHosts
+%% and nodes are identified by names which are binaries. And consumers
+%% are identified by {ChPid, QName, CTag}.
+%%
+%% The management database records the "created" events for
+%% connections, channels and consumers, and can thus be authoritative
+%% about those objects. For queues, exchanges and nodes we go to
+%% Mnesia to find out the immutable details of the objects.
+%%
+%% For everything other than consumers, the database can then augment
+%% these immutable details with stats, as the object changes. (We
+%% never emit anything very interesting about consumers).
+%%
+%% Stats on the inbound side are referred to as coarse- and
+%% fine-grained. Fine grained statistics are the message rates
+%% maintained by channels and associated with tuples: {publishing
+%% channel, exchange}, {publishing channel, exchange, queue} and
+%% {queue, consuming channel}. Coarse grained stats are everything
+%% else and are associated with only one object, not a tuple.
+%%
+%% Within the management database though we rearrange things a bit: we
+%% refer to basic stats, simple stats and detail stats.
+%%
+%% Basic stats are those coarse grained stats for which we do not
+%% retain a history and do not perform any calculations -
+%% e.g. connection.state or channel.prefetch_count.
+%%
+%% Simple stats are those for which we do history / calculations which
+%% are associated with one object *after aggregation* - so these might
+%% originate with coarse grained stats - e.g. connection.send_oct or
+%% queue.messages_ready. But they might also originate from fine
+%% grained stats which have been aggregated - e.g. the message rates
+%% for a vhost or queue.
+%%
+%% Finally, detailed stats are those for which we do history /
+%% calculations which are associated with two objects. These
+%% have to have originated as fine grained stats, but can still have
+%% been aggregated.
+%%
+%% Created events and basic stats are stored in ETS tables by object.
+%% Simple and detailed stats (which only differ depending on how
+%% they're keyed) are stored in aggregated stats tables
+%% (see rabbit_mgmt_stats.erl and include/rabbit_mgmt_metrics.hrl)
+%%
+%% Keys from simple and detailed stats are aggregated in several
+%% records, stored in different ETS tables. We store a base counter
+%% for everything that happened before the samples we have kept,
+%% and a series of records which add the timestamp as part of the key.
+%%
+%% Each ETS aggregated table has a GC process with a timer to periodically
+%% aggregate old samples in the base.
+%%
+%% We also have old_stats to let us calculate instantaneous
+%% rates, in order to apportion simple / detailed stats into time
+%% slices as they come in. These instantaneous rates are not returned
+%% in response to any query, the rates shown in the API are calculated
+%% at query time. old_stats contains both coarse and fine
+%% entries. Coarse entries are pruned when the corresponding object is
+%% deleted, and fine entries are pruned when the emitting channel is
+%% closed, and whenever we receive new fine stats from a channel. So
+%% it's quite close to being a cache of "the previous stats we
+%% received".
+%%
+%% Overall the object is to do all the aggregation when events come
+%% in, and make queries be simple lookups as much as possible. One
+%% area where this does not happen is the global overview - which is
+%% aggregated from vhost stats at query time since we do not want to
+%% reveal anything about other vhosts to unprivileged users.
+
+%%----------------------------------------------------------------------------
+%% API
+%%----------------------------------------------------------------------------
+
+%% Start the management DB globally registered so a single instance
+%% serves the whole cluster.
+start_link() ->
+    case gen_server2:start_link({global, ?MODULE}, ?MODULE, [], []) of
+        {ok, Pid} -> register(?MODULE, Pid), %% [1]
+                     {ok, Pid};
+        Else      -> Else
+    end.
+%% [1] For debugging it's helpful to locally register the name too
+%% since that shows up in places global names don't.
+
+%% R = Ranges, M = Mode
+%% The second argument of safe_call/2 is the fallback returned if the
+%% server cannot be reached even after a restart attempt.
+augment_exchanges(Xs, R, M) -> safe_call({augment_exchanges, Xs, R, M}, Xs).
+augment_queues(Qs, R, M)    -> safe_call({augment_queues, Qs, R, M}, Qs).
+augment_vhosts(VHosts, R)   -> safe_call({augment_vhosts, VHosts, R}, VHosts).
+augment_nodes(Nodes, R)     -> safe_call({augment_nodes, Nodes, R}, Nodes).
+
+get_channel(Name, R)        -> safe_call({get_channel, Name, R}, not_found).
+get_connection(Name, R)     -> safe_call({get_connection, Name, R}, not_found).
+
+get_all_channels(R)         -> safe_call({get_all_channels, R}).
+get_all_connections(R)      -> safe_call({get_all_connections, R}).
+
+get_all_consumers()         -> safe_call({get_all_consumers, all}).
+get_all_consumers(V)        -> safe_call({get_all_consumers, V}).
+
+get_overview(User, R)       -> safe_call({get_overview, User, R}).
+get_overview(R)             -> safe_call({get_overview, all, R}).
+
+safe_call(Term)          -> safe_call(Term, []).
+safe_call(Term, Default) -> safe_call(Term, Default, 1).
+
+%% See rabbit_mgmt_sup_sup for a discussion of the retry logic.
+%% On exit (server down) we ask the supervisor to restart it and retry;
+%% once Retries is exhausted we give up and return Default.
+safe_call(Term, Default, Retries) ->
+    rabbit_misc:with_exit_handler(
+      fun () ->
+              case Retries of
+                  0 -> Default;
+                  _ -> rabbit_mgmt_sup_sup:start_child(),
+                       safe_call(Term, Default, Retries - 1)
+              end
+      end,
+      fun () -> gen_server2:call({global, ?MODULE}, Term, infinity) end).
+
+%%----------------------------------------------------------------------------
+%% Internal, gen_server2 callbacks
+%%----------------------------------------------------------------------------
+
+%% Server state: 'interval' is the broker's statistics emission
+%% interval (collect_statistics_interval), used when formatting samples.
+-record(state, {interval}).
+
+init([]) ->
+    %% When Rabbit is overloaded, it's usually especially important
+    %% that the management plugin work.
+    process_flag(priority, high),
+    {ok, Interval} = application:get_env(rabbit, collect_statistics_interval),
+    rabbit_log:info("Statistics database started.~n"),
+    {ok, #state{interval = Interval}, hibernate,
+     {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
+
+%% Query handlers: 'basic' mode augments objects with list-level stats,
+%% 'full' additionally attaches per-object detail stats.
+handle_call({augment_exchanges, Xs, Ranges, basic}, _From,
+            #state{interval = Interval} = State) ->
+    reply(list_exchange_stats(Ranges, Xs, Interval), State);
+
+handle_call({augment_exchanges, Xs, Ranges, full}, _From,
+            #state{interval = Interval} = State) ->
+    reply(detail_exchange_stats(Ranges, Xs, Interval), State);
+
+handle_call({augment_queues, Qs, Ranges, basic}, _From,
+            #state{interval = Interval} = State) ->
+    reply(list_queue_stats(Ranges, Qs, Interval), State);
+
+handle_call({augment_queues, Qs, Ranges, full}, _From,
+            #state{interval = Interval} = State) ->
+    reply(detail_queue_stats(Ranges, Qs, Interval), State);
+
+handle_call({augment_vhosts, VHosts, Ranges}, _From,
+            #state{interval = Interval} = State) ->
+    reply(vhost_stats(Ranges, VHosts, Interval), State);
+
+handle_call({augment_nodes, Nodes, Ranges}, _From,
+            #state{interval = Interval} = State) ->
+    {reply, node_stats(Ranges, Nodes, Interval), State};
+
+%% Single-object lookups: resolve the "created" event by name first,
+%% then attach detail stats to it.
+handle_call({get_channel, Name, Ranges}, _From,
+            #state{interval = Interval} = State) ->
+    case created_event(Name, channel_stats) of
+        not_found -> reply(not_found, State);
+        Ch        -> [Result] = detail_channel_stats(Ranges, [Ch], Interval),
+                     reply(Result, State)
+    end;
+
+handle_call({get_connection, Name, Ranges}, _From,
+            #state{interval = Interval} = State) ->
+    case created_event(Name, connection_stats) of
+        not_found -> reply(not_found, State);
+        Conn      -> [Result] = connection_stats(Ranges, [Conn], Interval),
+                     reply(Result, State)
+    end;
+
+handle_call({get_all_channels, Ranges}, _From,
+            #state{interval = Interval} = State) ->
+    Chans = created_events(channel_stats),
+    reply(list_channel_stats(Ranges, Chans, Interval), State);
+
+handle_call({get_all_connections, Ranges}, _From,
+            #state{interval = Interval} = State) ->
+    Conns = created_events(connection_stats),
+    reply(connection_stats(Ranges, Conns, Interval), State);
+
+handle_call({get_all_consumers, VHost}, _From, State) ->
+    {reply, [augment_msg_stats(augment_consumer(Obj)) ||
+                Obj <- consumers_by_queue_and_vhost(VHost)], State};
+
+%% Overview is aggregated from vhost stats at query time so that
+%% unprivileged users only see vhosts visible to them.
+handle_call({get_overview, User, Ranges}, _From,
+            #state{interval = Interval} = State) ->
+    VHosts = case User of
+                 all -> rabbit_vhost:list();
+                 _   -> rabbit_mgmt_util:list_visible_vhosts(User)
+             end,
+    %% TODO: there's no reason we can't do an overview of send_oct and
+    %% recv_oct now!
+    MessageStats = [overview_sum(Type, VHosts) ||
+                       Type <- [fine_stats, deliver_get, queue_msg_rates]],
+    QueueStats = [overview_sum(queue_msg_counts, VHosts)],
+    F = case User of
+            all -> fun (L) -> length(L) end;
+            _   -> fun (L) -> length(rabbit_mgmt_util:filter_user(L, User)) end
+        end,
+    %% Filtering out the user's consumers would be rather expensive so let's
+    %% just not show it
+    Consumers = case User of
+                    all -> [{consumers, ets:info(consumers_by_queue, size)}];
+                    _   -> []
+                end,
+    ObjectTotals = Consumers ++
+        [{queues,      length([Q || V <- VHosts,
+                                    Q <- rabbit_amqqueue:list(V)])},
+         {exchanges,   length([X || V <- VHosts,
+                                    X <- rabbit_exchange:list(V)])},
+         {connections, F(created_events(connection_stats))},
+         {channels,    F(created_events(channel_stats))}],
+    FormatMessage = format_samples(Ranges, MessageStats, Interval),
+    FormatQueue = format_samples(Ranges, QueueStats, Interval),
+    %% The summed stats are temporary; free them once formatted.
+    [rabbit_mgmt_stats:free(S) || {S, _, _} <- MessageStats],
+    [rabbit_mgmt_stats:free(S) || {S, _, _} <- QueueStats],
+    reply([{message_stats, FormatMessage},
+           {queue_totals,  FormatQueue},
+           {object_totals, ObjectTotals},
+           {statistics_db_event_queue, event_queue()}],
+          State);
+
+handle_call(_Request, _From, State) ->
+    reply(not_understood, State).
+
+handle_cast(_Request, State) ->
+    noreply(State).
+
+handle_info(_Info, State) ->
+    noreply(State).
+
+terminate(_Arg, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+%% Hibernate between messages to keep memory use down.
+reply(Reply, NewState) -> {reply, Reply, NewState, hibernate}.
+noreply(NewState) -> {noreply, NewState, hibernate}.
+
+handle_pre_hibernate(State) ->
+    %% rabbit_event can end up holding on to some memory after a busy
+    %% workout, but it's not a gen_server so we can't make it
+    %% hibernate. The best we can do is forcibly GC it here (if
+    %% rabbit_mgmt_db is hibernating the odds are rabbit_event is
+    %% quiescing in some way too).
+    rpc:multicall(
+      rabbit_mnesia:cluster_nodes(running), rabbit_mgmt_db_handler, gc, []),
+    {hibernate, State}.
+
+format_message_queue(Opt, MQ) -> rabbit_misc:format_message_queue(Opt, MQ).
+
+%%----------------------------------------------------------------------------
+%% Internal, utilities
+%%----------------------------------------------------------------------------
+
+%% pget/2 defaults to 'unknown' (not 'undefined') when the key is missing.
+pget(Key, List) -> pget(Key, List, unknown).
+
+%% id_name() and id() are for use when handling events, id_lookup()
+%% for when augmenting. The difference is that when handling events a
+%% queue name will be a resource, but when augmenting we will be
+%% passed a queue proplist that will already have been formatted -
+%% i.e. it will have name and vhost keys.
+id_name(node_stats)       -> name;
+id_name(node_node_stats)  -> route;
+id_name(vhost_stats)      -> name;
+id_name(queue_stats)      -> name;
+id_name(exchange_stats)   -> name;
+id_name(channel_stats)    -> pid;
+id_name(connection_stats) -> pid.
+
+id(Type, List) -> pget(id_name(Type), List).
+
+%% Reconstruct a #resource{} id from a formatted proplist for queues
+%% and exchanges; other types use the raw id directly.
+id_lookup(queue_stats, List) ->
+    rabbit_misc:r(pget(vhost, List), queue, pget(name, List));
+id_lookup(exchange_stats, List) ->
+    rabbit_misc:r(pget(vhost, List), exchange, pget(name, List));
+id_lookup(Type, List) ->
+    id(Type, List).
+
+lookup_element(Table, Key) -> lookup_element(Table, Key, 2).
+
+%% Returns [] rather than crashing when the table or key is absent.
+lookup_element(Table, Key, Pos) ->
+    try ets:lookup_element(Table, Key, Pos)
+    catch error:badarg -> []
+    end.
+
+%%----------------------------------------------------------------------------
+%% Internal, querying side
+%%----------------------------------------------------------------------------
+
+%% {SubListKey, AggregatedStatsTable, KeyPositionFun} specs consumed by
+%% detail_stats_fun/3; the fun picks which side of a compound key this
+%% object occupies.
+-define(QUEUE_DETAILS,
+        {queue_stats, [{incoming,   queue_exchange_stats, fun first/1},
+                       {deliveries, channel_queue_stats, fun second/1}]}).
+
+-define(EXCHANGE_DETAILS,
+        {exchange_stats, [{incoming, channel_exchange_stats, fun second/1},
+                          {outgoing, queue_exchange_stats, fun second/1}]}).
+
+-define(CHANNEL_DETAILS,
+        {channel_stats, [{publishes,  channel_exchange_stats, fun first/1},
+                         {deliveries, channel_queue_stats, fun first/1}]}).
+
+-define(NODE_DETAILS,
+        {node_stats, [{cluster_links, node_node_stats, fun first/1}]}).
+
+%% Build match patterns placing Id on one side of a compound key.
+first(Id)  ->
+    {Id, '_'}.
+second(Id) ->
+    {'_', Id}.
+
+%% Queue listings additionally correct memory figures for hibernated
+%% queue processes (see adjust_hibernated_memory_use/1).
+list_queue_stats(Ranges, Objs, Interval) ->
+    adjust_hibernated_memory_use(
+      merge_queue_stats(Objs, queue_funs(Ranges, Interval))).
+
+detail_queue_stats(Ranges, Objs, Interval) ->
+    adjust_hibernated_memory_use(
+      merge_queue_stats(Objs,
+                        [consumer_details_fun(
+                           fun (Props) -> id_lookup(queue_stats, Props) end,
+                           consumers_by_queue),
+                         detail_stats_fun(Ranges, ?QUEUE_DETAILS, Interval)
+                         | queue_funs(Ranges, Interval)])).
+
+%% Augmentation funs common to basic and detailed queue queries.
+queue_funs(Ranges, Interval) ->
+    [basic_stats_fun(queue_stats),
+     simple_stats_fun(Ranges, queue_stats, Interval),
+     augment_queue_msg_stats_fun()].
+
+list_exchange_stats(Ranges, Objs, Interval) ->
+    merge_stats(Objs, [simple_stats_fun(Ranges, exchange_stats, Interval),
+                       augment_msg_stats_fun()]).
+
+detail_exchange_stats(Ranges, Objs, Interval) ->
+    merge_stats(Objs, [simple_stats_fun(Ranges, exchange_stats, Interval),
+                       detail_stats_fun(Ranges, ?EXCHANGE_DETAILS, Interval),
+                       augment_msg_stats_fun()]).
+
+connection_stats(Ranges, Objs, Interval) ->
+    merge_stats(Objs, [basic_stats_fun(connection_stats),
+                       simple_stats_fun(Ranges, connection_stats, Interval),
+                       augment_msg_stats_fun()]).
+
+list_channel_stats(Ranges, Objs, Interval) ->
+    merge_stats(Objs, [basic_stats_fun(channel_stats),
+                       simple_stats_fun(Ranges, channel_stats, Interval),
+                       augment_msg_stats_fun()]).
+
+detail_channel_stats(Ranges, Objs, Interval) ->
+    merge_stats(Objs, [basic_stats_fun(channel_stats),
+                       simple_stats_fun(Ranges, channel_stats, Interval),
+                       consumer_details_fun(
+                         fun (Props) -> pget(pid, Props) end,
+                         consumers_by_channel),
+                       detail_stats_fun(Ranges, ?CHANNEL_DETAILS, Interval),
+                       augment_msg_stats_fun()]).
+
+vhost_stats(Ranges, Objs, Interval) ->
+    merge_stats(Objs, [simple_stats_fun(Ranges, vhost_stats, Interval)]).
+
+node_stats(Ranges, Objs, Interval) ->
+    merge_stats(Objs, [basic_stats_fun(node_stats),
+                       simple_stats_fun(Ranges, node_stats, Interval),
+                       detail_and_basic_stats_fun(
+                         node_node_stats, Ranges, ?NODE_DETAILS, Interval)]).
+
+merge_stats(Objs, Funs) ->
+    %% Don't pass the props to the Fun in combine, as it contains the results
+    %% from previous funs and:
+    %% * augment_msg_stats_fun() only needs the original object. Otherwise,
+    %%      must fold over a very longs list
+    %% * All other funs only require the Type that is in the original Obj
+    [combine_all_funs(Funs, Obj, Obj) || Obj <- Objs].
+
+%% Fold each augmentation fun over the original Obj, accumulating the
+%% combined proplist in Props.
+combine_all_funs([Fun | Funs], Obj, Props) ->
+    combine_all_funs(Funs, Obj, combine(Fun(Obj), Props));
+combine_all_funs([], _Obj, Props) ->
+    Props.
+
+%% Like merge_stats/2 but tags each result with the queue pid (needed
+%% by adjust_hibernated_memory_use/1) and strips queue pids from the
+%% formatted output.
+merge_queue_stats(Objs, Funs) ->
+    %% Don't pass the props to the Fun in combine, as it contains the results
+    %% from previous funs and:
+    %% * augment_msg_stats_fun() only needs the original object. Otherwise,
+    %%      must fold over a very longs list
+    %% * All other funs only require the Type that is in the original Obj
+    [begin
+         Pid = pget(pid, Obj),
+         {Pid, combine_all_funs(Funs, Obj, rabbit_mgmt_format:strip_queue_pids(Obj))}
+     end || Obj <- Objs].
+
+%% Merge two stats proplists; whichever side carries a live 'state'
+%% value wins, the other side's 'state' key is dropped.
+combine(New, Old) ->
+    case pget(state, Old) of
+        unknown -> New ++ Old;
+        live    -> New ++ lists:keydelete(state, 1, Old);
+        _       -> lists:keydelete(state, 1, New) ++ Old
+    end.
+
+%% i.e. the non-calculated stats
+basic_stats_fun(Type) ->
+    fun (Props) ->
+            Id = id_lookup(Type, Props),
+            lookup_element(Type, {Id, stats})
+    end.
+
+%% i.e. coarse stats, and fine stats aggregated up to a single number per thing
+simple_stats_fun(Ranges, Type, Interval) ->
+    {Msg, Other} = read_simple_stats(Type),
+    fun (Props) ->
+            Id = id_lookup(Type, Props),
+            OtherStats = format_samples(Ranges, {Id, Other}, Interval),
+            case format_samples(Ranges, {Id, Msg}, Interval) of
+                [] ->
+                    OtherStats;
+                MsgStats ->
+                    [{message_stats, MsgStats} | OtherStats]
+            end
+    end.
+
+%% i.e. fine stats that are broken out per sub-thing
+detail_stats_fun(Ranges, {IdType, FineSpecs}, Interval) ->
+    fun (Props) ->
+            Id = id_lookup(IdType, Props),
+            [detail_stats(Ranges, Name, AggregatedStatsType, IdFun(Id), Interval)
+             || {Name, AggregatedStatsType, IdFun} <- FineSpecs]
+    end.
+
+%% This does not quite do the same as detail_stats_fun +
+%% basic_stats_fun; the basic part here assumes compound keys (like
+%% detail stats) but non-calculated (like basic stats). Currently the
+%% only user of that is node-node stats.
+%%
+%% We also assume that FineSpecs is single length here (at [1]).
+detail_and_basic_stats_fun(Type, Ranges, {IdType, FineSpecs}, Interval) ->
+    F = detail_stats_fun(Ranges, {IdType, FineSpecs}, Interval),
+    fun (Props) ->
+            Id = id_lookup(IdType, Props),
+            BasicStats = ets:select(Type, [{{{{'$1', '$2'}, '$3'}, '$4', '_'},
+                                               [{'==', '$1', Id},
+                                                {'==', '$3', stats}],
+                                               [{{'$2', '$4'}}]}]),
+            [{K, Items}] = F(Props), %% [1]
+            Items2 = [case lists:keyfind(id_lookup(IdType, Item), 1, BasicStats) of
+                          false -> Item;
+                          {_, BS} -> BS ++ Item
+                      end || Item <- Items],
+            [{K, Items2}]
+    end.
+
+%% Partition an event type's aggregated tables into message-rate tables
+%% (fine_stats / deliver_get / queue_msg_rates) and everything else.
+read_simple_stats(EventType) ->
+    lists:partition(
+      fun({_, Type}) ->
+              lists:member(Type, [fine_stats, deliver_get, queue_msg_rates])
+      end, rabbit_mgmt_stats_tables:aggr_tables(EventType)).
+
+%% Collect all aggregated-table keys matching Id (one side of a compound
+%% key) and group the entries by the other side of the key.
+read_detail_stats(EventType, Id) ->
+    Tables = rabbit_mgmt_stats_tables:aggr_tables(EventType),
+    Keys =  [{Table, Type, Key} || {Table, Type} <- Tables,
+                                   Key <- rabbit_mgmt_stats:get_keys(Table, Id)],
+    lists:foldl(
+      fun ({_Table, _Type, Id0} = Entry, L) ->
+              NewId = revert(Id, Id0),
+              case lists:keyfind(NewId, 1, L) of
+                      false    ->
+                      [{NewId, [Entry]} | L];
+                  {NewId, KVs} ->
+                      lists:keyreplace(NewId, 1, L, {NewId, [Entry | KVs]})
+              end
+      end, [], Keys).
+
+%% Given the wildcard pattern we matched with, extract the non-wildcard
+%% half of the compound key.
+revert({'_', _}, {Id, _}) ->
+    Id;
+revert({_, '_'}, {_, Id}) ->
+    Id.
+
+detail_stats(Ranges, Name, AggregatedStatsType, Id, Interval) ->
+    {Name,
+     [[{stats, format_samples(Ranges, KVs, Interval)} | format_detail_id(G)]
+      || {G, KVs} <- read_detail_stats(AggregatedStatsType, Id)]}.
+
+%% Render the "other end" of a detail stat: a channel pid, a queue /
+%% exchange resource, or a node name.
+format_detail_id(ChPid) when is_pid(ChPid) ->
+    augment_msg_stats([{channel, ChPid}]);
+format_detail_id(#resource{name = Name, virtual_host = Vhost, kind = Kind}) ->
+    [{Kind, [{name, Name}, {vhost, Vhost}]}];
+format_detail_id(Node) when is_atom(Node) ->
+    [{name, Node}].
+
+%% Two shapes of input: {Id, [{Table, Record}]} when formatting for a
+%% single object, or [{Table, Record, Id}] when ids vary per entry.
+format_samples(Ranges, {Id, ManyStats}, Interval) ->
+    lists:append(foldl_stats_format(ManyStats, Id, Ranges, Interval, []));
+format_samples(Ranges, ManyStats, Interval) ->
+    lists:append(foldl_stats_format(ManyStats, Ranges, Interval, [])).
+
+foldl_stats_format([{Table, Record} | T], Id, Ranges, Interval, Acc) ->
+    foldl_stats_format(T, Id, Ranges, Interval,
+                       stats_format(Table, Id, Record, Ranges, Interval, Acc));
+foldl_stats_format([], _Id, _Ranges, _Interval, Acc) ->
+    Acc.
+
+foldl_stats_format([{Table, Record, Id} | T], Ranges, Interval, Acc) ->
+    foldl_stats_format(T, Ranges, Interval,
+                       stats_format(Table, Id, Record, Ranges, Interval, Acc));
+foldl_stats_format([], _Ranges, _Interval, Acc) ->
+    Acc.
+
+%% Skip entries with no data at all; otherwise format for the range
+%% appropriate to this record family.
+stats_format(Table, Id, Record, Ranges, Interval, Acc) ->
+    case rabbit_mgmt_stats:is_blank(Table, Id, Record) of
+        true  ->
+            Acc;
+        false ->
+            [rabbit_mgmt_stats:format(pick_range(Record, Ranges),
+                                      Table, Id, Interval, Record) | Acc]
+    end.
+
+%% Ranges come as a 4-tuple {Lengths, MsgRates, Detailed, Node}; pick
+%% the slot matching this record family.
+pick_range(queue_msg_counts, {RangeL, _RangeM, _RangeD, _RangeN}) ->
+    RangeL;
+pick_range(K, {_RangeL, RangeM, _RangeD, _RangeN}) when K == fine_stats;
+                                                        K == deliver_get;
+                                                        K == queue_msg_rates ->
+    RangeM;
+pick_range(K, {_RangeL, _RangeM, RangeD, _RangeN}) when K == coarse_conn_stats;
+                                                        K == process_stats ->
+    RangeD;
+pick_range(K, {_RangeL, _RangeM, _RangeD, RangeN})
+  when K == coarse_node_stats;
+       K == coarse_node_node_stats ->
+    RangeN.
+
+%% We do this when retrieving the queue record rather than when
+%% storing it since the memory use will drop *after* we find out about
+%% hibernation, so to do it when we receive a queue stats event would
+%% be fiddly and racy. This should be quite cheap though.
+adjust_hibernated_memory_use(Qs) ->
+    Pids = [Pid || {Pid, Q} <- Qs, pget(idle_since, Q, not_idle) =/= not_idle],
+    %% We use delegate here not for ordering reasons but because we
+    %% want to get the right amount of parallelism and minimise
+    %% cross-cluster communication.
+    {Mem, _BadNodes} = delegate:invoke(Pids, {erlang, process_info, [memory]}),
+    MemDict = dict:from_list([{P, M} || {P, M = {memory, _}} <- Mem]),
+    [case dict:find(Pid, MemDict) of
+         error        -> Q;
+         {ok, Memory} -> [Memory|proplists:delete(memory, Q)]
+     end || {Pid, Q} <- Qs].
+
+%% Find the single 'create' event whose stored name matches Name, or
+%% not_found.
+created_event(Name, Type) ->
+    case ets:select(Type, [{{{'_', '$1'}, '$2', '$3'}, [{'==', 'create', '$1'},
+                                                        {'==', Name, '$3'}],
+                            ['$2']}]) of
+        [] -> not_found;
+        [Elem] -> Elem
+    end.
+
+%% All stored 'create' events for the given type.
+created_events(Type) ->
+    ets:select(Type, [{{{'_', '$1'}, '$2', '_'}, [{'==', 'create', '$1'}],
+                       ['$2']}]).
+
+%% Consumers for one vhost, or all of them when VHost =:= all.
+consumers_by_queue_and_vhost(VHost) ->
+    ets:select(consumers_by_queue,
+               [{{{#resource{virtual_host = '$1', _ = '_'}, '_', '_'}, '$2'},
+                 [{'orelse', {'==', 'all', VHost}, {'==', VHost, '$1'}}],
+                 ['$2']}]).
+
+%% Build an augmentation fun attaching consumer_details, keyed either
+%% by queue resource or by channel pid depending on KeyFun/TableName.
+consumer_details_fun(KeyFun, TableName) ->
+    fun ([])    -> [];
+        (Props) -> Pattern = {KeyFun(Props), '_', '_'},
+                   [{consumer_details,
+                     [augment_msg_stats(augment_consumer(Obj))
+                      || Obj <- lists:append(
+                                  ets:match(TableName, {Pattern, '$1'}))]}]
+    end.
+
+%% Replace the raw queue resource with its formatted form.
+augment_consumer(Obj) ->
+    [{queue, rabbit_mgmt_format:resource(pget(queue, Obj))} |
+     lists:keydelete(queue, 1, Obj)].
+
+%%----------------------------------------------------------------------------
+%% Internal, query-time summing for overview
+%%----------------------------------------------------------------------------
+
+%% Sum the per-vhost aggregated tables for Type into a temporary stats
+%% structure; the caller must free it after formatting.
+overview_sum(Type, VHosts) ->
+    Stats = [{rabbit_mgmt_stats_tables:aggr_table(vhost_stats, Type), VHost}
+             || VHost <- VHosts],
+    {rabbit_mgmt_stats:sum(Stats), Type, all}.
+
+%%----------------------------------------------------------------------------
+%% Internal, query-time augmentation
+%%----------------------------------------------------------------------------
+
+augment_msg_stats(Props) ->
+    rabbit_mgmt_format:strip_pids(
+      (augment_msg_stats_fun())(Props) ++ Props).
+
+augment_msg_stats_fun() ->
+    fun(Props) ->
+            augment_details(Props, [])
+    end.
+
+%% Walk the proplist, expanding connection / channel / owner pids into
+%% *_details sub-proplists; none/unknown values are skipped.
+augment_details([{_, none} | T], Acc) ->
+    augment_details(T, Acc);
+augment_details([{_, unknown} | T], Acc) ->
+    augment_details(T, Acc);
+augment_details([{connection, Value} | T], Acc) ->
+    augment_details(T, [{connection_details, augment_connection_pid(Value)} | Acc]);
+augment_details([{channel, Value} | T], Acc) ->
+    augment_details(T, [{channel_details, augment_channel_pid(Value)} | Acc]);
+augment_details([{owner_pid, Value} | T], Acc) ->
+    augment_details(T, [{owner_pid_details, augment_connection_pid(Value)} | Acc]);
+augment_details([_ | T], Acc) ->
+    augment_details(T, Acc);
+augment_details([], Acc) ->
+    Acc.
+
+augment_queue_msg_stats_fun() ->
+    fun(Props) ->
+            case lists:keyfind(owner_pid, 1, Props) of
+                {owner_pid, Value} when is_pid(Value) ->
+                    [{owner_pid_details, augment_connection_pid(Value)}];
+                _ ->
+                    []
+            end
+    end.
+
+%% Expand a channel pid into name/number/user plus the owning
+%% connection's name and peer address.
+augment_channel_pid(Pid) ->
+    Ch = lookup_element(channel_stats, {Pid, create}),
+    Conn = lookup_element(connection_stats,
+                          {pget(connection, Ch), create}),
+    [{name,            pget(name,   Ch)},
+     {number,          pget(number, Ch)},
+     {user,            pget(user,   Ch)},
+     {connection_name, pget(name,         Conn)},
+     {peer_port,       pget(peer_port,    Conn)},
+     {peer_host,       pget(peer_host,    Conn)}].
+
+augment_connection_pid(Pid) ->
+    Conn = lookup_element(connection_stats, {Pid, create}),
+    [{name,         pget(name,         Conn)},
+     {peer_port,    pget(peer_port,    Conn)},
+     {peer_host,    pget(peer_host,    Conn)}].
+
+%% Combined mailbox length of the three stats collector processes -
+%% a rough health indicator exposed in the overview.
+event_queue() ->
+    {message_queue_len, Q0} =
+        erlang:process_info(whereis(rabbit_mgmt_event_collector),
+                            message_queue_len),
+    {message_queue_len, Q1} =
+        erlang:process_info(whereis(rabbit_mgmt_queue_stats_collector),
+                            message_queue_len),
+    {message_queue_len, Q2} =
+        erlang:process_info(whereis(rabbit_mgmt_channel_stats_collector),
+                            message_queue_len),
+    Q0 + Q1 + Q2.
similarity index 80%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_dispatcher.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_dispatcher.erl
index e42635e1dd2cf16adbaed0c562fd0d1a605b1bd8..53b05ae7f66bd5456857f2bf390f348b2d161bc1 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_dispatcher).
@@ -41,9 +41,16 @@ dispatcher() ->
      {["cluster-name"],                                            rabbit_mgmt_wm_cluster_name, []},
      {["nodes"],                                                   rabbit_mgmt_wm_nodes, []},
      {["nodes", node],                                             rabbit_mgmt_wm_node, []},
+     {["nodes", node, "memory"],                                   rabbit_mgmt_wm_node_memory, [absolute]},
+     {["nodes", node, "memory", "relative"],                       rabbit_mgmt_wm_node_memory, [relative]},
+     {["nodes", node, "memory", "ets"],                            rabbit_mgmt_wm_node_memory_ets, [absolute]},
+     {["nodes", node, "memory", "ets", "relative"],                rabbit_mgmt_wm_node_memory_ets, [relative]},
+     {["nodes", node, "memory", "ets", filter],                    rabbit_mgmt_wm_node_memory_ets, [absolute]},
+     {["nodes", node, "memory", "ets", filter, "relative"],        rabbit_mgmt_wm_node_memory_ets, [relative]},
      {["extensions"],                                              rabbit_mgmt_wm_extensions, []},
      {["all-configuration"],                                       rabbit_mgmt_wm_definitions, []}, %% This was the old name, let's not break things gratuitously.
      {["definitions"],                                             rabbit_mgmt_wm_definitions, []},
+     {["definitions", vhost],                                      rabbit_mgmt_wm_definitions, []},
      {["parameters"],                                              rabbit_mgmt_wm_parameters, []},
      {["parameters", component],                                   rabbit_mgmt_wm_parameters, []},
      {["parameters", component, vhost],                            rabbit_mgmt_wm_parameters, []},
@@ -78,11 +85,17 @@ dispatcher() ->
      {["vhosts"],                                                  rabbit_mgmt_wm_vhosts, []},
      {["vhosts", vhost],                                           rabbit_mgmt_wm_vhost, []},
      {["vhosts", vhost, "permissions"],                            rabbit_mgmt_wm_permissions_vhost, []},
+     %% /connections/:connection is already taken, we cannot use our standard scheme here
+     {["vhosts", vhost, "connections"],                            rabbit_mgmt_wm_connections_vhost, []},
+     %% /channels/:channel is already taken, we cannot use our standard scheme here
+     {["vhosts", vhost, "channels"],                               rabbit_mgmt_wm_channels_vhost, []},
      {["users"],                                                   rabbit_mgmt_wm_users, []},
      {["users", user],                                             rabbit_mgmt_wm_user, []},
      {["users", user, "permissions"],                              rabbit_mgmt_wm_permissions_user, []},
      {["whoami"],                                                  rabbit_mgmt_wm_whoami, []},
      {["permissions"],                                             rabbit_mgmt_wm_permissions, []},
      {["permissions", vhost, user],                                rabbit_mgmt_wm_permission, []},
-     {["aliveness-test", vhost],                                   rabbit_mgmt_wm_aliveness_test, []}
+     {["aliveness-test", vhost],                                   rabbit_mgmt_wm_aliveness_test, []},
+     {["healthchecks", "node"],                                    rabbit_mgmt_wm_healthchecks, []},
+     {["healthchecks", "node", node],                              rabbit_mgmt_wm_healthchecks, []}
     ].
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_event_collector.erl b/deps/rabbitmq_management/src/rabbit_mgmt_event_collector.erl
new file mode 100644 (file)
index 0000000..a798453
--- /dev/null
@@ -0,0 +1,165 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_event_collector).
+
+-include("rabbit_mgmt.hrl").
+-include("rabbit_mgmt_metrics.hrl").
+-include("rabbit_mgmt_event_collector.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-behaviour(gen_server2).
+
+-export([start_link/0]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+         code_change/3, handle_pre_hibernate/1]).
+
+%% For testing
+-export([override_lookups/1, reset_lookups/0]).
+
+-import(rabbit_mgmt_db, [pget/2]).
+
+%% See the comment on rabbit_mgmt_db for the explanation of
+%% events and stats.
+
+%% Although this gen_server could process all types of events through the
+%% handle_cast, rabbit_mgmt_db_handler (in the management agent) forwards
+%% the non-prioritiy events channel_stats and queue_stats to their own gen_servers
+
+%%----------------------------------------------------------------------------
+%% API
+%%----------------------------------------------------------------------------
+
+start_link() ->
+    Ref = make_ref(),
+    case gen_server2:start_link({global, ?MODULE}, ?MODULE, [Ref], []) of
+        {ok, Pid} -> register(?MODULE, Pid), %% [1]
+                     rabbit:force_event_refresh(Ref),
+                     {ok, Pid};
+        Else      -> Else
+    end.
+%% [1] For debugging it's helpful to locally register the name too
+%% since that shows up in places global names don't.
+
+override_lookups(Lookups) ->
+    gen_server2:call({global, ?MODULE}, {override_lookups, Lookups}, infinity).
+reset_lookups() ->
+    gen_server2:call({global, ?MODULE}, reset_lookups, infinity).
+
+%%----------------------------------------------------------------------------
+%% Internal, gen_server2 callbacks
+%%----------------------------------------------------------------------------
+
+init([Ref]) ->
+    %% When Rabbit is overloaded, it's usually especially important
+    %% that the management plugin work.
+    process_flag(priority, high),
+    {ok, Interval} = application:get_env(rabbit, collect_statistics_interval),
+    {ok, RatesMode} = application:get_env(rabbitmq_management, rates_mode),
+    rabbit_node_monitor:subscribe(self()),
+    rabbit_log:info("Statistics event collector started.~n"),
+    ?TABLES = [ets:new(Key, [public, set, named_table]) || Key <- ?TABLES],
+    %% Index for cleaning up stats of abnormally terminated processes.
+    [ets:new(rabbit_mgmt_stats_tables:key_index(Table),
+             [ordered_set, public, named_table]) || Table <- ?PROC_STATS_TABLES],
+    %% Index for the deleting of fine stats, reduces the number of reductions
+    %% to 1/8 under heavy load.
+    ets:new(old_stats_fine_index, [bag, public, named_table]),
+    ?AGGR_TABLES = [rabbit_mgmt_stats:blank(Name) || Name <- ?AGGR_TABLES],
+    {ok, reset_lookups(
+           #state{interval               = Interval,
+                  event_refresh_ref      = Ref,
+                  rates_mode             = RatesMode}), hibernate,
+     {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
+
+%% Used in rabbit_mgmt_test_db where we need guarantees events have
+%% been handled before querying
+handle_call({event, Event = #event{reference = none}}, _From, State) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    reply(ok, State);
+
+handle_call({override_lookups, Lookups}, _From, State) ->
+    reply(ok, State#state{lookups = Lookups});
+
+handle_call(reset_lookups, _From, State) ->
+    reply(ok, reset_lookups(State));
+
+handle_call(_Request, _From, State) ->
+    reply(not_understood, State).
+
+%% Only handle events that are real, or pertain to a force-refresh
+%% that we instigated.
+handle_cast({event, Event = #event{reference = none}}, State) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    noreply(State);
+
+handle_cast({event, Event = #event{reference = Ref}},
+            State = #state{event_refresh_ref = Ref}) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    noreply(State);
+
+handle_cast(_Request, State) ->
+    noreply(State).
+
+handle_info({node_down, Node}, State) ->
+    Conns = created_events(connection_stats),
+    Chs = created_events(channel_stats),
+    delete_all_from_node(connection_closed, Node, Conns, State),
+    delete_all_from_node(channel_closed, Node, Chs, State),
+    noreply(State);
+
+handle_info(_Info, State) ->
+    noreply(State).
+
+terminate(_Arg, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+reply(Reply, NewState) -> {reply, Reply, NewState, hibernate}.
+noreply(NewState) -> {noreply, NewState, hibernate}.
+
+reset_lookups(State) ->
+    State#state{lookups = [{exchange, fun rabbit_exchange:lookup/1},
+                           {queue,    fun rabbit_amqqueue:lookup/1}]}.
+
+handle_pre_hibernate(State) ->
+    %% rabbit_event can end up holding on to some memory after a busy
+    %% workout, but it's not a gen_server so we can't make it
+    %% hibernate. The best we can do is forcibly GC it here (if
+    %% rabbit_mgmt_db is hibernating the odds are rabbit_event is
+    %% quiescing in some way too).
+    rpc:multicall(
+      rabbit_mnesia:cluster_nodes(running), rabbit_mgmt_db_handler, gc, []),
+    {hibernate, State}.
+
+delete_all_from_node(Type, Node, [Item | Items], State) ->
+    Pid = pget(pid, Item),
+    case node(Pid) of
+        Node ->
+            rabbit_mgmt_event_collector_utils:handle_event(
+              #event{type = Type, props = [{pid, Pid}]}, State);
+        _    -> ok
+    end,
+    delete_all_from_node(Type, Node, Items, State);
+delete_all_from_node(_Type, _Node, [], _State) ->
+    ok.
+
+created_events(Table) ->
+    ets:select(Table, [{{{'_', '$1'}, '$2', '_'}, [{'==', 'create', '$1'}],
+                        ['$2']}]).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_event_collector_utils.erl b/deps/rabbitmq_management/src/rabbit_mgmt_event_collector_utils.erl
new file mode 100644 (file)
index 0000000..d7d2ee1
--- /dev/null
@@ -0,0 +1,551 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_event_collector_utils).
+
+-include("rabbit_mgmt_metrics.hrl").
+-include("rabbit_mgmt_event_collector.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([handle_event/2]).
+
+-import(rabbit_misc, [pget/3]).
+-import(rabbit_mgmt_db, [pget/2, id_name/1, id/2, lookup_element/2]).
+
+%%----------------------------------------------------------------------------
+%% External functions
+%%----------------------------------------------------------------------------
+
+%%------------------------------------------------------------------------------        %% @doc Handles events from any collector.
+%%
+%% All the gen_server of the collectors have the same internal state record,
+%% which contains the interval, lookups and rate_mode required
+%% by this function. Apart from the lookups that can be modified by the
+%% tests, the rest of the state doesn't change after startup.
+%%
+%% Ideally, the gen_server should pass only the required parameters and not the
+%% full state. However, this simplified the refactor and avoided changing all
+%% internal functions.
+%%
+%% @end
+%%------------------------------------------------------------------------------ 
+-spec handle_event(#event{}, #state{}) -> ok.
+handle_event(#event{type = queue_stats, props = Stats, timestamp = Timestamp},
+             State) ->
+    handle_stats(queue_stats, Stats, Timestamp,
+                 {fun rabbit_mgmt_format:format_queue_stats/1, false},
+                 ?QUEUE_MSG_COUNTS, ?QUEUE_MSG_RATES ++ ?PROCESS_STATS, State);
+
+handle_event(Event = #event{type = queue_deleted,
+                            props = [{name, Name}],
+                            timestamp = Timestamp},
+             State) ->
+    delete_consumers(Name, consumers_by_queue, consumers_by_channel),
+    %% This is fiddly. Unlike for connections and channels, we need to
+    %% decrease any amalgamated coarse stats for [messages,
+    %% messages_ready, messages_unacknowledged] for this queue - since
+    %% the queue's deletion means we have really got rid of messages!
+    Id = {coarse, {queue_stats, Name}},
+    %% This ceil must correspond to the ceil in append_samples/5
+    TS = ceil(Timestamp, State),
+    OldStats = lookup_element(old_stats, Id),
+    record_sample_list(Id, OldStats, TS, State, ?QUEUE_MSG_COUNTS),
+    delete_samples(channel_queue_stats,  {'_', Name}),
+    delete_samples(queue_exchange_stats, {Name, '_'}),
+    delete_samples(queue_stats,          Name),
+    handle_deleted(queue_stats, Event);
+
+handle_event(Event = #event{type = exchange_deleted,
+                            props = [{name, Name}]}, _State) ->
+    delete_samples(channel_exchange_stats,  {'_', Name}),
+    delete_samples(queue_exchange_stats,    {'_', Name}),
+    delete_samples(exchange_stats,          Name),
+    handle_deleted(exchange_stats, Event);
+
+handle_event(#event{type = vhost_deleted,
+                    props = [{name, Name}]}, _State) ->
+    delete_samples(vhost_stats, Name);
+
+handle_event(#event{type = connection_created, props = Stats}, _State) ->
+    handle_created(
+      connection_stats, Stats,
+      {fun rabbit_mgmt_format:format_connection_created/1, true});
+
+handle_event(#event{type = connection_stats, props = Stats,
+                    timestamp = Timestamp},
+             State) ->
+    handle_stats(connection_stats, Stats, Timestamp, {[], false},
+                 ?COARSE_CONN_STATS, ?PROCESS_STATS, State);
+
+handle_event(Event = #event{type  = connection_closed,
+                            props = [{pid, Pid}]}, _State) ->
+    delete_samples(connection_stats, Pid),
+    handle_deleted(connection_stats, Event);
+
+handle_event(#event{type = channel_created, props = Stats}, _State) ->
+    handle_created(channel_stats, Stats, {[], false});
+
+handle_event(#event{type = channel_stats, props = Stats, timestamp = Timestamp},
+             State) ->
+    handle_stats(channel_stats, Stats, Timestamp,
+                 {fun rabbit_mgmt_format:format_channel_stats/1, true},
+                 [], ?PROCESS_STATS, State),
+    ChPid = id(channel_stats, Stats),
+    AllStats = [old_fine_stats(ChPid, Type, Stats)
+                || Type <- ?FINE_STATS_TYPES],
+    Objs = ets:lookup(old_stats_fine_index, ChPid),
+    ets:delete(old_stats_fine_index, ChPid),
+    [ets:delete(old_stats, Key) || {_, Key} <- Objs],
+    %% This ceil must correspond to the ceil in handle_event
+    %% queue_deleted
+    handle_fine_stats_list(ChPid, ceil(Timestamp, State), State, AllStats);
+
+handle_event(Event = #event{type = channel_closed,
+                            props = [{pid, Pid}]},
+             _State) ->
+    delete_consumers(Pid, consumers_by_channel, consumers_by_queue),
+    delete_samples(channel_queue_stats,    {Pid, '_'}),
+    delete_samples(channel_exchange_stats, {Pid, '_'}),
+    delete_samples(channel_stats,          Pid),
+    handle_deleted(channel_stats, Event),
+    Objs = ets:lookup(old_stats_fine_index, Pid),
+    ets:delete(old_stats_fine_index, Pid),
+    [ets:delete(old_stats, Key) || {_, Key} <- Objs];
+
+handle_event(#event{type = consumer_created, props = Props}, _State) ->
+    Fmt = {fun rabbit_mgmt_format:format_arguments/1, true},
+    handle_consumer(fun(Table, Id, P0) ->
+                            P = rabbit_mgmt_format:format(P0, Fmt),
+                            ets:insert(Table, {Id, P})
+                    end,
+                    Props);
+
+handle_event(#event{type = consumer_deleted, props = Props}, _State) ->
+    handle_consumer(fun(Table, Id, _P) -> ets:delete(Table, Id) end,
+                    Props);
+
+%% TODO: we don't clear up after dead nodes here - this is a very tiny
+%% leak every time a node is permanently removed from the cluster. Do
+%% we care?
+handle_event(#event{type = node_stats, props = Stats0, timestamp = Timestamp},
+             State) ->
+    Stats = proplists:delete(persister_stats, Stats0) ++
+        pget(persister_stats, Stats0),
+    handle_stats(node_stats, Stats, Timestamp, {[], false}, ?COARSE_NODE_STATS, State);
+
+handle_event(#event{type = node_node_stats, props = Stats,
+                    timestamp = Timestamp}, State) ->
+    handle_stats(node_node_stats, Stats, Timestamp, {[], false}, ?COARSE_NODE_NODE_STATS,
+                 State);
+
+handle_event(Event = #event{type  = node_node_deleted,
+                            props = [{route, Route}]}, _State) ->
+    delete_samples(node_node_stats, Route),
+    handle_deleted(node_node_stats, Event);
+
+handle_event(_Event, _State) ->
+    ok.
+
+%%----------------------------------------------------------------------------
+%% Internal functions
+%%----------------------------------------------------------------------------
+handle_stats(TName, Stats, Timestamp, Funs, RatesKeys, State) ->
+    handle_stats(TName, Stats, Timestamp, Funs, RatesKeys, [], State).
+
+handle_stats(TName, Stats, Timestamp, Funs, RatesKeys, NoAggRatesKeys,
+             State) ->
+    Id = id(TName, Stats),
+    IdSamples = {coarse, {TName, Id}},
+    OldStats = lookup_element(old_stats, IdSamples),
+    append_set_of_samples(
+      Stats, Timestamp, OldStats, IdSamples, RatesKeys, NoAggRatesKeys, State),
+    StripKeys = [id_name(TName)] ++ RatesKeys ++ ?FINE_STATS_TYPES,
+    Stats1 = [{K, V} || {K, V} <- Stats, not lists:member(K, StripKeys),
+                        V =/= unknown],
+    Stats2 = rabbit_mgmt_format:format(Stats1, Funs),
+    ets:insert(TName, {{Id, stats}, Stats2, Timestamp}),
+    ok.
+
+fine_stats_id(ChPid, {Q, X}) -> {ChPid, Q, X};
+fine_stats_id(ChPid, QorX)   -> {ChPid, QorX}.
+
+ceil(TS, #state{interval = Interval}) ->
+    rabbit_mgmt_util:ceil(TS, Interval).
+
+handle_created(TName, Stats, Funs) ->
+    Formatted = rabbit_mgmt_format:format(Stats, Funs),
+    Id = id(TName, Stats),
+    ets:insert(TName, {{Id, create}, Formatted, pget(name, Stats)}),
+    case lists:member(TName, ?PROC_STATS_TABLES) of
+        true  -> ets:insert(rabbit_mgmt_stats_tables:key_index(TName), {Id});
+        false -> true
+    end.
+
+handle_deleted(TName, #event{props = Props}) ->
+    Id = id(TName, Props),
+    case lists:member(TName, ?TABLES) of
+        true  -> ets:delete(TName, {Id, create}),
+                 ets:delete(TName, {Id, stats});
+        false -> ok
+    end,
+    ets:delete(old_stats, {coarse, {TName, Id}}),
+    case lists:member(TName, ?PROC_STATS_TABLES) of
+        true  -> ets:delete(rabbit_mgmt_stats_tables:key_index(TName), Id);
+        false -> true
+    end.
+
+handle_consumer(Fun, Props) ->
+    P = rabbit_mgmt_format:format(Props, {[], false}),
+    CTag = pget(consumer_tag, P),
+    Q    = pget(queue,        P),
+    Ch   = pget(channel,      P),
+    Fun(consumers_by_queue,  {Q, Ch, CTag}, P),
+    Fun(consumers_by_channel, {Ch, Q, CTag}, P).
+
+%% The consumer_deleted event is emitted by queues themselves -
+%% therefore in the event that a queue dies suddenly we may not get
+%% it. The best way to handle this is to make sure we also clean up
+%% consumers when we hear about any queue going down.
+delete_consumers(PrimId, PrimTableName, SecTableName) ->
+    SecIdCTags = ets:match(PrimTableName, {{PrimId, '$1', '$2'}, '_'}),
+    ets:match_delete(PrimTableName, {{PrimId, '_', '_'}, '_'}),
+    delete_consumers_entry(PrimId, SecTableName, SecIdCTags).
+
+delete_consumers_entry(PrimId, SecTableName, [[SecId, CTag] | SecIdTags]) ->
+    ets:delete(SecTableName, {SecId, PrimId, CTag}),
+    delete_consumers_entry(PrimId, SecTableName, SecIdTags);
+delete_consumers_entry(_PrimId, _SecTableName, []) ->
+    ok.
+
+old_fine_stats(ChPid, Type, Props) ->
+    case pget(Type, Props) of
+        unknown       -> ignore;
+        AllFineStats0 -> [begin
+                              Id = fine_stats_id(ChPid, Ids),
+                              {{fine, Id}, Stats, lookup_element(old_stats, {fine, Id})}
+                          end || {Ids, Stats} <- AllFineStats0]
+    end.
+
+handle_fine_stats_list(ChPid, Timestamp, State, [AllStatsElem | AllStats]) ->
+    handle_fine_stats(ChPid, Timestamp, AllStatsElem, State),
+    handle_fine_stats_list(ChPid, Timestamp, State, AllStats);
+handle_fine_stats_list(_ChPid, _Timestamp, _State, []) ->
+    ok.
+
+handle_fine_stats(_ChPid, _Timestamp, ignore, _State) ->
+    ok;
+handle_fine_stats(ChPid, Timestamp, [{Id, Stats, OldStats} | AllStats], State) ->
+    Total = lists:sum([V || {K, V} <- Stats, lists:member(K, ?DELIVER_GET)]),
+    Stats1 = case Total of
+                 0 -> Stats;
+                 _ -> [{deliver_get, Total}|Stats]
+             end,
+    append_all_samples(Timestamp, OldStats, Id, true, State, Stats1),
+    ets:insert(old_stats, {Id, Stats1}),
+    ets:insert(old_stats_fine_index, {ChPid, Id}),
+    handle_fine_stats(ChPid, Timestamp, AllStats, State);
+handle_fine_stats(_ChPid, _Timestamp, [], _State) ->
+    ok.
+
+delete_samples(Type, Id0) ->
+    [rabbit_mgmt_stats:delete_stats(Table, Id0)
+     || {Table, _} <- rabbit_mgmt_stats_tables:aggr_tables(Type)].
+
+append_set_of_samples(Stats, TS, OldStats, Id, Keys, NoAggKeys, State) ->
+    %% Refactored to avoid duplicated calls to ignore_coarse_sample, ceil and
+    %% ets:insert(old_stats ...)
+    case ignore_coarse_sample(Id, State) of
+        false ->
+            %% This ceil must correspond to the ceil in handle_event
+            %% queue_deleted
+            NewMS = ceil(TS, State),
+            append_samples_by_keys(
+              Stats, NewMS, OldStats, Id, Keys, true, State),
+            append_samples_by_keys(
+              Stats, NewMS, OldStats, Id, NoAggKeys, false, State),
+            ets:insert(old_stats, {Id, Stats});
+        true ->
+            ok
+    end.
+
+append_samples_by_keys(Stats, TS, OldStats, Id, Keys, Agg, State) ->
+    case Keys of
+        all ->
+            append_all_samples(TS, OldStats, Id, Agg, State, Stats);
+        _   ->
+            append_some_samples(TS, OldStats, Id, Agg, State, Stats, Keys)
+    end.
+
+append_some_samples(NewMS, OldStats, Id, Agg, State, Stats, [K | Keys]) ->
+    V = pget(K, Stats),
+    case V =/= 0 orelse lists:member(K, ?ALWAYS_REPORT_STATS) of
+        true ->
+            append_sample(K, V, NewMS, OldStats, Id, Agg, State);
+        false ->
+            ok
+    end,
+    append_some_samples(NewMS, OldStats, Id, Agg, State, Stats, Keys);
+append_some_samples(_NewMS, _OldStats, _Id, _Agg, _State, _Stats, []) ->
+    ok.
+
+append_all_samples(NewMS, OldStats, Id, Agg, State, [{K, 0} | Stats]) ->
+    case lists:member(K, ?ALWAYS_REPORT_STATS) of
+        true ->
+            append_sample(K, 0, NewMS, OldStats, Id, Agg, State);
+        false ->
+            ok
+    end,
+    append_all_samples(NewMS, OldStats, Id, Agg, State, Stats);
+append_all_samples(NewMS, OldStats, Id, Agg, State, [{K, V} | Stats]) ->
+    append_sample(K, V, NewMS, OldStats, Id, Agg, State),
+    append_all_samples(NewMS, OldStats, Id, Agg, State, Stats);
+append_all_samples(_NewMS, _OldStats, _Id, _Agg, _State, []) ->
+    ok.
+
+append_sample(Key, Val, NewMS, OldStats, Id, Agg, State) when is_number(Val) ->
+    OldVal = case pget(Key, OldStats, 0) of
+        N when is_number(N) -> N;
+        _                   -> 0
+    end,
+    record_sample(Id, {Key, Val - OldVal, NewMS, State}, Agg, State),
+    ok;
+append_sample(_Key, _Value, _NewMS, _OldStats, _Id, _Agg, _State) ->
+    ok.
+
+ignore_coarse_sample({coarse, {queue_stats, Q}}, State) ->
+    not object_exists(Q, State);
+ignore_coarse_sample(_, _) ->
+    false.
+
+
+record_sample_list(Id, OldStats, TS, State, [Key | Keys]) ->
+    record_sample(Id, {Key, -pget(Key, OldStats, 0), TS, State}, true, State),
+    record_sample_list(Id, OldStats, TS, State, Keys);
+record_sample_list(_Id, _OldStats, _TS, _State, []) ->
+    ok.
+
+%% Node stats do not have a vhost of course
+record_sample({coarse, {node_stats, _Node} = Id}, Args, true, _State) ->
+    record_sample0(Id, Args);
+
+record_sample({coarse, {node_node_stats, _Names} = Id}, Args, true, _State) ->
+    record_sample0(Id, Args);
+
+record_sample({coarse, Id}, Args, false, _State) ->
+    record_sample0(Id, Args);
+
+record_sample({coarse, Id}, Args, true, _State) ->
+    record_sample0(Id, Args),
+    record_sample0({vhost_stats, vhost(Id)}, Args);
+
+%% Deliveries / acks (Q -> Ch)
+record_sample({fine, {Ch, Q = #resource{kind = queue}}}, Args, true, State) ->
+    case object_exists(Q, State) of
+        true  -> record_sample0({channel_queue_stats, {Ch, Q}}, Args),
+                 record_sample0({queue_stats,         Q},       Args);
+        false -> ok
+    end,
+    record_sample0({channel_stats, Ch},       Args),
+    record_sample0({vhost_stats,   vhost(Q)}, Args);
+
+%% Publishes / confirms (Ch -> X)
+record_sample({fine, {Ch, X = #resource{kind = exchange}}}, Args, true,State) ->
+    case object_exists(X, State) of
+        true  -> record_sample0({channel_exchange_stats, {Ch, X}}, Args),
+                 record_sampleX(publish_in,              X,        Args);
+        false -> ok
+    end,
+    record_sample0({channel_stats, Ch},       Args),
+    record_sample0({vhost_stats,   vhost(X)}, Args);
+
+%% Publishes (but not confirms) (Ch -> X -> Q)
+record_sample({fine, {_Ch,
+                      Q = #resource{kind = queue},
+                      X = #resource{kind = exchange}}}, Args, true, State) ->
+    %% TODO This one logically feels like it should be here. It would
+    %% correspond to "publishing channel message rates to queue" -
+    %% which would be nice to handle - except we don't. And just
+    %% uncommenting this means it gets merged in with "consuming
+    %% channel delivery from queue" - which is not very helpful.
+    %% record_sample0({channel_queue_stats, {Ch, Q}}, Args),
+    QExists = object_exists(Q, State),
+    XExists = object_exists(X, State),
+    case QExists of
+        true  -> record_sample0({queue_stats,          Q},       Args);
+        false -> ok
+    end,
+    case QExists andalso XExists of
+        true  -> record_sample0({queue_exchange_stats, {Q,  X}}, Args);
+        false -> ok
+    end,
+    case XExists of
+        true  -> record_sampleX(publish_out,           X,        Args);
+        false -> ok
+    end.
+
+%% We have to check the queue and exchange objects still exist since
+%% their deleted event could be overtaken by a channel stats event
+%% which contains fine stats referencing them. That's also why we
+%% don't need to check the channels exist - their deleted event can't
+%% be overtaken by their own last stats event.
+%%
+%% Also, sometimes the queue_deleted event is not emitted by the queue
+%% (in the nodedown case) - so it can overtake the final queue_stats
+%% event (which is not *guaranteed* to be lost). So we make a similar
+%% check for coarse queue stats.
+%%
+%% We can be sure that mnesia will be up to date by the time we receive
+%% the event (even though we dirty read) since the deletions are
+%% synchronous and we do not emit the deleted event until after the
+%% deletion has occurred.
+object_exists(Name = #resource{kind = Kind}, #state{lookups = Lookups}) ->
+    case (pget(Kind, Lookups))(Name) of
+        {ok, _} -> true;
+        _       -> false
+    end.
+
+vhost(#resource{virtual_host = VHost}) ->
+    VHost;
+vhost({queue_stats, #resource{virtual_host = VHost}}) ->
+    VHost;
+vhost({TName, Pid}) ->
+    pget(vhost, lookup_element(TName, {Pid, create})).
+
+%% exchanges have two sets of "publish" stats, so rearrange things a touch
+record_sampleX(RenamePublishTo, X, {publish, Diff, TS, State}) ->
+    record_sample0({exchange_stats, X}, {RenamePublishTo, Diff, TS, State});
+record_sampleX(_RenamePublishTo, X, {Type, Diff, TS, State}) ->
+    record_sample0({exchange_stats, X}, {Type, Diff, TS, State}).
+
+%% Ignore case where ID1 and ID2 are in a tuple, i.e. detailed stats,
+%% when in basic mode
+record_sample0({Type, {_ID1, _ID2}}, {_, _, _, #state{rates_mode = basic}})
+  when Type =/= node_node_stats ->
+    ok;
+record_sample0({Type, Id0}, {Key0, Diff, TS, #state{}}) ->
+    {Key, Pos} = stat_type(Key0),
+    Id = {Id0, TS},
+    rabbit_mgmt_stats:record(Id, Pos, Diff, Key,
+                             rabbit_mgmt_stats_tables:aggr_table(Type, Key)).
+
+%%------------------------------------------------------------------------------
+%% @hidden
+%% @doc Returns the type of the stat and the position in the tuple
+%%
+%% Uses the record definitions for simplicity, keeping track of the positions in
+%% the tuple.
+%% @end
+%%------------------------------------------------------------------------------
+stat_type(deliver) ->
+    {deliver_get, #deliver_get.deliver};
+stat_type(deliver_no_ack) ->
+    {deliver_get, #deliver_get.deliver_no_ack};
+stat_type(get) ->
+    {deliver_get, #deliver_get.get};
+stat_type(get_no_ack) ->
+    {deliver_get, #deliver_get.get_no_ack};
+stat_type(publish) ->
+    {fine_stats, #fine_stats.publish};
+stat_type(publish_in) ->
+    {fine_stats, #fine_stats.publish_in};
+stat_type(publish_out) ->
+    {fine_stats, #fine_stats.publish_out};
+stat_type(ack) ->
+    {fine_stats, #fine_stats.ack};
+stat_type(deliver_get) ->
+    {fine_stats, #fine_stats.deliver_get};
+stat_type(confirm) ->
+    {fine_stats, #fine_stats.confirm};
+stat_type(return_unroutable) ->
+    {fine_stats, #fine_stats.return_unroutable};
+stat_type(redeliver) ->
+    {fine_stats, #fine_stats.redeliver};
+stat_type(disk_reads) ->
+    {queue_msg_rates, #queue_msg_rates.disk_reads};
+stat_type(disk_writes) ->
+    {queue_msg_rates, #queue_msg_rates.disk_writes};
+stat_type(messages) ->
+    {queue_msg_counts, #queue_msg_counts.messages};
+stat_type(messages_ready) ->
+    {queue_msg_counts, #queue_msg_counts.messages_ready};
+stat_type(messages_unacknowledged) ->
+    {queue_msg_counts, #queue_msg_counts.messages_unacknowledged};
+stat_type(mem_used) ->
+    {coarse_node_stats, #coarse_node_stats.mem_used};
+stat_type(fd_used) ->
+    {coarse_node_stats, #coarse_node_stats.fd_used};
+stat_type(sockets_used) ->
+    {coarse_node_stats, #coarse_node_stats.sockets_used};
+stat_type(proc_used) ->
+    {coarse_node_stats, #coarse_node_stats.proc_used};
+stat_type(disk_free) ->
+    {coarse_node_stats, #coarse_node_stats.disk_free};
+stat_type(io_read_count) ->
+    {coarse_node_stats, #coarse_node_stats.io_read_count};
+stat_type(io_read_bytes) ->
+    {coarse_node_stats, #coarse_node_stats.io_read_bytes};
+stat_type(io_read_time) ->
+    {coarse_node_stats, #coarse_node_stats.io_read_time};
+stat_type(io_write_count) ->
+    {coarse_node_stats, #coarse_node_stats.io_write_count};
+stat_type(io_write_bytes) ->
+    {coarse_node_stats, #coarse_node_stats.io_write_bytes};
+stat_type(io_write_time) ->
+    {coarse_node_stats, #coarse_node_stats.io_write_time};
+stat_type(io_sync_count) ->
+    {coarse_node_stats, #coarse_node_stats.io_sync_count};
+stat_type(io_sync_time) ->
+    {coarse_node_stats, #coarse_node_stats.io_sync_time};
+stat_type(io_seek_count) ->
+    {coarse_node_stats, #coarse_node_stats.io_seek_count};
+stat_type(io_seek_time) ->
+    {coarse_node_stats, #coarse_node_stats.io_seek_time};
+stat_type(io_reopen_count) ->
+    {coarse_node_stats, #coarse_node_stats.io_reopen_count};
+stat_type(mnesia_ram_tx_count) ->
+    {coarse_node_stats, #coarse_node_stats.mnesia_ram_tx_count};
+stat_type(mnesia_disk_tx_count) ->
+    {coarse_node_stats, #coarse_node_stats.mnesia_disk_tx_count};
+stat_type(msg_store_read_count) ->
+    {coarse_node_stats, #coarse_node_stats.msg_store_read_count};
+stat_type(msg_store_write_count) ->
+    {coarse_node_stats, #coarse_node_stats.msg_store_write_count};
+stat_type(queue_index_journal_write_count) ->
+    {coarse_node_stats, #coarse_node_stats.queue_index_journal_write_count};
+stat_type(queue_index_write_count) ->
+    {coarse_node_stats, #coarse_node_stats.queue_index_write_count};
+stat_type(queue_index_read_count) ->
+    {coarse_node_stats, #coarse_node_stats.queue_index_read_count};
+stat_type(gc_num) ->
+    {coarse_node_stats, #coarse_node_stats.gc_num};
+stat_type(gc_bytes_reclaimed) ->
+    {coarse_node_stats, #coarse_node_stats.gc_bytes_reclaimed};
+stat_type(context_switches) ->
+    {coarse_node_stats, #coarse_node_stats.context_switches};
+stat_type(send_bytes) ->
+    {coarse_node_node_stats, #coarse_node_node_stats.send_bytes};
+stat_type(recv_bytes) ->
+    {coarse_node_node_stats, #coarse_node_node_stats.recv_bytes};
+stat_type(recv_oct) ->
+    {coarse_conn_stats, #coarse_conn_stats.recv_oct};
+stat_type(send_oct) ->
+    {coarse_conn_stats, #coarse_conn_stats.send_oct};
+stat_type(reductions) ->
+    {process_stats, #process_stats.reductions};
+stat_type(io_file_handle_open_attempt_count) ->
+    {coarse_node_stats, #coarse_node_stats.io_file_handle_open_attempt_count};
+stat_type(io_file_handle_open_attempt_time) ->
+    {coarse_node_stats, #coarse_node_stats.io_file_handle_open_attempt_time}.
similarity index 59%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_format.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_format.erl
index d2a06414dcaa84fc4b3565faec9217054d357e40..6323738befb41173d347ff706292e7a25fbb74e9 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_format).
 
--export([format/2, print/2, remove/1, ip/1, ipb/1, amqp_table/1, tuple/1]).
--export([parameter/1, now_to_str/1, now_to_str_ms/1, now_to_ms/1, strip_pids/1]).
--export([node_from_pid/1, protocol/1, resource/1, queue/1, queue_state/1]).
+-export([format/2, ip/1, ipb/1, amqp_table/1, tuple/1]).
+-export([parameter/1, now_to_str/1, now_to_str_ms/1, strip_pids/1]).
+-export([protocol/1, resource/1, queue/1, queue_state/1]).
 -export([exchange/1, user/1, internal_user/1, binding/1, url/2]).
 -export([pack_binding_props/2, tokenise/1]).
 -export([to_amqp_table/1, listener/1, properties/1, basic_properties/1]).
 -export([record/2, to_basic_properties/1]).
 -export([addr/1, port/1]).
+-export([format_nulls/1]).
+-export([print/2, print/1]).
+
+-export([format_queue_stats/1, format_channel_stats/1,
+         format_arguments/1, format_connection_created/1,
+         format_accept_content/1, format_args/1]).
+
+-export([strip_queue_pids/1]).
 
 -import(rabbit_misc, [pget/2, pset/3]).
 
 -include_lib("rabbit_common/include/rabbit.hrl").
 -include_lib("rabbit_common/include/rabbit_framing.hrl").
 
--define(PIDS_TO_STRIP, [connection, owner_pid, channel,
-                        exclusive_consumer_pid]).
-
 %%--------------------------------------------------------------------
 
-format(Stats, Fs) ->
-    lists:concat([format_item(Stat, Fs) || {_Name, Value} = Stat <- Stats,
-                                           Value =/= unknown]).
-
-format_item(Stat, []) ->
-    [Stat];
-format_item({Name, Value}, [{Fun, Names} | Fs]) ->
-    case lists:member(Name, Names) of
-        true  -> case Fun(Value) of
-                     List when is_list(List) -> List;
-                     Formatted               -> [{Name, Formatted}]
-                 end;
-        false -> format_item({Name, Value}, Fs)
-    end.
+format(Stats, {[], _}) ->
+    [Stat || {_Name, Value} = Stat <- Stats, Value =/= unknown];
+format(Stats, {Fs, true}) ->
+    [Fs(Stat) || {_Name, Value} = Stat <- Stats, Value =/= unknown];
+format(Stats, {Fs, false}) ->
+    lists:concat([Fs(Stat) || {_Name, Value} = Stat <- Stats,
+                              Value =/= unknown]).
+
+format_queue_stats({reductions, _}) ->
+    [];
+format_queue_stats({exclusive_consumer_pid, _}) ->
+    [];
+format_queue_stats({slave_pids, ''}) ->
+    [];
+format_queue_stats({slave_pids, Pids}) ->
+    [{slave_nodes, [node(Pid) || Pid <- Pids]}];
+format_queue_stats({synchronised_slave_pids, ''}) ->
+    [];
+format_queue_stats({synchronised_slave_pids, Pids}) ->
+    [{synchronised_slave_nodes, [node(Pid) || Pid <- Pids]}];
+format_queue_stats({backing_queue_status, Value}) ->
+    [{backing_queue_status, properties(Value)}];
+format_queue_stats({idle_since, Value}) ->
+    [{idle_since, now_to_str(Value)}];
+format_queue_stats({state, Value}) ->
+    queue_state(Value);
+format_queue_stats(Stat) ->
+    [Stat].
+
+format_channel_stats({idle_since, Value}) ->
+    {idle_since, now_to_str(Value)};
+format_channel_stats(Stat) ->
+    Stat.
+
+format_arguments({arguments, Value}) ->
+    {arguments, amqp_table(Value)};
+format_arguments(Stat) ->
+    Stat.
+
+format_args({arguments, Value}) ->
+    {arguments, rabbit_mgmt_util:args(Value)};
+format_args(Stat) ->
+    Stat.
+
+format_connection_created({host, Value}) ->
+    {host, addr(Value)};
+format_connection_created({peer_host, Value}) ->
+    {peer_host, addr(Value)};
+format_connection_created({port, Value}) ->
+    {port, port(Value)};
+format_connection_created({peer_port, Value}) ->
+    {peer_port, port(Value)};
+format_connection_created({protocol, Value}) ->
+    {protocol, protocol(Value)};
+format_connection_created({client_properties, Value}) ->
+    {client_properties, amqp_table(Value)};
+format_connection_created(Stat) ->
+    Stat.
+
+format_exchange_and_queue({policy, Value}) ->
+    policy(Value);
+format_exchange_and_queue({arguments, Value}) ->
+    [{arguments, amqp_table(Value)}];
+format_exchange_and_queue({name, Value}) ->
+    resource(Value);
+format_exchange_and_queue(Stat) ->
+    [Stat].
+
+format_binding({source, Value}) ->
+    resource(source, Value);
+format_binding({arguments, Value}) ->
+    [{arguments, amqp_table(Value)}];
+format_binding(Stat) ->
+    [Stat].
+
+format_basic_properties({headers, Value}) ->
+    {headers, amqp_table(Value)};
+format_basic_properties(Stat) ->
+    Stat.
+
+format_accept_content({durable, Value}) ->
+    {durable, rabbit_mgmt_util:parse_bool(Value)};
+format_accept_content({auto_delete, Value}) ->
+    {auto_delete, rabbit_mgmt_util:parse_bool(Value)};
+format_accept_content({internal, Value}) ->
+    {internal, rabbit_mgmt_util:parse_bool(Value)};
+format_accept_content(Stat) ->
+    Stat.
 
 print(Fmt, Val) when is_list(Val) ->
     list_to_binary(lists:flatten(io_lib:format(Fmt, Val)));
 print(Fmt, Val) ->
     print(Fmt, [Val]).
 
-%% TODO - can we remove all these "unknown" cases? Coverage never hits them.
-
-remove(_) -> [].
-
-node_from_pid(Pid) when is_pid(Pid) -> [{node, node(Pid)}];
-node_from_pid('')                   -> [];
-node_from_pid(unknown)              -> [];
-node_from_pid(none)                 -> [].
+print(Val) when is_list(Val) ->
+    list_to_binary(lists:flatten(Val));
+print(Val) ->
+    Val.
 
-nodes_from_pids(Name) ->
-    fun('')   -> [];
-       (Pids) -> [{Name, [node(Pid) || Pid <- Pids]}]
-    end.
+%% TODO - can we remove all these "unknown" cases? Coverage never hits them.
 
 ip(unknown) -> unknown;
 ip(IP)      -> list_to_binary(rabbit_misc:ntoa(IP)).
@@ -130,21 +202,19 @@ protocol_version({Major, Minor, 0})        -> protocol_version({Major, Minor});
 protocol_version({Major, Minor, Revision}) -> io_lib:format("~B-~B-~B",
                                                     [Major, Minor, Revision]).
 
-now_to_ms(unknown) ->
-    unknown;
-now_to_ms(Now) ->
-    timer:now_diff(Now, {0,0,0}) div 1000.
-
 now_to_str(unknown) ->
     unknown;
-now_to_str(Now) ->
-    {{Y, M, D}, {H, Min, S}} = calendar:now_to_local_time(Now),
+now_to_str(MilliSeconds) ->
+    BaseDate = calendar:datetime_to_gregorian_seconds({{1970, 1, 1},
+                                                       {0, 0, 0}}),
+    Seconds = BaseDate + (MilliSeconds div 1000),
+    {{Y, M, D}, {H, Min, S}} = calendar:gregorian_seconds_to_datetime(Seconds),
     print("~w-~2.2.0w-~2.2.0w ~w:~2.2.0w:~2.2.0w", [Y, M, D, H, Min, S]).
 
 now_to_str_ms(unknown) ->
     unknown;
-now_to_str_ms(Now = {_, _, Micro}) ->
-    print("~s:~3.3.0w", [now_to_str(Now), Micro div 1000]).
+now_to_str_ms(MilliSeconds) ->
+    print("~s:~3.3.0w", [now_to_str(MilliSeconds), MilliSeconds rem 1000]).
 
 resource(unknown) -> unknown;
 resource(Res)     -> resource(name, Res).
@@ -158,9 +228,11 @@ policy('')     -> [];
 policy(Policy) -> [{policy, Policy}].
 
 internal_user(User) ->
-    [{name,          User#internal_user.username},
-     {password_hash, base64:encode(User#internal_user.password_hash)},
-     {tags,          tags(User#internal_user.tags)}].
+    [{name,              User#internal_user.username},
+     {password_hash,     base64:encode(User#internal_user.password_hash)},
+     {hashing_algorithm, rabbit_auth_backend_internal:hashing_module_for_user(
+                             User)},
+     {tags,              tags(User#internal_user.tags)}].
 
 user(User) ->
     [{name, User#user.username},
@@ -225,9 +297,7 @@ url(Fmt, Vals) ->
     print(Fmt, [mochiweb_util:quote_plus(V) || V <- Vals]).
 
 exchange(X) ->
-    format(X, [{fun resource/1,   [name]},
-               {fun amqp_table/1, [arguments]},
-               {fun policy/1,     [policy]}]).
+    format(X, {fun format_exchange_and_queue/1, false}).
 
 %% We get queues using rabbit_amqqueue:list/1 rather than :info_all/1 since
 %% the latter wakes up each queue. Therefore we have a record rather than a
@@ -243,13 +313,12 @@ queue(#amqqueue{name            = Name,
       [{name,        Name},
        {durable,     Durable},
        {auto_delete, AutoDelete},
+       {exclusive,   is_pid(ExclusiveOwner)},
        {owner_pid,   ExclusiveOwner},
        {arguments,   Arguments},
        {pid,         Pid},
        {state,       State}],
-      [{fun resource/1,   [name]},
-       {fun amqp_table/1, [arguments]},
-       {fun policy/1,     [policy]}]).
+      {fun format_exchange_and_queue/1, false}).
 
 queue_state({syncing, Msgs}) -> [{state,         syncing},
                                  {sync_messages, Msgs}];
@@ -269,12 +338,11 @@ binding(#binding{source      = S,
        {routing_key,      Key},
        {arguments,        Args},
        {properties_key, pack_binding_props(Key, Args)}],
-      [{fun (Res) -> resource(source, Res) end, [source]},
-       {fun amqp_table/1,                       [arguments]}]).
+      {fun format_binding/1, false}).
 
 basic_properties(Props = #'P_basic'{}) ->
     Res = record(Props, record_info(fields, 'P_basic')),
-    format(Res, [{fun amqp_table/1, [headers]}]).
+    format(Res, {fun format_basic_properties/1, true}).
 
 record(Record, Fields) ->
     {Res, _Ix} = lists:foldl(fun (K, {L, Ix}) ->
@@ -313,15 +381,73 @@ to_basic_properties(Props) ->
 a2b(A) ->
     list_to_binary(atom_to_list(A)).
 
+strip_queue_pids(Item) ->
+    strip_queue_pids(Item, []).
+
+strip_queue_pids([{_, unknown} | T], Acc) ->
+    strip_queue_pids(T, Acc);
+strip_queue_pids([{pid, Pid} | T], Acc) when is_pid(Pid) ->
+    strip_queue_pids(T, [{node, node(Pid)} | Acc]);
+strip_queue_pids([{pid, _} | T], Acc) ->
+    strip_queue_pids(T, Acc);
+strip_queue_pids([{owner_pid, _} | T], Acc) ->
+    strip_queue_pids(T, Acc);
+strip_queue_pids([Any | T], Acc) ->
+    strip_queue_pids(T, [Any | Acc]);
+strip_queue_pids([], Acc) ->
+    Acc.
+
 %% Items can be connections, channels, consumers or queues, hence remove takes
 %% various items.
 strip_pids(Item = [T | _]) when is_tuple(T) ->
-    format(Item,
-           [{fun node_from_pid/1, [pid]},
-            {fun remove/1,        ?PIDS_TO_STRIP},
-            {nodes_from_pids(slave_nodes), [slave_pids]},
-            {nodes_from_pids(synchronised_slave_nodes),
-             [synchronised_slave_pids]}
-           ]);
+    strip_pids(Item, []);
 
 strip_pids(Items) -> [strip_pids(I) || I <- Items].
+
+strip_pids([{_, unknown} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{pid, Pid} | T], Acc) when is_pid(Pid) ->
+    strip_pids(T, [{node, node(Pid)} | Acc]);
+strip_pids([{pid, _} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{connection, _} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{owner_pid, _} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{channel, _} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{exclusive_consumer_pid, _} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{slave_pids, ''} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{slave_pids, Pids} | T], Acc) ->
+    strip_pids(T, [{slave_nodes, [node(Pid) || Pid <- Pids]} | Acc]);
+strip_pids([{synchronised_slave_pids, ''} | T], Acc) ->
+    strip_pids(T, Acc);
+strip_pids([{synchronised_slave_pids, Pids} | T], Acc) ->
+    strip_pids(T, [{synchronised_slave_nodes, [node(Pid) || Pid <- Pids]} | Acc]);
+strip_pids([Any | T], Acc) ->
+    strip_pids(T, [Any | Acc]);
+strip_pids([], Acc) ->
+    Acc.
+
+%% Format for JSON replies. Transforms '' into null
+format_nulls(Items) when is_list(Items) ->
+    [format_null_item(Pair) || Pair <- Items];
+format_nulls(Item) ->
+    format_null_item(Item).
+
+format_null_item({Key, ''}) ->
+    {Key, null};
+format_null_item({Key, Value}) when is_list(Value) ->
+    {Key, format_nulls(Value)};
+format_null_item({Key, {struct, Struct}}) ->
+    {Key, {struct, format_nulls(Struct)}};
+format_null_item({Key, {array, Struct}}) ->
+    {Key, {array, format_nulls(Struct)}};
+format_null_item({Key, Value}) ->
+    {Key, Value};
+format_null_item([{_K, _V} | _T] = L) ->
+    format_nulls(L);
+format_null_item(Value) ->
+    Value.
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_load_definitions.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_load_definitions.erl
index d12f545261dae14613085fabe6ce2d4b714bad08..d52e8ac03e9af5f35d4a51a00ec977cd5914c1a3 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_load_definitions).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_queue_stats_collector.erl b/deps/rabbitmq_management/src/rabbit_mgmt_queue_stats_collector.erl
new file mode 100644 (file)
index 0000000..1d4602e
--- /dev/null
@@ -0,0 +1,120 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_queue_stats_collector).
+
+-include("rabbit_mgmt.hrl").
+-include("rabbit_mgmt_metrics.hrl").
+-include("rabbit_mgmt_event_collector.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-behaviour(gen_server2).
+
+-export([start_link/0]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+         code_change/3, handle_pre_hibernate/1]).
+
+-export([prioritise_cast/3]).
+
+-import(rabbit_misc, [pget/3]).
+-import(rabbit_mgmt_db, [pget/2, id_name/1, id/2, lookup_element/2]).
+
+prioritise_cast({event, #event{type = queue_stats}}, Len,
+                #state{max_backlog = MaxBacklog} = _State)
+  when Len > MaxBacklog ->
+    drop;
+prioritise_cast(_Msg, _Len, _State) ->
+    0.
+
+%% See the comment on rabbit_mgmt_db for the explanation of
+%% events and stats.
+
+%% Although this gen_server could process all types of events through the
+%% handle_cast, rabbit_mgmt_db_handler (in the management agent) forwards
+%% only the non-priority events queue_stats
+%%----------------------------------------------------------------------------
+%% API
+%%----------------------------------------------------------------------------
+
+start_link() ->
+    case gen_server2:start_link({global, ?MODULE}, ?MODULE, [], []) of
+        {ok, Pid} -> register(?MODULE, Pid), %% [1]
+                     {ok, Pid};
+        Else      -> Else
+    end.
+%% [1] For debugging it's helpful to locally register the name too
+%% since that shows up in places global names don't.
+
+%%----------------------------------------------------------------------------
+%% Internal, gen_server2 callbacks
+%%----------------------------------------------------------------------------
+
+init([]) ->
+    {ok, Interval} = application:get_env(rabbit, collect_statistics_interval),
+    {ok, RatesMode} = application:get_env(rabbitmq_management, rates_mode),
+    {ok, MaxBacklog} = application:get_env(rabbitmq_management,
+                                           stats_event_max_backlog),
+    process_flag(priority, high),
+    rabbit_log:info("Statistics queue stats collector started.~n"),
+    {ok, reset_lookups(
+           #state{interval               = Interval,
+                  rates_mode             = RatesMode,
+                  max_backlog            = MaxBacklog}), hibernate,
+     {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
+
+%% Used in rabbit_mgmt_test_db where we need guarantees events have
+%% been handled before querying
+handle_call({event, Event = #event{reference = none}}, _From, State) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    reply(ok, State);
+
+handle_call(_Request, _From, State) ->
+    reply(not_understood, State).
+
+%% Only handle events that are real.
+handle_cast({event, Event = #event{reference = none}}, State) ->
+    rabbit_mgmt_event_collector_utils:handle_event(Event, State),
+    noreply(State);
+
+handle_cast(_Request, State) ->
+    noreply(State).
+
+handle_info(_Info, State) ->
+    noreply(State).
+
+terminate(_Arg, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+reply(Reply, NewState) -> {reply, Reply, NewState, hibernate}.
+noreply(NewState) -> {noreply, NewState, hibernate}.
+
+reset_lookups(State) ->
+    State#state{lookups = [{exchange, fun rabbit_exchange:lookup/1},
+                           {queue,    fun rabbit_amqqueue:lookup/1}]}.
+
+handle_pre_hibernate(State) ->
+    %% rabbit_event can end up holding on to some memory after a busy
+    %% workout, but it's not a gen_server so we can't make it
+    %% hibernate. The best we can do is forcibly GC it here (if
+    %% rabbit_mgmt_db is hibernating the odds are rabbit_event is
+    %% quiescing in some way too).
+    rpc:multicall(
+      rabbit_mnesia:cluster_nodes(running), rabbit_mgmt_db_handler, gc, []),
+    {hibernate, State}.
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_reset_handler.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_reset_handler.erl
index e16351f439cba62e0e84545ddaae7c6665214627..40008f0f6d7b2d48021235fe85a17cdf067459bd 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% When management extensions are enabled and/or disabled at runtime, the
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_stats.erl b/deps/rabbitmq_management/src/rabbit_mgmt_stats.erl
new file mode 100644 (file)
index 0000000..af8f310
--- /dev/null
@@ -0,0 +1,978 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Plugin.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2010-2012 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_stats).
+
+-include("rabbit_mgmt.hrl").
+-include("rabbit_mgmt_metrics.hrl").
+
+-export([blank/1, is_blank/3, record/5, format/5, sum/1, gc/3,
+         free/1, delete_stats/2, get_keys/2]).
+
+-import(rabbit_misc, [pget/2]).
+
+-define(ALWAYS_REPORT, [queue_msg_counts, coarse_node_stats]).
+-define(MICRO_TO_MILLI, 1000).
+
+%% Data is stored in ETS tables:
+%% * one set of ETS tables per event (queue_stats, queue_exchange_stats...)
+%% * each set contains one table per group of events (queue_msg_rates,
+%%   deliver_get, fine_stats...) such as aggr_queue_stats_deliver_get
+%%   (see ?AGGR_TABLES in rabbit_mgmt_metrics.hrl)
+%% * data is then stored as a tuple (not a record) to take advantage of the
+%%   atomic call ets:update_counter/3. The equivalent records are noted in
+%%   rabbit_mgmt_metrics.hrl to get the position and as reference for developers
+%% * Records are of the shape:
+%%    {{Id, base}, Field1, Field2, ....} 
+%%    {{Id, total}, Field1, Field2, ....} 
+%%    {{Id, Timestamp}, Field1, Field2, ....} 
+%%    where Id can be a simple key or a tuple {Id0, Id1} 
+%%
+%% This module is not generic any longer, any new event or field needs to be
+%% manually added, but it increases the performance and allows concurrent
+%% GC, event collection and querying
+%%
+
+%%----------------------------------------------------------------------------
+%% External functions
+%%----------------------------------------------------------------------------
+
+blank(Name) ->
+    ets:new(rabbit_mgmt_stats_tables:index(Name),
+            [bag, public, named_table]),
+    ets:new(rabbit_mgmt_stats_tables:key_index(Name),
+            [ordered_set, public, named_table]),
+    ets:new(Name, [set, public, named_table]).
+
+is_blank({Table, _, _}, Id, Record) ->
+    is_blank(Table, Id, Record);
+is_blank(Table, Id, Record) ->
+    case ets:lookup(Table, {Id, total}) of
+        [] ->
+            true;
+        [Total] ->
+            case lists:member(Record, ?ALWAYS_REPORT) of
+                true -> false;
+                false -> is_blank(Total)
+            end
+    end.
+
+%%----------------------------------------------------------------------------
+free({Table, IndexTable, KeyIndexTable}) ->
+    ets:delete(Table),
+    ets:delete(IndexTable),
+    ets:delete(KeyIndexTable).
+
+delete_stats(Table, {'_', _} = Id) ->
+    delete_complex_stats(Table, Id);
+delete_stats(Table, {_, '_'} = Id) ->
+    delete_complex_stats(Table, Id);
+delete_stats(Table, Id) ->
+    Keys = full_indexes(Table, Id),
+    ets:delete(rabbit_mgmt_stats_tables:index(Table), Id),
+    ets:delete(rabbit_mgmt_stats_tables:key_index(Table), Id),
+    [ets:delete(Table, Key) || Key <- Keys].
+
+delete_complex_stats(Table, Id) ->
+    Ids = ets:select(rabbit_mgmt_stats_tables:key_index(Table),
+                     match_spec_key_index(Id)),
+    delete_complex_stats_loop(Table, Ids).
+
+delete_complex_stats_loop(_Table, []) ->
+    ok;
+delete_complex_stats_loop(Table, [{Id} | Ids]) ->
+    delete_stats(Table, Id),
+    delete_complex_stats_loop(Table, Ids).
+
+%%----------------------------------------------------------------------------
+get_keys(Table, Id0) ->
+    ets:select(rabbit_mgmt_stats_tables:key_index(Table), match_spec_keys(Id0)).
+
+%%----------------------------------------------------------------------------
+%% Event-time
+%%----------------------------------------------------------------------------
+
+record({Id, _TS} = Key, Pos, Diff, Record, Table) ->
+    ets_update(Table, Key, Record, Pos, Diff),
+    ets_update(Table, {Id, total}, Record, Pos, Diff).
+
+%%----------------------------------------------------------------------------
+%% Query-time
+%%----------------------------------------------------------------------------
+
+format(no_range, Table, Id, Interval, Type) ->
+    Now = time_compat:os_system_time(milli_seconds),
+    Counts = get_value(Table, Id, total, Type),
+    RangePoint = ((Now div Interval) * Interval) - Interval,
+    {Record, Factor} = format_rate_with(
+                         Table, Id, RangePoint, Interval, Interval, Type),
+    format_rate(Type, Record, Counts, Factor);
+
+format(Range, Table, Id, Interval, Type) ->
+    Base = get_value(Table, Id, base, Type),
+    RangePoint = Range#range.last - Interval,
+    {Samples, Counts} = extract_samples(Range, Base, Table, Id, Type),
+    {Record, Factor} = format_rate_with(
+                         Table, Id, RangePoint, Range#range.incr, Interval, Type),
+    format_rate(Type, Record, Counts, Samples, Factor).
+
+sum([]) -> blank();
+
+sum([{T1, Id} | StatsN]) ->
+    {Table, IndexTable, KeyIndexTable} = T = blank(),
+    AllIds = full_indexes(T1, Id),
+    lists:foreach(fun(Index) ->
+                          case ets:lookup(T1, Index) of
+                              [V] ->
+                                  {_, TS} = element(1, V),
+                                  ets:insert(Table, setelement(1, V, {all, TS})),
+                                  insert_index(IndexTable, KeyIndexTable, {all, TS});
+                              [] -> %% base
+                                  ok
+                          end
+                  end, AllIds),
+    sum(StatsN, T).
+
+sum(StatsN, T) ->
+    lists:foreach(
+      fun ({T1, Id}) ->
+              AllIds = full_indexes(T1, Id),
+              lists:foreach(fun(Index) ->
+                                    case ets:lookup(T1, Index) of
+                                        [V] ->
+                                            {_, TS} = element(1, V),
+                                            ets_update(T, {all, TS}, V);
+                                        [] -> %% base
+                                            ok
+                                    end
+                            end, AllIds)
+      end, StatsN),
+    T.
+
+gc(Cutoff, Table, Id) ->
+    gc(Cutoff, lists:reverse(indexes(Table, Id)), Table, undefined).
+
+%%----------------------------------------------------------------------------
+%% Internal functions
+%%----------------------------------------------------------------------------
+format_rate_with({Table, IndexTable, _KeyIndexTable}, Id, RangePoint, Incr,
+                 Interval, Type) ->
+    format_rate_with(Table, IndexTable, Id, RangePoint, Incr, Interval, Type);
+format_rate_with(Table, Id, RangePoint, Incr, Interval, Type) ->
+    format_rate_with(Table, rabbit_mgmt_stats_tables:index(Table), Id,
+                     RangePoint, Incr, Interval, Type).
+
+format_rate_with(Table, IndexTable, Id, RangePoint, Incr, Interval, Type) ->
+    case second_largest(Table, IndexTable, Id) of
+        [S] ->
+            {_, TS} = element(1, S),
+            case TS - RangePoint of %% [0]
+                D when D =< Incr andalso D >= 0 -> {S, Interval};
+                _                               -> {S, 0.0}
+            end;
+        _ ->
+            {empty(Id, Type), 0.0}
+    end.
+
+%% [0] Only display the rate if it's live - i.e. ((the end of the
+%% range) - interval) corresponds to the second to last data point we
+%% have. If the end of the range is earlier we have gone silent, if
+%% it's later we have been asked for a range back in time (in which
+%% case showing the correct instantaneous rate would be quite a faff,
+%% and probably unwanted). Why the second to last? Because data is
+%% still arriving for the last...
+second_largest(Table, IndexTable, Id) ->
+    case ets:lookup(IndexTable, Id) of
+        [_, _ | _] = List ->
+            ets:lookup(Table, sl(List, {none, 0}, {none, 0}));
+        _ ->
+            unknown
+    end.
+
+sl([{_, TS} = H | T], {_, T1} = L1, _L2) when TS > T1 ->
+    sl(T, H, L1);
+sl([{_, TS} = H | T], L1, {_, T2}) when TS > T2 ->
+    sl(T, L1, H);
+sl([_ | T], L1, L2) ->
+    sl(T, L1, L2);
+sl([], _L1, L2) ->
+    L2.
+
+%% What we want to do here is: given the #range{}, provide a set of
+%% samples such that we definitely provide a set of samples which
+%% covers the exact range requested, despite the fact that we might
+%% not have it. We need to spin up over the entire range of the
+%% samples we *do* have since they are diff-based (and we convert to
+%% absolute values here).
+extract_samples(Range, Base, Table, Id, Type) ->
+    %% In order to calculate the average operation time for some of the node
+    %% metrics, it needs to carry around the last raw sample taken (before
+    %% calculations). This is the first element of the 'Samples' tuple.
+    %% It is initialised to the base, which is updated with the latest value until
+    %% it finds the first valid sample, thus generating an instant rate for it.
+    %% Afterwards, it will store the last raw sample.
+    extract_samples0(Range, Base, indexes(Table, Id), Table, Type,
+                     {Base, empty_list(Type)}).
+
+extract_samples0(Range = #range{first = Next}, Base, [], Table, Type, Samples) ->
+    %% [3] Empty or finished table
+    extract_samples1(Range, Base, empty({unused_id, Next}, Type), [], Table, Type,
+                     Samples);
+extract_samples0(Range, Base, [Index | List], Tab, Type, Samples) ->
+    Table = case Tab of
+               {T, _, _} ->
+                   T;
+               T ->
+                   T
+           end,
+    case ets:lookup(Table, Index) of
+        [S] ->
+            extract_samples1(Range, Base, S, List, Table, Type, Samples);
+        [] ->
+            extract_samples0(Range, Base, List, Table, Type, Samples)
+    end.
+
+%% Emit samples for each step of the range. Next is the timestamp we
+%% still owe the caller; TS is the timestamp of the stored sample S.
+extract_samples1(Range = #range{first = Next, last = Last, incr = Incr},
+                 Base, S, List, Table, Type, {LastRawSample, Samples}) ->
+    {_, TS} = element(1, S),
+    if
+        %% We've gone over the range. Terminate.
+        Next > Last ->
+            %% Drop the raw sample
+            {Samples, Base};
+        %% We've hit bang on a sample. Record it and move to the next.
+        Next =:= TS ->
+            %% The new base is the last sample used to generate instant rates
+            %% in the node stats
+            NewBase = add_record(Base, S),
+            extract_samples0(Range#range{first = Next + Incr}, NewBase, List,
+                             Table, Type, {NewBase, append(NewBase, Samples, Next,
+                                                           LastRawSample)});
+        %% We haven't yet hit the beginning of our range.
+        Next > TS ->
+            NewBase = add_record(Base, S),
+            %% Roll the latest value until we find the first sample
+            RawSample = case element(2, Samples) of
+                            [] -> NewBase;
+                            _ -> LastRawSample
+                        end,
+            extract_samples0(Range, NewBase, List, Table, Type,
+                             {RawSample, Samples});
+        %% We have a valid sample, but we haven't used it up
+        %% yet. Append it and loop around.
+        Next < TS ->
+            %% Pass the last raw sample to calculate instant node stats
+            extract_samples1(Range#range{first = Next + Incr}, Base, S,
+                             List, Table, Type,
+                             {Base, append(Base, Samples, Next, LastRawSample)})
+    end.
+
+%% Fold one absolute-valued record into the per-column sample lists
+%% (the 'samples' tuple built by empty_list/1). One clause per record
+%% arity; TiS is the timestamp attached to the new samples. The
+%% LastRawSample argument is only consumed by the final 29-column
+%% (coarse node stats) clause, which needs the previous raw values to
+%% derive instant averages.
+append({_Key, V1}, {samples, V1s}, TiS, _LastRawSample) ->
+    {samples, append_sample(V1, TiS, V1s)};
+append({_Key, V1, V2}, {samples, V1s, V2s}, TiS, _LastRawSample) ->
+    {samples, append_sample(V1, TiS, V1s), append_sample(V2, TiS, V2s)};
+append({_Key, V1, V2, V3}, {samples, V1s, V2s, V3s}, TiS, _LastRawSample) ->
+    {samples, append_sample(V1, TiS, V1s), append_sample(V2, TiS, V2s),
+     append_sample(V3, TiS, V3s)};
+append({_Key, V1, V2, V3, V4}, {samples, V1s, V2s, V3s, V4s}, TiS, _LastRawSample) ->
+    {samples, append_sample(V1, TiS, V1s), append_sample(V2, TiS, V2s),
+     append_sample(V3, TiS, V3s), append_sample(V4, TiS, V4s)};
+append({_Key, V1, V2, V3, V4, V5, V6, V7, V8},
+       {samples, V1s, V2s, V3s, V4s, V5s, V6s, V7s, V8s}, TiS, _LastRawSample) ->
+    {samples, append_sample(V1, TiS, V1s), append_sample(V2, TiS, V2s),
+     append_sample(V3, TiS, V3s), append_sample(V4, TiS, V4s),
+     append_sample(V5, TiS, V5s), append_sample(V6, TiS, V6s),
+     append_sample(V7, TiS, V7s), append_sample(V8, TiS, V8s)};
+append({_Key, V1, V2, V3, V4, V5, V6, V7, V8, V9, V10, V11, V12, V13, V14, V15,
+        V16, V17, V18, V19, V20, V21, V22, V23, V24, V25, V26, V27, V28},
+       {samples, V1s, V2s, V3s, V4s, V5s, V6s, V7s, V8s, V9s, V10s, V11s, V12s,
+        V13s, V14s, V15s, V16s, V17s, V18s, V19s, V20s, V21s, V22s, V23s, V24s,
+        V25s, V26s, V27s, V28s},
+       TiS,
+       {_, _V1r, _V2r, _V3r, _V4r, _V5r, V6r, _V7r, V8r, V9r, _V10r, V11r,
+        V12r, V13r, V14r, V15r, _V16r, _V17r, _V18r, _V19r, _V20r, _V21r,
+        _V22r, _V23r, _V24r, _V25r, _V26r, V27r, V28r}) ->
+    %% This clause covers the coarse node stats, which must calculate the average
+    %% operation times for read, write, sync and seek. These differ from any other
+    %% statistic and must be caculated using the total time and counter of operations.
+    %% By calculating the new sample against the last sampled point, we provide
+    %% instant averages that truly reflect the behaviour of the system
+    %% during that space of time.
+    {samples, append_sample(V1, TiS, V1s), append_sample(V2, TiS, V2s),
+     append_sample(V3, TiS, V3s), append_sample(V4, TiS, V4s),
+     append_sample(V5, TiS, V5s), append_sample(V6, TiS, V6s),
+     append_sample(V7, TiS, V7s),
+     %% avg_time/4 pairs an accumulated-time column with its operation
+     %% count column (e.g. V8/V6), diffed against the last raw sample.
+     append_sample(avg_time(V8, V6, V8r, V6r), TiS, V8s),
+     append_sample(V9, TiS, V9s), append_sample(V10, TiS, V10s),
+     append_sample(avg_time(V11, V9, V11r, V9r), TiS, V11s),
+     append_sample(V12, TiS, V12s),
+     append_sample(avg_time(V13, V12, V13r, V12r), TiS, V13s),
+     append_sample(V14, TiS, V14s),
+     append_sample(avg_time(V15, V14, V15r, V14r), TiS, V15s),
+     append_sample(V16, TiS, V16s),
+     append_sample(V17, TiS, V17s), append_sample(V18, TiS, V18s),
+     append_sample(V19, TiS, V19s), append_sample(V20, TiS, V20s),
+     append_sample(V21, TiS, V21s), append_sample(V22, TiS, V22s),
+     append_sample(V23, TiS, V23s), append_sample(V24, TiS, V24s),
+     append_sample(V25, TiS, V25s), append_sample(V26, TiS, V26s),
+     append_sample(V27, TiS, V27s),
+     append_sample(avg_time(V28, V27, V28r, V27r), TiS, V28s)}.
+
+%% Prepend one formatted sample (value + timestamp proplist) to the
+%% accumulated sample list; lists are therefore newest-first.
+append_sample(Value, Timestamp, Samples) ->
+    Formatted = [{sample, Value}, {timestamp, Timestamp}],
+    [Formatted | Samples].
+
+%% Create the three ETS tables backing one stats table: the ordered
+%% data table, the (bag) sample index and the key index.
+blank() ->
+    Data     = ets:new(rabbit_mgmt_stats, [ordered_set, public]),
+    Samples  = ets:new(rabbit_mgmt_stats, [bag, public]),
+    Keys     = ets:new(rabbit_mgmt_stats, [ordered_set, public]),
+    {Data, Samples, Keys}.
+
+%%----------------------------------------------------------------------------
+%% Event-GCing
+%%----------------------------------------------------------------------------
+%% Go through the list, amalgamating all too-old samples with the next
+%% newest keepable one [0] (we move samples forward in time since the
+%% semantics of a sample is "we had this many x by this time"). If the
+%% sample is too old, but would not be too old if moved to a rounder
+%% timestamp which does not exist then invent one and move it there
+%% [1]. But if it's just outright too old, move it to the base [2].
+%% One GC pass over the index entries of a single object. Cutoff is
+%% {Policy, Now} (see keep/2). Keep tracks the rounded timestamp that
+%% too-old samples are currently being merged into; it is 'undefined'
+%% until the first {move, D} invents one ([1]).
+gc(_Cutoff, [], _Table, _Keep) ->
+    ok;
+gc(Cutoff, [Index | T], Table, Keep) ->
+    case ets:lookup(Table, Index) of
+        [S] ->
+            {Id, TS} = Key = element(1, S),
+            Keep1 = case keep(Cutoff, TS) of
+                        keep ->
+                            TS;
+                        drop -> %% [2]
+                            %% Outright too old: fold into the base record.
+                            ets_update(Table, {Id, base}, S),
+                            ets_delete_value(Table, Key),
+                            Keep;
+                        {move, D} when Keep =:= undefined -> %% [1]
+                            ets_update(Table, {Id, TS + D}, S),
+                            ets_delete_value(Table, Key),
+                            TS + D;
+                        {move, _} -> %% [0]
+                            %% Amalgamate with the newest keepable sample.
+                            ets_update(Table, {Id, Keep}, S),
+                            ets_delete_value(Table, Key),
+                            Keep
+                    end,
+            gc(Cutoff, T, Table, Keep1);
+        _ ->
+            %% Stale index entry with no backing row: nothing to do.
+            gc(Cutoff, T, Table, Keep)
+    end.
+
+%% Decide the fate of a sample at timestamp TS under the retention
+%% policy: 'keep' when it falls exactly on a retained boundary,
+%% {move, D} to shift it D ms forward onto the next boundary, or
+%% 'drop' when it is older than every policy band. Policy is a list of
+%% {AgeSec, DivisorSec} bands; the most lenient applicable action wins
+%% (see prefer_action/2).
+keep({Policy, Now}, TS) ->
+    lists:foldl(fun ({AgeSec, DivisorSec}, Action) ->
+                        prefer_action(
+                          Action,
+                          case (Now - TS) =< (AgeSec * 1000) of
+                              true  -> DivisorMillis = DivisorSec * 1000,
+                                       case TS rem DivisorMillis of
+                                           0   -> keep;
+                                           Rem -> {move, DivisorMillis - Rem}
+                                       end;
+                              false -> drop
+                          end)
+                end, drop, Policy).
+
+%% Combine two retention decisions, preferring the most conservative:
+%% keeping beats moving, moving beats dropping, and between two moves
+%% the shorter displacement wins.
+prefer_action(keep, _) ->
+    keep;
+prefer_action(_, keep) ->
+    keep;
+prefer_action({move, A}, {move, B}) ->
+    {move, min(A, B)};
+prefer_action({move, _} = Move, drop) ->
+    Move;
+prefer_action(drop, {move, _} = Move) ->
+    Move;
+prefer_action(drop, drop) ->
+    drop.
+
+%%----------------------------------------------------------------------------
+%% ETS update
+%%----------------------------------------------------------------------------
+%% Increment column P of the row keyed K by V. When the row does not
+%% exist yet, ets:update_counter/3 raises badarg (caught by the broad
+%% catch-all), and we instead insert a fresh all-zero record of shape
+%% R (a stats-type atom, see new_record/4) and index the new key.
+ets_update(Table, K, R, P, V) ->
+    try
+        ets:update_counter(Table, K, {P, V})
+    catch
+        _:_ ->
+            ets:insert(Table, new_record(K, R, P, V)),
+            insert_index(Table, K)
+    end.
+
+%% Resolve the companion index and key-index tables for Table via the
+%% project's rabbit_mgmt_stats_tables mapping, then record Key there.
+insert_index(Table, Key) ->
+    insert_index(rabbit_mgmt_stats_tables:index(Table),
+                 rabbit_mgmt_stats_tables:key_index(Table),
+                 Key).
+
+%% Record Key in both the sample index and the key index — except for
+%% aggregation keys whose second element is an atom (base/total — see
+%% full_indexes/3), which are never indexed.
+insert_index(_Index, _KeyIndex, {_, Tag}) when is_atom(Tag) ->
+    ok;
+insert_index(Index, KeyIndex, {Id, _TS} = Key) ->
+    true = ets:insert(Index, Key),
+    ets:insert(KeyIndex, {Id}).
+
+%% Add all of Record's counter columns onto the existing row at Key
+%% (via the {Position, Increment} list from record_to_list/1), or —
+%% when the row is missing and update_counter raises — insert the
+%% record re-keyed to Key and index it. First clause takes the
+%% {Table, Index, KeyIndex} triple; second resolves the index tables
+%% by table name.
+ets_update({Table, IndexTable, KeyIndexTable}, Key, Record) ->
+    try
+        ets:update_counter(Table, Key, record_to_list(Record))
+    catch
+        _:_ ->
+            ets:insert(Table, setelement(1, Record, Key)),
+            insert_index(IndexTable, KeyIndexTable, Key)
+    end;
+ets_update(Table, Key, Record) ->
+    try
+        ets:update_counter(Table, Key, record_to_list(Record))
+    catch
+        _:_ ->
+            ets:insert(Table, setelement(1, Record, Key)),
+            insert_index(Table, Key)
+    end.
+
+%% Build an all-zero record of the arity associated with each stats
+%% type (key in element 1), with column P preset to V. Arities mirror
+%% empty/2 below.
+new_record(K, deliver_get, P, V) ->
+    setelement(P, {K, 0, 0, 0, 0}, V);
+new_record(K, fine_stats, P, V) ->
+    setelement(P, {K, 0, 0, 0, 0, 0, 0, 0, 0}, V);
+new_record(K, queue_msg_rates, P, V) ->
+    setelement(P, {K, 0, 0}, V);
+new_record(K, queue_msg_counts, P, V) ->
+    setelement(P, {K, 0, 0, 0}, V);
+new_record(K, coarse_node_stats, P, V) ->
+    setelement(P, {K, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+                   0, 0, 0, 0, 0, 0, 0, 0, 0}, V);
+new_record(K, coarse_node_node_stats, P, V) ->
+    setelement(P, {K, 0, 0}, V);
+new_record(K, coarse_conn_stats, P, V) ->
+    setelement(P, {K, 0, 0}, V);
+new_record(K, process_stats, P, V) ->
+    setelement(P, {K, 0}, V).
+
+%% Returns a list of {Position, Increment} pairs (as consumed by
+%% ets:update_counter/3): element N of the record becomes {N, Value}
+%% for N >= 2, skipping the key in element 1. Replaces six hand-written
+%% clauses (arities 2, 3, 4, 5, 9 and 29) with one generic
+%% implementation that produces identical output for those shapes and
+%% also covers any record arity added in future.
+record_to_list(Record) when is_tuple(Record), tuple_size(Record) > 1 ->
+    [_Key | Values] = tuple_to_list(Record),
+    lists:zip(lists:seq(2, tuple_size(Record)), Values).
+
+%%----------------------------------------------------------------------------
+
+%% Fetch the stats record stored under {Id, Tag}, falling back to an
+%% all-zero record of the right shape when absent. Accepts either the
+%% bare data table or the {Table, Index, KeyIndex} triple.
+get_value({Table, _Index, _KeyIndex}, Id, Tag, Type) ->
+    get_value(Table, Id, Tag, Type);
+get_value(Table, Id, Tag, Type) ->
+    Key = {Id, Tag},
+    case ets:lookup(Table, Key) of
+        [Record] -> Record;
+        []       -> empty(Key, Type)
+    end.
+
+%% Remove a sample row and its entry in the companion index table
+%% (index resolved by table name via the project mapping).
+ets_delete_value(Table, Key) ->
+    ets:delete_object(rabbit_mgmt_stats_tables:index(Table), Key),
+    ets:delete(Table, Key).
+
+%% Sorted index entries ({Id, TS} keys) for one object; accepts the
+%% table triple or the bare table (index resolved by name). Sorting
+%% gives extract_samples0/6 the rows in timestamp order.
+indexes({_, Index, _}, Id) ->
+    lists:sort(ets:lookup(Index, Id));
+indexes(Table, Id) ->
+    lists:sort(ets:lookup(rabbit_mgmt_stats_tables:index(Table), Id)).
+
+%% Every key that may hold data for Id: all indexed sample keys plus
+%% the synthetic base and total aggregation keys.
+full_indexes(Table, Id) ->
+    full_indexes(Table, rabbit_mgmt_stats_tables:index(Table), Id).
+
+full_indexes(_Table, Index, Id) ->
+    [{Id, base}, {Id, total} | ets:lookup(Index, Id)].
+
+%%----------------------------------------------------------------------------
+%% Match specs to select or delete from the ETS tables
+%%----------------------------------------------------------------------------
+%% Match spec selecting rows of the key-index table (single-element
+%% tuples, see insert_index/3) whose stored id matches the
+%% partially-wildcarded Id; returns the whole object ('$_').
+match_spec_key_index(Id) ->
+    MatchHead = {partial_match(Id)},
+    Id0 = to_simple_match_spec(Id),
+    [{MatchHead, [{'==', Id0, '$1'}],['$_']}].
+
+%% Substitute the concrete half of a 2-tuple id with the '$1' match
+%% variable, keeping the '_' wildcard on the other side.
+partial_match({_Id0, '_'}) ->
+    {'$1', '_'};
+partial_match({'_', _Id1}) ->
+    {'_', '$1'}.
+
+%% Strip the wildcard from a 2-tuple id, and wrap tuple values in an
+%% extra tuple so the match-spec engine treats them as literals rather
+%% than as patterns. Clause order matters: guarded tuple clauses must
+%% precede the unguarded ones.
+to_simple_match_spec({Id0, '_'}) when is_tuple(Id0) ->
+    {Id0};
+to_simple_match_spec({'_', Id1}) when is_tuple(Id1) ->
+    {Id1};
+to_simple_match_spec({Id0, '_'}) ->
+    Id0;
+to_simple_match_spec({'_', Id1}) ->
+    Id1;
+to_simple_match_spec(Id) when is_tuple(Id) ->
+    {Id};
+to_simple_match_spec(Id) ->
+    Id.
+
+%% Equality guard binding the concrete half of the id to the match
+%% variable for its position ('$1' for the first element, '$2' for the
+%% second); tuple values are wrapped so they read as literals in the
+%% match spec.
+to_match_condition({'_', Id1}) when is_tuple(Id1) ->
+    {'==', {Id1}, '$2'};
+to_match_condition({'_', Id1}) ->
+    {'==', Id1, '$2'};
+to_match_condition({Id0, '_'}) when is_tuple(Id0) ->
+    {'==', {Id0}, '$1'};
+to_match_condition({Id0, '_'}) ->
+    {'==', Id0, '$1'}.
+
+%% Match spec returning every {Id0, Id1} key that matches the
+%% partially-wildcarded Id, emitted as the literal pair {{'$1','$2'}}.
+match_spec_keys(Id) ->
+    Head      = {{'$1', '$2'}},
+    Condition = to_match_condition(Id),
+    [{Head, [Condition], [{{'$1', '$2'}}]}].
+
+%%----------------------------------------------------------------------------
+%% Format output
+%%----------------------------------------------------------------------------
+%% Render one diff-based stats record plus its running totals as a
+%% proplist of <metric> / <metric>_details pairs; the _details rate is
+%% the diff column scaled by apply_factor/2. One clause per stats
+%% record shape. (format_rate/5 below additionally emits samples.)
+format_rate(deliver_get, {_, D, DN, G, GN}, {_, TD, TDN, TG, TGN}, Factor) ->
+    [
+     {deliver, TD}, {deliver_details, [{rate, apply_factor(D, Factor)}]},
+     {deliver_no_ack, TDN},
+     {deliver_no_ack_details, [{rate, apply_factor(DN, Factor)}]},
+     {get, TG}, {get_details, [{rate, apply_factor(G, Factor)}]},
+     {get_no_ack, TGN},
+     {get_no_ack_details, [{rate, apply_factor(GN, Factor)}]}
+    ];
+format_rate(fine_stats, {_, P, PI, PO, A, D, C, RU, R},
+            {_, TP, TPI, TPO, TA, TD, TC, TRU, TR}, Factor) ->
+    [
+     {publish, TP}, {publish_details, [{rate, apply_factor(P, Factor)}]},
+     {publish_in, TPI},
+     {publish_in_details, [{rate, apply_factor(PI, Factor)}]},
+     {publish_out, TPO},
+     {publish_out_details, [{rate, apply_factor(PO, Factor)}]},
+     {ack, TA}, {ack_details, [{rate, apply_factor(A, Factor)}]},
+     {deliver_get, TD}, {deliver_get_details, [{rate, apply_factor(D, Factor)}]},
+     {confirm, TC}, {confirm_details, [{rate, apply_factor(C, Factor)}]},
+     {return_unroutable, TRU},
+     {return_unroutable_details, [{rate, apply_factor(RU, Factor)}]},
+     {redeliver, TR}, {redeliver_details, [{rate, apply_factor(R, Factor)}]}
+    ];
+format_rate(queue_msg_rates, {_, R, W}, {_, TR, TW}, Factor) ->
+    [
+     {disk_reads, TR}, {disk_reads_details, [{rate, apply_factor(R, Factor)}]},
+     {disk_writes, TW}, {disk_writes_details, [{rate, apply_factor(W, Factor)}]}
+    ];
+format_rate(queue_msg_counts, {_, M, MR, MU}, {_, TM, TMR, TMU}, Factor) ->
+    [
+     {messages, TM},
+     {messages_details, [{rate, apply_factor(M, Factor)}]},
+     {messages_ready, TMR},
+     {messages_ready_details, [{rate, apply_factor(MR, Factor)}]},
+     {messages_unacknowledged, TMU},
+     {messages_unacknowledged_details, [{rate, apply_factor(MU, Factor)}]}
+    ];
+format_rate(coarse_node_stats,
+            {_, M, F, S, P, D, IR, IB, IA, IWC, IWB, IWAT, IS, ISAT, ISC,
+             ISEAT, IRC, MRTC, MDTC, MSRC, MSWC, QIJWC, QIWC, QIRC, GC, GCW, CS,
+             IO, IOAT},
+            {_, TM, TF, TS, TP, TD, TIR, TIB, TIA, TIWC, TIWB, TIWAT, TIS,
+             TISAT, TISC, TISEAT, TIRC, TMRTC, TMDTC, TMSRC, TMSWC, TQIJWC,
+             TQIWC, TQIRC, TGC, TGCW, TCS, TIO, TIOAT}, Factor) ->
+    %% Calculates average times for read/write/sync/seek from the
+    %% accumulated time and count
+    %% io_<op>_avg_time is the average operation time for the life of the node
+    %% io_<op>_avg_time_details/rate is the average operation time during the
+    %% last time unit calculated (thus similar to an instant rate)
+    [
+     {mem_used, TM},
+     {mem_used_details, [{rate, apply_factor(M, Factor)}]},
+     {fd_used, TF},
+     {fd_used_details, [{rate, apply_factor(F, Factor)}]},
+     {sockets_used, TS},
+     {sockets_used_details, [{rate, apply_factor(S, Factor)}]},
+     {proc_used, TP},
+     {proc_used_details, [{rate, apply_factor(P, Factor)}]},
+     {disk_free, TD},
+     {disk_free_details, [{rate, apply_factor(D, Factor)}]},
+     {io_read_count, TIR},
+     {io_read_count_details, [{rate, apply_factor(IR, Factor)}]},
+     {io_read_bytes, TIB},
+     {io_read_bytes_details, [{rate, apply_factor(IB, Factor)}]},
+     {io_read_avg_time, avg_time(TIA, TIR)},
+     {io_read_avg_time_details, [{rate, avg_time(IA, IR)}]},
+     {io_write_count, TIWC},
+     {io_write_count_details, [{rate, apply_factor(IWC, Factor)}]},
+     {io_write_bytes, TIWB},
+     {io_write_bytes_details, [{rate, apply_factor(IWB, Factor)}]},
+     {io_write_avg_time, avg_time(TIWAT, TIWC)},
+     {io_write_avg_time_details, [{rate, avg_time(IWAT, IWC)}]},
+     {io_sync_count, TIS},
+     {io_sync_count_details, [{rate, apply_factor(IS, Factor)}]},
+     {io_sync_avg_time, avg_time(TISAT, TIS)},
+     {io_sync_avg_time_details, [{rate, avg_time(ISAT, IS)}]},
+     {io_seek_count, TISC},
+     {io_seek_count_details, [{rate, apply_factor(ISC, Factor)}]},
+     {io_seek_avg_time, avg_time(TISEAT, TISC)},
+     {io_seek_avg_time_details, [{rate, avg_time(ISEAT, ISC)}]},
+     {io_reopen_count, TIRC},
+     {io_reopen_count_details, [{rate, apply_factor(IRC, Factor)}]},
+     {mnesia_ram_tx_count, TMRTC},
+     {mnesia_ram_tx_count_details, [{rate, apply_factor(MRTC, Factor)}]},
+     {mnesia_disk_tx_count, TMDTC},
+     {mnesia_disk_tx_count_details, [{rate, apply_factor(MDTC, Factor)}]},
+     {msg_store_read_count, TMSRC},
+     {msg_store_read_count_details, [{rate, apply_factor(MSRC, Factor)}]},
+     {msg_store_write_count, TMSWC},
+     {msg_store_write_count_details, [{rate, apply_factor(MSWC, Factor)}]},
+     {queue_index_journal_write_count, TQIJWC},
+     {queue_index_journal_write_count_details, [{rate, apply_factor(QIJWC, Factor)}]},
+     {queue_index_write_count, TQIWC},
+     {queue_index_write_count_details, [{rate, apply_factor(QIWC, Factor)}]},
+     {queue_index_read_count, TQIRC},
+     {queue_index_read_count_details, [{rate, apply_factor(QIRC, Factor)}]},
+     {gc_num, TGC},
+     {gc_num_details, [{rate, apply_factor(GC, Factor)}]},
+     {gc_bytes_reclaimed, TGCW},
+     {gc_bytes_reclaimed_details, [{rate, apply_factor(GCW, Factor)}]},
+     {context_switches, TCS},
+     {context_switches_details, [{rate, apply_factor(CS, Factor)}]},
+     {io_file_handle_open_attempt_count, TIO},
+     {io_file_handle_open_attempt_count_details, [{rate, apply_factor(IO, Factor)}]},
+     {io_file_handle_open_attempt_avg_time, avg_time(TIOAT, TIO)},
+     {io_file_handle_open_attempt_avg_time_details, [{rate, avg_time(IOAT, IO)}]}
+    ];
+format_rate(coarse_node_node_stats, {_, S, R}, {_, TS, TR}, Factor) ->
+    [
+     {send_bytes, TS},
+     {send_bytes_details, [{rate, apply_factor(S, Factor)}]},
+     %% BUG FIX: the receive-side values were previously emitted under
+     %% duplicate send_bytes / send_bytes_details keys, shadowed in the
+     %% proplist; label them recv_bytes as in the conn-stats clause.
+     {recv_bytes, TR},
+     {recv_bytes_details, [{rate, apply_factor(R, Factor)}]}
+    ];
+format_rate(coarse_conn_stats, {_, R, S}, {_, TR, TS}, Factor) ->
+    [
+     {send_oct, TS},
+     {send_oct_details, [{rate, apply_factor(S, Factor)}]},
+     {recv_oct, TR},
+     {recv_oct_details, [{rate, apply_factor(R, Factor)}]}
+    ];
+format_rate(process_stats, {_, R}, {_, TR}, Factor) ->
+    [
+     {reductions, TR},
+     {reductions_details, [{rate, apply_factor(R, Factor)}]}
+    ].
+
+%% As format_rate/4 but with a third per-column 'samples' tuple: each
+%% _details proplist additionally carries the sample list and (when
+%% there is more than one sample) avg/avg_rate from average/2.
+format_rate(deliver_get, {_, D, DN, G, GN}, {_, TD, TDN, TG, TGN},
+            {_, SD, SDN, SG, SGN}, Factor) ->
+    Length = length(SD),
+    [
+     {deliver, TD}, {deliver_details, [{rate, apply_factor(D, Factor)},
+                                       {samples, SD}] ++ average(SD, Length)},
+     {deliver_no_ack, TDN},
+     {deliver_no_ack_details, [{rate, apply_factor(DN, Factor)},
+                               {samples, SDN}] ++ average(SDN, Length)},
+     {get, TG}, {get_details, [{rate, apply_factor(G, Factor)},
+                               {samples, SG}] ++ average(SG, Length)},
+     {get_no_ack, TGN},
+     {get_no_ack_details, [{rate, apply_factor(GN, Factor)},
+                           {samples, SGN}] ++ average(SGN, Length)}
+    ];
+format_rate(fine_stats, {_, P, PI, PO, A, D, C, RU, R},
+            {_, TP, TPI, TPO, TA, TD, TC, TRU, TR},
+            {_, SP, SPI, SPO, SA, SD, SC, SRU, SR}, Factor) ->
+    Length = length(SP),
+    [
+     {publish, TP},
+     {publish_details, [{rate, apply_factor(P, Factor)},
+                        {samples, SP}] ++ average(SP, Length)},
+     {publish_in, TPI},
+     {publish_in_details, [{rate, apply_factor(PI, Factor)},
+                           {samples, SPI}] ++ average(SPI, Length)},
+     {publish_out, TPO},
+     {publish_out_details, [{rate, apply_factor(PO, Factor)},
+                            {samples, SPO}] ++ average(SPO, Length)},
+     {ack, TA}, {ack_details, [{rate, apply_factor(A, Factor)},
+                               {samples, SA}] ++ average(SA, Length)},
+     {deliver_get, TD},
+     {deliver_get_details, [{rate, apply_factor(D, Factor)},
+                            {samples, SD}] ++ average(SD, Length)},
+     {confirm, TC},
+     {confirm_details, [{rate, apply_factor(C, Factor)},
+                        {samples, SC}] ++ average(SC, Length)},
+     {return_unroutable, TRU},
+     {return_unroutable_details, [{rate, apply_factor(RU, Factor)},
+                                  {samples, SRU}] ++ average(SRU, Length)},
+     {redeliver, TR},
+     {redeliver_details, [{rate, apply_factor(R, Factor)},
+                          {samples, SR}] ++ average(SR, Length)}
+    ];
+format_rate(queue_msg_rates, {_, R, W}, {_, TR, TW}, {_, SR, SW}, Factor) ->
+    Length = length(SR),
+    [
+     {disk_reads, TR},
+     {disk_reads_details, [{rate, apply_factor(R, Factor)},
+                           {samples, SR}] ++ average(SR, Length)},
+     {disk_writes, TW},
+     {disk_writes_details, [{rate, apply_factor(W, Factor)},
+                            {samples, SW}] ++ average(SW, Length)}
+    ];
+format_rate(queue_msg_counts, {_, M, MR, MU}, {_, TM, TMR, TMU},
+            {_, SM, SMR, SMU}, Factor) ->
+    Length = length(SM),
+    [
+     {messages, TM},
+     {messages_details, [{rate, apply_factor(M, Factor)},
+                         {samples, SM}] ++ average(SM, Length)},
+     {messages_ready, TMR},
+     {messages_ready_details, [{rate, apply_factor(MR, Factor)},
+                               {samples, SMR}] ++ average(SMR, Length)},
+     {messages_unacknowledged, TMU},
+     {messages_unacknowledged_details, [{rate, apply_factor(MU, Factor)},
+                                        {samples, SMU}] ++ average(SMU, Length)}
+    ];
+format_rate(coarse_node_stats,
+            {_, M, F, S, P, D, IR, IB, IA, IWC, IWB, IWAT, IS, ISAT, ISC,
+             ISEAT, IRC, MRTC, MDTC, MSRC, MSWC, QIJWC, QIWC, QIRC, GC, GCW, CS,
+             IO, IOAT},
+            {_, TM, TF, TS, TP, TD, TIR, TIB, TIA, TIWC, TIWB, TIWAT, TIS,
+             TISAT, TISC, TISEAT, TIRC, TMRTC, TMDTC, TMSRC, TMSWC, TQIJWC,
+             TQIWC, TQIRC, TGC, TGCW, TCS, TIO, TIOAT},
+            {_, SM, SF, SS, SP, SD, SIR, SIB, SIA, SIWC, SIWB, SIWAT, SIS,
+             SISAT, SISC, SISEAT, SIRC, SMRTC, SMDTC, SMSRC, SMSWC, SQIJWC,
+             SQIWC, SQIRC, SGC, SGCW, SCS, SIO, SIOAT}, Factor) ->
+    %% Calculates average times for read/write/sync/seek from the
+    %% accumulated time and count.
+    %% io_<op>_avg_time is the average operation time for the life of the node.
+    %% io_<op>_avg_time_details/rate is the average operation time during the
+    %% last time unit calculated (thus similar to an instant rate).
+    %% io_<op>_avg_time_details/samples contain the average operation time
+    %% during each time unit requested.
+    %% io_<op>_avg_time_details/avg_rate is meaningless here, but we keep it
+    %% to maintain an uniform API with all the other metrics.
+    %% io_<op>_avg_time_details/avg is the average of the samples taken over
+    %% the requested period of time.
+    Length = length(SM),
+    [
+     {mem_used, TM},
+     {mem_used_details, [{rate, apply_factor(M, Factor)},
+                         {samples, SM}] ++ average(SM, Length)},
+     {fd_used, TF},
+     {fd_used_details, [{rate, apply_factor(F, Factor)},
+                        {samples, SF}] ++ average(SF, Length)},
+     {sockets_used, TS},
+     {sockets_used_details, [{rate, apply_factor(S, Factor)},
+                             {samples, SS}] ++ average(SS, Length)},
+     {proc_used, TP},
+     {proc_used_details, [{rate, apply_factor(P, Factor)},
+                          {samples, SP}] ++ average(SP, Length)},
+     {disk_free, TD},
+     {disk_free_details, [{rate, apply_factor(D, Factor)},
+                          {samples, SD}] ++ average(SD, Length)},
+     {io_read_count, TIR},
+     {io_read_count_details, [{rate, apply_factor(IR, Factor)},
+                              {samples, SIR}] ++ average(SIR, Length)},
+     {io_read_bytes, TIB},
+     {io_read_bytes_details, [{rate, apply_factor(IB, Factor)},
+                              {samples, SIB}] ++ average(SIB, Length)},
+     {io_read_avg_time, avg_time(TIA, TIR)},
+     {io_read_avg_time_details, [{rate, avg_time(IA, IR)},
+                                 {samples, SIA}] ++ average(SIA, Length)},
+     {io_write_count, TIWC},
+     {io_write_count_details, [{rate, apply_factor(IWC, Factor)},
+                               {samples, SIWC}] ++ average(SIWC, Length)},
+     {io_write_bytes, TIWB},
+     {io_write_bytes_details, [{rate, apply_factor(IWB, Factor)},
+                               {samples, SIWB}] ++ average(SIWB, Length)},
+     {io_write_avg_time, avg_time(TIWAT, TIWC)},
+     %% BUG FIX: the instant write-time rate previously divided the
+     %% instant time diff IWAT by the *total* count TIWC; every sibling
+     %% avg_time rate (and format_rate/4's own clause) uses the instant
+     %% pair, so use IWC here.
+     {io_write_avg_time_details, [{rate, avg_time(IWAT, IWC)},
+                                  {samples, SIWAT}] ++ average(SIWAT, Length)},
+     {io_sync_count, TIS},
+     {io_sync_count_details, [{rate, apply_factor(IS, Factor)},
+                              {samples, SIS}] ++ average(SIS, Length)},
+     {io_sync_avg_time, avg_time(TISAT, TIS)},
+     {io_sync_avg_time_details, [{rate, avg_time(ISAT, IS)},
+                                 {samples, SISAT}] ++ average(SISAT, Length)},
+     {io_seek_count, TISC},
+     {io_seek_count_details, [{rate, apply_factor(ISC, Factor)},
+                              {samples, SISC}] ++ average(SISC, Length)},
+     {io_seek_avg_time, avg_time(TISEAT, TISC)},
+     {io_seek_avg_time_details, [{rate, avg_time(ISEAT, ISC)},
+                                 {samples, SISEAT}] ++ average(SISEAT, Length)},
+     {io_reopen_count, TIRC},
+     {io_reopen_count_details, [{rate, apply_factor(IRC, Factor)},
+                                {samples, SIRC}] ++ average(SIRC, Length)},
+     {mnesia_ram_tx_count, TMRTC},
+     {mnesia_ram_tx_count_details, [{rate, apply_factor(MRTC, Factor)},
+                                    {samples, SMRTC}] ++ average(SMRTC, Length)},
+     {mnesia_disk_tx_count, TMDTC},
+     {mnesia_disk_tx_count_details, [{rate, apply_factor(MDTC, Factor)},
+                                     {samples, SMDTC}] ++ average(SMDTC, Length)},
+     {msg_store_read_count, TMSRC},
+     {msg_store_read_count_details, [{rate, apply_factor(MSRC, Factor)},
+                                     {samples, SMSRC}] ++ average(SMSRC, Length)},
+     {msg_store_write_count, TMSWC},
+     {msg_store_write_count_details, [{rate, apply_factor(MSWC, Factor)},
+                                      {samples, SMSWC}] ++ average(SMSWC, Length)},
+     {queue_index_journal_write_count, TQIJWC},
+     {queue_index_journal_write_count_details,
+      [{rate, apply_factor(QIJWC, Factor)},
+       {samples, SQIJWC}] ++ average(SQIJWC, Length)},
+     {queue_index_write_count, TQIWC},
+     {queue_index_write_count_details, [{rate, apply_factor(QIWC, Factor)},
+                                        {samples, SQIWC}] ++ average(SQIWC, Length)},
+     {queue_index_read_count, TQIRC},
+     {queue_index_read_count_details, [{rate, apply_factor(QIRC, Factor)},
+                                       {samples, SQIRC}] ++ average(SQIRC, Length)},
+     {gc_num, TGC},
+     {gc_num_details, [{rate, apply_factor(GC, Factor)},
+                       {samples, SGC}] ++ average(SGC, Length)},
+     {gc_bytes_reclaimed, TGCW},
+     {gc_bytes_reclaimed_details, [{rate, apply_factor(GCW, Factor)},
+                                   {samples, SGCW}] ++ average(SGCW, Length)},
+     {context_switches, TCS},
+     {context_switches_details, [{rate, apply_factor(CS, Factor)},
+                                 {samples, SCS}] ++ average(SCS, Length)},
+     {io_file_handle_open_attempt_count, TIO},
+     {io_file_handle_open_attempt_count_details,
+      [{rate, apply_factor(IO, Factor)},
+       {samples, SIO}] ++ average(SIO, Length)},
+     {io_file_handle_open_attempt_avg_time, avg_time(TIOAT, TIO)},
+     {io_file_handle_open_attempt_avg_time_details,
+      [{rate, avg_time(IOAT, IO)},
+       {samples, SIOAT}] ++ average(SIOAT, Length)}
+    ];
+format_rate(coarse_node_node_stats, {_, S, R}, {_, TS, TR}, {_, SS, SR},
+            Factor) ->
+    Length = length(SS),
+    [
+     {send_bytes, TS},
+     {send_bytes_details, [{rate, apply_factor(S, Factor)},
+                           {samples, SS}] ++ average(SS, Length)},
+     %% BUG FIX: receive-side values were emitted under duplicate
+     %% send_bytes / send_bytes_details keys, shadowed in the proplist;
+     %% label them recv_bytes as in the conn-stats clause.
+     {recv_bytes, TR},
+     {recv_bytes_details, [{rate, apply_factor(R, Factor)},
+                           {samples, SR}] ++ average(SR, Length)}
+    ];
+format_rate(coarse_conn_stats, {_, R, S}, {_, TR, TS}, {_, SR, SS},
+            Factor) ->
+    Length = length(SS),
+    [
+     {send_oct, TS},
+     {send_oct_details, [{rate, apply_factor(S, Factor)},
+                         {samples, SS}] ++ average(SS, Length)},
+     {recv_oct, TR},
+     {recv_oct_details, [{rate, apply_factor(R, Factor)},
+                         {samples, SR}] ++ average(SR, Length)}
+    ];
+format_rate(process_stats, {_, R}, {_, TR}, {_, SR}, Factor) ->
+    Length = length(SR),
+    [
+     {reductions, TR},
+     {reductions_details, [{rate, apply_factor(R, Factor)},
+                           {samples, SR}] ++ average(SR, Length)}
+    ].
+
+%% Scale a diff-based counter by the time factor; the * 1000 suggests
+%% Factor is an elapsed time in milliseconds and the result a
+%% per-second rate (confirm against callers). A zero factor yields a
+%% 0.0 rate instead of a badarith: the previous first clause
+%% pattern-matched the float 0.0 only, so an integer Factor of 0 would
+%% crash — guard with == so both 0 and 0.0 are caught.
+apply_factor(_S, Factor) when Factor == 0 ->
+    0.0;
+apply_factor(S, Factor) ->
+    S * 1000 / Factor.
+
+%% Summary statistics over a sample list as built by append_sample/3:
+%% samples are prepended, so hd/1 is the newest point and lists:last/1
+%% the oldest. avg_rate is the end-to-end slope (S2 - S1 over T2 - T1,
+%% scaled by 1000 — per second if timestamps are millis); avg is the
+%% arithmetic mean. Fewer than two samples define neither, so emit [].
+average(_Samples, Length) when Length =< 1 ->
+    [];
+average(Samples, Length) ->
+    [{sample, S2}, {timestamp, T2}] = hd(Samples),
+    [{sample, S1}, {timestamp, T1}] = lists:last(Samples),
+    Total = lists:sum([pget(sample, I) || I <- Samples]),
+    [{avg_rate, (S2 - S1) * 1000 / (T2 - T1)},
+     {avg,      Total / Length}].
+%%----------------------------------------------------------------------------
+
+%% Element-wise sum of two stats records of the same shape, keeping
+%% the first record's key; one clause per record arity (mirrors
+%% empty/2 below).
+add_record({Base, V1}, {_, V11}) ->
+    {Base, V1 + V11};
+add_record({Base, V1, V2}, {_, V11, V21}) ->
+    {Base, V1 + V11, V2 + V21};
+add_record({Base, V1, V2, V3}, {_, V1a, V2a, V3a}) ->
+    {Base, V1 + V1a, V2 + V2a, V3 + V3a};
+add_record({Base, V1, V2, V3, V4}, {_, V1a, V2a, V3a, V4a}) ->
+    {Base, V1 + V1a, V2 + V2a, V3 + V3a, V4 + V4a};
+add_record({Base, V1, V2, V3, V4, V5, V6, V7, V8},
+           {_, V1a, V2a, V3a, V4a, V5a, V6a, V7a, V8a}) ->
+    {Base, V1 + V1a, V2 + V2a, V3 + V3a, V4 + V4a, V5 + V5a, V6 + V6a, V7 + V7a,
+     V8 + V8a};
+add_record({Base, V1, V2, V3, V4, V5, V6, V7, V8, V9, V10, V11, V12, V13, V14,
+            V15, V16, V17, V18, V19, V20, V21, V22, V23, V24, V25, V26, V27, V28},
+           {_, V1a, V2a, V3a, V4a, V5a, V6a, V7a, V8a, V9a, V10a, V11a, V12a,
+            V13a, V14a, V15a, V16a, V17a, V18a, V19a, V20a, V21a, V22a, V23a,
+            V24a, V25a, V26a, V27a, V28a}) ->
+    {Base, V1 + V1a, V2 + V2a, V3 + V3a, V4 + V4a, V5 + V5a, V6 + V6a, V7 + V7a,
+     V8 + V8a, V9 + V9a, V10 + V10a, V11 + V11a, V12 + V12a, V13 + V13a,
+     V14 + V14a, V15 + V15a, V16 + V16a, V17 + V17a, V18 + V18a, V19 + V19a,
+     V20 + V20a, V21 + V21a, V22 + V22a, V23 + V23a, V24 + V24a, V25 + V25a,
+     V26 + V26a, V27 + V27a, V28 + V28a}.
+
+%% All-zero stats record of the proper arity for each stats type,
+%% keyed by Key in element 1.
+empty(Key, process_stats) ->
+    {Key, 0};
+empty(Key, Type) when Type == queue_msg_rates;
+                      Type == coarse_node_node_stats;
+                      Type == coarse_conn_stats ->
+    {Key, 0, 0};
+empty(Key, queue_msg_counts) ->
+    {Key, 0, 0, 0};
+empty(Key, deliver_get) ->
+    {Key, 0, 0, 0, 0};
+empty(Key, fine_stats) ->
+    {Key, 0, 0, 0, 0, 0, 0, 0, 0}; 
+empty(Key, coarse_node_stats) ->
+    {Key, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+     0, 0, 0, 0, 0}.
+
+%% Per-type tuple of (initially empty) per-column sample lists, tagged
+%% 'samples'; the number of lists mirrors the value columns of
+%% empty/2.
+empty_list(process_stats) ->
+    {samples, []};
+empty_list(Type) when Type == queue_msg_rates;
+                      Type == coarse_node_node_stats;
+                      Type == coarse_conn_stats ->
+    {samples, [], []};
+empty_list(queue_msg_counts) ->
+    {samples, [], [], []};
+empty_list(deliver_get) ->
+    {samples, [], [], [], []};
+empty_list(fine_stats) ->
+    {samples, [], [], [], [], [], [], [], []};
+empty_list(coarse_node_stats) ->
+    {samples, [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [],
+     [], [], [], [], [], [], [], [], [], [], [], []}.
+
+
+%% True when every value column of a stats record is exactly 0 (the
+%% key in element 1 is ignored); anything else — including non-tuples —
+%% is not blank. Generalised over record arity instead of one clause
+%% per shape; for the shapes actually produced here (arities 2, 3, 4,
+%% 5, 9 and 29 — see empty/2) behaviour is unchanged, and new record
+%% shapes are covered automatically.
+is_blank(Record) when is_tuple(Record), tuple_size(Record) > 1 ->
+    lists:all(fun (V) -> V =:= 0 end, tl(tuple_to_list(Record)));
+is_blank(_) ->
+    false.
+
+%% Average time per operation, 0.0 when no operations happened (avoids
+%% division by zero). The ?MICRO_TO_MILLI divisor (macro defined
+%% elsewhere — presumably rabbit_mgmt_metrics.hrl; confirm) suggests
+%% Total is accumulated in microseconds and results are reported in
+%% milliseconds.
+avg_time(_Total, 0) ->
+    0.0;
+avg_time(Total, Count) ->
+    (Total / Count) / ?MICRO_TO_MILLI.
+
+%% Instant average over the interval since the previous raw sample:
+%% diff both the accumulated time and the operation count first.
+avg_time(Total, Count, BaseTotal, BaseCount) ->
+    avg_time(Total - BaseTotal, Count - BaseCount).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_stats_gc.erl b/deps/rabbitmq_management/src/rabbit_mgmt_stats_gc.erl
new file mode 100644 (file)
index 0000000..44b5277
--- /dev/null
@@ -0,0 +1,219 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_stats_gc).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include("rabbit_mgmt_metrics.hrl").
+
+-behaviour(gen_server2).
+
+-export([start_link/1]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+         code_change/3, handle_pre_hibernate/1]).
+
+-export([name/1]).
+
+-import(rabbit_misc, [pget/3]).
+-import(rabbit_mgmt_db, [pget/2, id_name/1, id/2, lookup_element/2]).
+
+-record(state, {
+          interval,
+          gc_timer,
+          gc_table,
+          gc_index,
+          gc_next_key
+         }).
+
+-define(GC_INTERVAL, 5000).
+-define(GC_MIN_ROWS, 50).
+-define(GC_MIN_RATIO, 0.001).
+
+-define(DROP_LENGTH, 1000).
+
+-define(PROCESS_ALIVENESS_TIMEOUT, 15000).
+
+%%----------------------------------------------------------------------------
+%% API
+%%----------------------------------------------------------------------------
+
+start_link(Table) ->
+    case gen_server2:start_link({global, name(Table)}, ?MODULE, [Table], []) of
+        {ok, Pid} -> register(name(Table), Pid), %% [1]
+                     {ok, Pid};
+        Else      -> Else
+    end.
+%% [1] For debugging it's helpful to locally register the name too
+%% since that shows up in places global names don't.
+
+%%----------------------------------------------------------------------------
+%% Internal, gen_server2 callbacks
+%%----------------------------------------------------------------------------
+
+init([Table]) ->
+    {ok, Interval} = application:get_env(rabbit, collect_statistics_interval),
+    rabbit_log:info("Statistics garbage collector started for table ~p with interval ~p.~n", [Table, Interval]),
+    {ok, set_gc_timer(#state{interval = Interval,
+                             gc_table = Table,
+                             gc_index = rabbit_mgmt_stats_tables:key_index(Table)}),
+     hibernate,
+     {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
+
+handle_call(_Request, _From, State) ->
+    reply(not_understood, State).
+
+handle_cast(_Request, State) ->
+    noreply(State).
+
+handle_info(gc, State) ->
+    noreply(set_gc_timer(gc_batch(State)));
+
+handle_info(_Info, State) ->
+    noreply(State).
+
+terminate(_Arg, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+reply(Reply, NewState) -> {reply, Reply, NewState, hibernate}.
+noreply(NewState) -> {noreply, NewState, hibernate}.
+
+set_gc_timer(State) ->
+    TRef = erlang:send_after(?GC_INTERVAL, self(), gc),
+    State#state{gc_timer = TRef}.
+
+handle_pre_hibernate(State) ->
+    {hibernate, State}.
+
+%%----------------------------------------------------------------------------
+%% Internal, utilities
+%%----------------------------------------------------------------------------
+
+floor(TS, #state{interval = Interval}) ->
+    rabbit_mgmt_util:floor(TS, Interval).
+
+%%----------------------------------------------------------------------------
+%% Internal, event-GCing
+%%----------------------------------------------------------------------------
+
+gc_batch(#state{gc_index = Index} = State) ->
+    {ok, Policies} = application:get_env(
+                       rabbitmq_management, sample_retention_policies),
+    {ok, ProcGCTimeout} = application:get_env(
+                            rabbitmq_management, process_stats_gc_timeout),
+    Config = [{policies, Policies}, {process_stats_gc_timeout, ProcGCTimeout}],
+    Total = ets:info(Index, size),
+    Rows = erlang:max(erlang:min(Total, ?GC_MIN_ROWS), round(?GC_MIN_RATIO * Total)),
+    gc_batch(Rows, Config, State).
+
+gc_batch(0, _Config, State) ->
+    State;
+gc_batch(Rows, Config, State = #state{gc_next_key = Cont,
+                                        gc_table = Table,
+                                        gc_index = Index}) ->
+    Select = case Cont of
+                 undefined ->
+                     ets:first(Index);
+                 _ ->
+                     ets:next(Index, Cont)
+             end,
+    NewCont = case Select of
+                  '$end_of_table' ->
+                      undefined;
+                  Key ->
+                      Now = floor(
+                              time_compat:os_system_time(milli_seconds),
+                              State),
+                      gc(Key, Table, Config, Now),
+                      Key
+              end,
+    gc_batch(Rows - 1, Config, State#state{gc_next_key = NewCont}).
+
+gc(Key, Table, Config, Now) ->
+    case lists:member(Table, ?PROC_STATS_TABLES) of
+        true  -> gc_proc(Key, Table, Config, Now);
+        false -> gc_aggr(Key, Table, Config, Now)
+    end.
+
+gc_proc(Key, Table, Config, Now) when Table == connection_stats;
+                                 Table == channel_stats ->
+    Timeout = pget(process_stats_gc_timeout, Config),
+    case ets:lookup(Table, {Key, stats}) of
+        %% Key is already cleared. Skipping
+        []                           -> ok;
+        [{{Key, stats}, _Stats, TS}] -> maybe_gc_process(Key, Table,
+                                                         TS, Now, Timeout)
+    end.
+
+gc_aggr(Key, Table, Config, Now) ->
+    Policies = pget(policies, Config),
+    Policy   = pget(retention_policy(Table), Policies),
+    rabbit_mgmt_stats:gc({Policy, Now}, Table, Key).
+
+maybe_gc_process(Pid, Table, LastStatsTS, Now, Timeout) ->
+    case Now - LastStatsTS < Timeout of
+        true  -> ok;
+        false ->
+            case process_status(Pid) of
+                %% Process doesn't exist on remote node
+                undefined -> rabbit_event:notify(deleted_event(Table),
+                                                 [{pid, Pid}]);
+                %% Remote node is unreachable or process is alive
+                _        -> ok
+            end
+    end.
+
+process_status(Pid) when node(Pid) =:= node() ->
+    process_info(Pid, status);
+process_status(Pid) ->
+    rpc:block_call(node(Pid), erlang, process_info, [Pid, status],
+                   ?PROCESS_ALIVENESS_TIMEOUT).
+
+deleted_event(channel_stats)    -> channel_closed;
+deleted_event(connection_stats) -> connection_closed.
+
+retention_policy(aggr_node_stats_coarse_node_stats) -> global;
+retention_policy(aggr_node_node_stats_coarse_node_node_stats) -> global;
+retention_policy(aggr_vhost_stats_deliver_get) -> global;
+retention_policy(aggr_vhost_stats_fine_stats) -> global;
+retention_policy(aggr_vhost_stats_queue_msg_rates) -> global;
+retention_policy(aggr_vhost_stats_msg_rates_details) -> global;
+retention_policy(aggr_vhost_stats_queue_msg_counts) -> global;
+retention_policy(aggr_vhost_stats_coarse_conn_stats) -> global;
+retention_policy(aggr_queue_stats_fine_stats) -> basic;
+retention_policy(aggr_queue_stats_deliver_get) -> basic;
+retention_policy(aggr_queue_stats_queue_msg_counts) -> basic;
+retention_policy(aggr_queue_stats_queue_msg_rates) -> basic;
+retention_policy(aggr_queue_stats_process_stats) -> basic;
+retention_policy(aggr_exchange_stats_fine_stats) -> basic;
+retention_policy(aggr_connection_stats_coarse_conn_stats) -> basic;
+retention_policy(aggr_connection_stats_process_stats) -> basic;
+retention_policy(aggr_channel_stats_deliver_get) -> basic;
+retention_policy(aggr_channel_stats_fine_stats) -> basic;
+retention_policy(aggr_channel_stats_queue_msg_counts) -> basic;
+retention_policy(aggr_channel_stats_process_stats) -> basic;
+retention_policy(aggr_queue_exchange_stats_fine_stats)   -> detailed;
+retention_policy(aggr_channel_exchange_stats_deliver_get) -> detailed;
+retention_policy(aggr_channel_exchange_stats_fine_stats) -> detailed;
+retention_policy(aggr_channel_queue_stats_deliver_get) -> detailed;
+retention_policy(aggr_channel_queue_stats_fine_stats) -> detailed;
+retention_policy(aggr_channel_queue_stats_queue_msg_counts) -> detailed.
+
+name(Atom) ->
+    list_to_atom((atom_to_list(Atom) ++ "_gc")).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_stats_tables.erl b/deps/rabbitmq_management/src/rabbit_mgmt_stats_tables.erl
new file mode 100644 (file)
index 0000000..2505280
--- /dev/null
@@ -0,0 +1,271 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ.
+%%
+%%   The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_stats_tables).
+
+-include("rabbit_mgmt_metrics.hrl").
+
+-export([aggr_table/2, aggr_tables/1, type_from_table/1,
+         index/1, key_index/1]).
+
+-spec aggr_table(event_type(), type()) -> table_name().
+aggr_table(queue_stats, deliver_get) ->
+    aggr_queue_stats_deliver_get;
+aggr_table(queue_stats, fine_stats) ->
+    aggr_queue_stats_fine_stats;
+aggr_table(queue_stats, queue_msg_counts) ->
+    aggr_queue_stats_queue_msg_counts;
+aggr_table(queue_stats, queue_msg_rates) ->
+    aggr_queue_stats_queue_msg_rates;
+aggr_table(queue_stats, process_stats) ->
+    aggr_queue_stats_process_stats;
+aggr_table(queue_exchange_stats, fine_stats) ->
+    aggr_queue_exchange_stats_fine_stats;
+aggr_table(vhost_stats, deliver_get) ->
+    aggr_vhost_stats_deliver_get;
+aggr_table(vhost_stats, fine_stats) ->
+    aggr_vhost_stats_fine_stats;
+aggr_table(vhost_stats, queue_msg_rates) ->
+    aggr_vhost_stats_queue_msg_rates;
+aggr_table(vhost_stats, queue_msg_counts) ->
+    aggr_vhost_stats_queue_msg_counts;
+aggr_table(vhost_stats, coarse_conn_stats) ->
+    aggr_vhost_stats_coarse_conn_stats;
+aggr_table(channel_queue_stats, deliver_get) ->
+    aggr_channel_queue_stats_deliver_get;
+aggr_table(channel_queue_stats, fine_stats) ->
+    aggr_channel_queue_stats_fine_stats;
+aggr_table(channel_queue_stats, queue_msg_counts) ->
+    aggr_channel_queue_stats_queue_msg_counts;
+aggr_table(channel_stats, deliver_get) ->
+    aggr_channel_stats_deliver_get;
+aggr_table(channel_stats, fine_stats) ->
+    aggr_channel_stats_fine_stats;
+aggr_table(channel_stats, queue_msg_counts) ->
+    aggr_channel_stats_queue_msg_counts;
+aggr_table(channel_stats, process_stats) ->
+    aggr_channel_stats_process_stats;
+aggr_table(channel_exchange_stats, deliver_get) ->
+    aggr_channel_exchange_stats_deliver_get;
+aggr_table(channel_exchange_stats, fine_stats) ->
+    aggr_channel_exchange_stats_fine_stats;
+aggr_table(exchange_stats, fine_stats) ->
+    aggr_exchange_stats_fine_stats;
+aggr_table(node_stats, coarse_node_stats) ->
+    aggr_node_stats_coarse_node_stats;
+aggr_table(node_node_stats, coarse_node_node_stats) ->
+    aggr_node_node_stats_coarse_node_node_stats;
+aggr_table(connection_stats, coarse_conn_stats) ->
+    aggr_connection_stats_coarse_conn_stats;
+aggr_table(connection_stats, process_stats) ->
+    aggr_connection_stats_process_stats.
+
+-spec aggr_tables(event_type()) -> [{table_name(), type()}].
+aggr_tables(queue_stats) ->
+    [{aggr_queue_stats_fine_stats, fine_stats},
+     {aggr_queue_stats_deliver_get, deliver_get},
+     {aggr_queue_stats_queue_msg_counts, queue_msg_counts},
+     {aggr_queue_stats_queue_msg_rates, queue_msg_rates},
+     {aggr_queue_stats_process_stats, process_stats}];
+aggr_tables(queue_exchange_stats) ->
+    [{aggr_queue_exchange_stats_fine_stats, fine_stats}];
+aggr_tables(vhost_stats) ->
+    [{aggr_vhost_stats_deliver_get, deliver_get},
+     {aggr_vhost_stats_fine_stats, fine_stats},
+     {aggr_vhost_stats_queue_msg_rates, queue_msg_rates},
+     {aggr_vhost_stats_queue_msg_counts, queue_msg_counts},
+     {aggr_vhost_stats_coarse_conn_stats, coarse_conn_stats}];
+aggr_tables(channel_queue_stats) ->
+    [{aggr_channel_queue_stats_deliver_get, deliver_get},
+     {aggr_channel_queue_stats_fine_stats, fine_stats},
+     {aggr_channel_queue_stats_queue_msg_counts, queue_msg_counts}];
+aggr_tables(channel_stats) ->
+    [{aggr_channel_stats_deliver_get, deliver_get},
+     {aggr_channel_stats_fine_stats, fine_stats},
+     {aggr_channel_stats_queue_msg_counts, queue_msg_counts},
+     {aggr_channel_stats_process_stats, process_stats}];
+aggr_tables(channel_exchange_stats) ->
+    [{aggr_channel_exchange_stats_deliver_get, deliver_get},
+     {aggr_channel_exchange_stats_fine_stats, fine_stats}];
+aggr_tables(exchange_stats) ->
+    [{aggr_exchange_stats_fine_stats, fine_stats}];
+aggr_tables(node_stats) ->
+    [{aggr_node_stats_coarse_node_stats, coarse_node_stats}];
+aggr_tables(node_node_stats) ->
+    [{aggr_node_node_stats_coarse_node_node_stats, coarse_node_node_stats}];
+aggr_tables(connection_stats) ->
+    [{aggr_connection_stats_coarse_conn_stats, coarse_conn_stats},
+     {aggr_connection_stats_process_stats, process_stats}].
+
+-spec type_from_table(table_name()) -> type().
+type_from_table(aggr_queue_stats_deliver_get) ->
+    deliver_get;
+type_from_table(aggr_queue_stats_fine_stats) ->
+    fine_stats;
+type_from_table(aggr_queue_stats_queue_msg_counts) ->
+    queue_msg_counts;
+type_from_table(aggr_queue_stats_queue_msg_rates) ->
+    queue_msg_rates;
+type_from_table(aggr_queue_stats_process_stats) ->
+    process_stats;
+type_from_table(aggr_queue_exchange_stats_fine_stats) ->
+    fine_stats;
+type_from_table(aggr_vhost_stats_deliver_get) ->
+    deliver_get;
+type_from_table(aggr_vhost_stats_fine_stats) ->
+    fine_stats;
+type_from_table(aggr_vhost_stats_queue_msg_rates) ->
+    queue_msg_rates;
+type_from_table(aggr_vhost_stats_queue_msg_counts) ->
+    queue_msg_counts;
+type_from_table(aggr_vhost_stats_coarse_conn_stats) ->
+    coarse_conn_stats;
+type_from_table(aggr_channel_queue_stats_deliver_get) ->
+    deliver_get;
+type_from_table(aggr_channel_queue_stats_fine_stats) ->
+    fine_stats;
+type_from_table(aggr_channel_queue_stats_queue_msg_counts) ->
+    queue_msg_counts;
+type_from_table(aggr_channel_stats_deliver_get) ->
+    deliver_get;
+type_from_table(aggr_channel_stats_fine_stats) ->
+    fine_stats;
+type_from_table(aggr_channel_stats_queue_msg_counts) ->
+    queue_msg_counts;
+type_from_table(aggr_channel_stats_process_stats) ->
+    process_stats;
+type_from_table(aggr_channel_exchange_stats_deliver_get) ->
+    deliver_get;
+type_from_table(aggr_channel_exchange_stats_fine_stats) ->
+    fine_stats;
+type_from_table(aggr_exchange_stats_fine_stats) ->
+    fine_stats;
+type_from_table(aggr_node_stats_coarse_node_stats) ->
+    coarse_node_stats;
+type_from_table(aggr_node_node_stats_coarse_node_node_stats) ->
+    coarse_node_node_stats;
+type_from_table(aggr_node_node_stats_coarse_conn_stats) ->
+    coarse_conn_stats;
+type_from_table(aggr_connection_stats_coarse_conn_stats) ->
+    coarse_conn_stats;
+type_from_table(aggr_connection_stats_process_stats) ->
+    process_stats.
+
+index(aggr_queue_stats_deliver_get) ->
+    aggr_queue_stats_deliver_get_index;
+index(aggr_queue_stats_fine_stats) ->
+    aggr_queue_stats_fine_stats_index;
+index(aggr_queue_stats_queue_msg_counts) ->
+    aggr_queue_stats_queue_msg_counts_index;
+index(aggr_queue_stats_queue_msg_rates) ->
+    aggr_queue_stats_queue_msg_rates_index;
+index(aggr_queue_stats_process_stats) ->
+    aggr_queue_stats_process_stats_index;
+index(aggr_queue_exchange_stats_fine_stats) ->
+    aggr_queue_exchange_stats_fine_stats_index;
+index(aggr_vhost_stats_deliver_get) ->
+    aggr_vhost_stats_deliver_get_index;
+index(aggr_vhost_stats_fine_stats) ->
+    aggr_vhost_stats_fine_stats_index;
+index(aggr_vhost_stats_queue_msg_rates) ->
+    aggr_vhost_stats_queue_msg_rates_index;
+index(aggr_vhost_stats_queue_msg_counts) ->
+    aggr_vhost_stats_queue_msg_counts_index;
+index(aggr_vhost_stats_coarse_conn_stats) ->
+    aggr_vhost_stats_coarse_conn_stats_index;
+index(aggr_channel_queue_stats_deliver_get) ->
+    aggr_channel_queue_stats_deliver_get_index;
+index(aggr_channel_queue_stats_fine_stats) ->
+    aggr_channel_queue_stats_fine_stats_index;
+index(aggr_channel_queue_stats_queue_msg_counts) ->
+    aggr_channel_queue_stats_queue_msg_counts_index;
+index(aggr_channel_stats_deliver_get) ->
+    aggr_channel_stats_deliver_get_index;
+index(aggr_channel_stats_fine_stats) ->
+    aggr_channel_stats_fine_stats_index;
+index(aggr_channel_stats_queue_msg_counts) ->
+    aggr_channel_stats_queue_msg_counts_index;
+index(aggr_channel_stats_process_stats) ->
+    aggr_channel_stats_process_stats_index;
+index(aggr_channel_exchange_stats_deliver_get) ->
+    aggr_channel_exchange_stats_deliver_get_index;
+index(aggr_channel_exchange_stats_fine_stats) ->
+    aggr_channel_exchange_stats_fine_stats_index;
+index(aggr_exchange_stats_fine_stats) ->
+    aggr_exchange_stats_fine_stats_index;
+index(aggr_node_stats_coarse_node_stats) ->
+    aggr_node_stats_coarse_node_stats_index;
+index(aggr_node_node_stats_coarse_node_node_stats) ->
+    aggr_node_node_stats_coarse_node_node_stats_index;
+index(aggr_connection_stats_coarse_conn_stats) ->
+    aggr_connection_stats_coarse_conn_stats_index;
+index(aggr_connection_stats_process_stats) ->
+    aggr_connection_stats_process_stats_index.
+
+key_index(connection_stats) ->
+    connection_stats_key_index;
+key_index(channel_stats) ->
+    channel_stats_key_index;
+key_index(aggr_queue_stats_deliver_get) ->
+    aggr_queue_stats_deliver_get_key_index;
+key_index(aggr_queue_stats_fine_stats) ->
+    aggr_queue_stats_fine_stats_key_index;
+key_index(aggr_queue_stats_queue_msg_counts) ->
+    aggr_queue_stats_queue_msg_counts_key_index;
+key_index(aggr_queue_stats_queue_msg_rates) ->
+    aggr_queue_stats_queue_msg_rates_key_index;
+key_index(aggr_queue_stats_process_stats) ->
+    aggr_queue_stats_process_stats_key_index;
+key_index(aggr_queue_exchange_stats_fine_stats) ->
+    aggr_queue_exchange_stats_fine_stats_key_index;
+key_index(aggr_vhost_stats_deliver_get) ->
+    aggr_vhost_stats_deliver_get_key_index;
+key_index(aggr_vhost_stats_fine_stats) ->
+    aggr_vhost_stats_fine_stats_key_index;
+key_index(aggr_vhost_stats_queue_msg_rates) ->
+    aggr_vhost_stats_queue_msg_rates_key_index;
+key_index(aggr_vhost_stats_queue_msg_counts) ->
+    aggr_vhost_stats_queue_msg_counts_key_index;
+key_index(aggr_vhost_stats_coarse_conn_stats) ->
+    aggr_vhost_stats_coarse_conn_stats_key_index;
+key_index(aggr_channel_queue_stats_deliver_get) ->
+    aggr_channel_queue_stats_deliver_get_key_index;
+key_index(aggr_channel_queue_stats_fine_stats) ->
+    aggr_channel_queue_stats_fine_stats_key_index;
+key_index(aggr_channel_queue_stats_queue_msg_counts) ->
+    aggr_channel_queue_stats_queue_msg_counts_key_index;
+key_index(aggr_channel_stats_deliver_get) ->
+    aggr_channel_stats_deliver_get_key_index;
+key_index(aggr_channel_stats_fine_stats) ->
+    aggr_channel_stats_fine_stats_key_index;
+key_index(aggr_channel_stats_queue_msg_counts) ->
+    aggr_channel_stats_queue_msg_counts_key_index;
+key_index(aggr_channel_stats_process_stats) ->
+    aggr_channel_stats_process_stats_key_index;
+key_index(aggr_channel_exchange_stats_deliver_get) ->
+    aggr_channel_exchange_stats_deliver_get_key_index;
+key_index(aggr_channel_exchange_stats_fine_stats) ->
+    aggr_channel_exchange_stats_fine_stats_key_index;
+key_index(aggr_exchange_stats_fine_stats) ->
+    aggr_exchange_stats_fine_stats_key_index;
+key_index(aggr_node_stats_coarse_node_stats) ->
+    aggr_node_stats_coarse_node_stats_key_index;
+key_index(aggr_node_node_stats_coarse_node_node_stats) ->
+    aggr_node_node_stats_coarse_node_node_stats_key_index;
+key_index(aggr_connection_stats_coarse_conn_stats) ->
+    aggr_connection_stats_coarse_conn_stats_key_index;
+key_index(aggr_connection_stats_process_stats) ->
+    aggr_connection_stats_process_stats_key_index.
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_sup.erl b/deps/rabbitmq_management/src/rabbit_mgmt_sup.erl
new file mode 100644 (file)
index 0000000..326b65e
--- /dev/null
@@ -0,0 +1,50 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Console.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2011-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_sup).
+
+-behaviour(mirrored_supervisor).
+
+-export([init/1]).
+-export([start_link/0]).
+
+-include("rabbit_mgmt_metrics.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+init([]) ->
+    COLLECTOR = {rabbit_mgmt_event_collector,
+                 {rabbit_mgmt_event_collector, start_link, []},
+                 permanent, ?WORKER_WAIT, worker, [rabbit_mgmt_event_collector]},
+    CCOLLECTOR = {rabbit_mgmt_channel_stats_collector,
+                  {rabbit_mgmt_channel_stats_collector, start_link, []},
+                  permanent, ?WORKER_WAIT, worker, [rabbit_mgmt_channel_stats_collector]},
+    QCOLLECTOR = {rabbit_mgmt_queue_stats_collector,
+                  {rabbit_mgmt_queue_stats_collector, start_link, []},
+                  permanent, ?WORKER_WAIT, worker, [rabbit_mgmt_queue_stats_collector]},
+    GC = [{rabbit_mgmt_stats_gc:name(Table), {rabbit_mgmt_stats_gc, start_link, [Table]},
+           permanent, ?WORKER_WAIT, worker, [rabbit_mgmt_stats_gc]}
+          || Table <- ?AGGR_TABLES],
+    ProcGC = [{rabbit_mgmt_stats_gc:name(Table), {rabbit_mgmt_stats_gc, start_link, [Table]},
+           permanent, ?WORKER_WAIT, worker, [rabbit_mgmt_stats_gc]}
+          || Table <- ?PROC_STATS_TABLES],
+    DB = {rabbit_mgmt_db, {rabbit_mgmt_db, start_link, []},
+          permanent, ?WORKER_WAIT, worker, [rabbit_mgmt_db]},
+    {ok, {{one_for_one, 10, 10}, [COLLECTOR, CCOLLECTOR, QCOLLECTOR, DB] ++ GC ++ ProcGC}}.
+
+start_link() ->
+     mirrored_supervisor:start_link(
+       {local, ?MODULE}, ?MODULE, fun rabbit_misc:execute_mnesia_transaction/1,
+       ?MODULE, []).
similarity index 82%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup_sup.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_sup_sup.erl
index 6758a5d3e23177f6258acaee11d646950c82d327..64f4674efe86a62814539d4823c0cc3d7d552e33 100644 (file)
@@ -57,8 +57,15 @@ start_child() -> supervisor2:start_child( ?MODULE, sup()).
 %%----------------------------------------------------------------------------
 
 init([]) ->
-    {ok, {{one_for_one, 0, 1}, [sup()]}}.
+    %% see above as well as https://github.com/rabbitmq/rabbitmq-management/pull/84.
+    %% We send a message to ourselves so that, if there's a conflict
+    %% with the mirrored supervisor already being started on another node,
+    %% we fail and let the other node win, in a way that doesn't
+    %% prevent rabbitmq_management - and, in turn, the entire
+    %% node - from starting.
+    timer:apply_after(0, ?MODULE, start_child, []),
+    {ok, {{one_for_one, 0, 1}, []}}.
 
 sup() ->
     {rabbit_mgmt_sup, {rabbit_mgmt_sup, start_link, []},
-     temporary, ?MAX_WAIT, supervisor, [rabbit_mgmt_sup]}.
+     temporary, ?SUPERVISOR_WAIT, supervisor, [rabbit_mgmt_sup]}.
similarity index 74%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_util.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_util.erl
index 2a151017339a360c8e1bc941cda43f498792ef0d..939cf674b816a833501211bc71d9dac7b26766c0 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_util).
@@ -19,7 +19,7 @@
 %% TODO sort all this out; maybe there's scope for rabbit_mgmt_request?
 
 -export([is_authorized/2, is_authorized_admin/2, is_authorized_admin/4,
-         vhost/1]).
+         vhost/1, vhost_from_headers/1]).
 -export([is_authorized_vhost/2, is_authorized_user/3,
          is_authorized_monitor/2, is_authorized_policies/2]).
 -export([bad_request/3, bad_request_exception/4, id/2, parse_bool/1,
 -export([filter_conn_ch_list/3, filter_user/2, list_login_vhosts/2]).
 -export([with_decode/5, decode/1, decode/2, redirect/2, set_resp_header/3,
          args/1]).
--export([reply_list/3, reply_list/4, sort_list/2, destination_type/1]).
+-export([reply_list/3, reply_list/5, reply_list/4,
+         sort_list/2, destination_type/1, reply_list_or_paginate/3]).
 -export([post_respond/1, columns/1, is_monitor/1]).
 -export([list_visible_vhosts/1, b64decode_or_throw/1, no_range/0, range/1,
          range_ceil/1, floor/2, ceil/2]).
+-export([pagination_params/1, maybe_filter_by_keyword/4,
+         get_value_param/2]).
 
 -import(rabbit_misc, [pget/2, pget/3]).
 
 -include_lib("webmachine/include/wm_reqstate.hrl").
 
 -define(FRAMING, rabbit_framing_amqp_0_9_1).
+-define(DEFAULT_PAGE_SIZE, 100).
+-define(MAX_PAGE_SIZE, 500).
+-record(pagination, {page = undefined, page_size = undefined,
+                    name = undefined, use_regex = undefined}).
+
+-define(MAX_RANGE, 500).
 
 %%--------------------------------------------------------------------
 
@@ -118,7 +127,11 @@ is_authorized(ReqData, Context, Username, Password, ErrorMsg, Fun) ->
                                         [Username, Msg]),
                      not_authorised(Msg, ReqData, Context)
              end,
-    case rabbit_access_control:check_user_pass_login(Username, Password) of
+    AuthProps = [{password, Password}] ++ case vhost(ReqData) of
+        VHost when is_binary(VHost) -> [{vhost, VHost}];
+        _                           -> []
+    end,
+    case rabbit_access_control:check_user_login(Username, AuthProps) of
         {ok, User = #user{tags = Tags}} ->
             IP = peer(ReqData),
             case rabbit_access_control:check_user_loopback(Username, IP) of
@@ -157,13 +170,21 @@ peersock(ReqData) ->
 peername(Sock) when is_port(Sock) -> inet:peername(Sock);
 peername({ssl, SSL})              -> ssl:peername(SSL).
 
+vhost_from_headers(ReqData) ->
+    case wrq:get_req_header(<<"x-vhost">>, ReqData) of
+        undefined -> none;
+        %% blank x-vhost means "All hosts" is selected in the UI
+        []        -> none;
+        VHost     -> list_to_binary(VHost)
+    end.
+
 vhost(ReqData) ->
     case id(vhost, ReqData) of
-        none  -> none;
-        VHost -> case rabbit_vhost:exists(VHost) of
-                     true  -> VHost;
-                     false -> not_found
-                 end
+      none  -> vhost_from_headers(ReqData);
+      VHost -> case rabbit_vhost:exists(VHost) of
+                true  -> VHost;
+                false -> not_found
+               end
     end.
 
 destination_type(ReqData) ->
@@ -178,41 +199,183 @@ reply(Facts, ReqData, Context) ->
 reply0(Facts, ReqData, Context) ->
     ReqData1 = set_resp_header("Cache-Control", "no-cache", ReqData),
     try
-        {mochijson2:encode(Facts), ReqData1, Context}
+        {mochijson2:encode(rabbit_mgmt_format:format_nulls(Facts)), ReqData1,
+        Context}
     catch exit:{json_encode, E} ->
             Error = iolist_to_binary(
                       io_lib:format("JSON encode error: ~p", [E])),
             Reason = iolist_to_binary(
-                       io_lib:format("While encoding:~n~p", [Facts])),
+                       io_lib:format("While encoding: ~n~p", [Facts])),
             internal_server_error(Error, Reason, ReqData1, Context)
     end.
 
 reply_list(Facts, ReqData, Context) ->
-    reply_list(Facts, ["vhost", "name"], ReqData, Context).
+    reply_list(Facts, ["vhost", "name"], ReqData, Context, undefined).
 
 reply_list(Facts, DefaultSorts, ReqData, Context) ->
-    reply(sort_list(
-            extract_columns_list(Facts, ReqData),
-            DefaultSorts,
-            wrq:get_qs_value("sort", ReqData),
-            wrq:get_qs_value("sort_reverse", ReqData)),
-          ReqData, Context).
+    reply_list(Facts, DefaultSorts, ReqData, Context, undefined).
+
+
+reply_list(Facts, DefaultSorts, ReqData, Context, Pagination) ->
+    SortList =
+       sort_list(
+          extract_columns_list(Facts, ReqData),
+          DefaultSorts,
+          wrq:get_qs_value("sort", ReqData),
+          wrq:get_qs_value("sort_reverse", ReqData), Pagination),
 
-sort_list(Facts, Sorts) -> sort_list(Facts, Sorts, undefined, false).
+    reply(SortList, ReqData, Context).
+
+
+reply_list_or_paginate(Facts, ReqData, Context) ->
+    try
+        Pagination = pagination_params(ReqData),
+        reply_list(Facts, ["vhost", "name"], ReqData, Context, Pagination)
+    catch error:badarg ->
+           Reason = iolist_to_binary(
+                      io_lib:format("Pagination parameters are invalid", [])),
+           invalid_pagination(bad_request, Reason, ReqData, Context);
+         {error, ErrorType, S} ->
+            Reason = iolist_to_binary(S),
+            invalid_pagination(ErrorType, Reason, ReqData, Context)
+    end.
 
-sort_list(Facts, DefaultSorts, Sort, Reverse) ->
+
+sort_list(Facts, Sorts) -> sort_list(Facts, Sorts, undefined, false,
+  undefined).
+
+sort_list(Facts, DefaultSorts, Sort, Reverse, Pagination) ->
     SortList = case Sort of
-               undefined -> DefaultSorts;
-               Extra     -> [Extra | DefaultSorts]
-           end,
+                  undefined -> DefaultSorts;
+                  Extra     -> [Extra | DefaultSorts]
+              end,
     %% lists:sort/2 is much more expensive than lists:sort/1
     Sorted = [V || {_K, V} <- lists:sort(
                                 [{sort_key(F, SortList), F} || F <- Facts])],
-    case Reverse of
-        "true" -> lists:reverse(Sorted);
-        _      -> Sorted
+
+    ContextList = maybe_filter_context(Sorted, Pagination),
+    range_filter(maybe_reverse(ContextList, Reverse), Pagination, Sorted).
+
+%%
+%% Filtering functions
+%%
+
+
+maybe_filter_context(List, #pagination{name = Name, use_regex = UseRegex}) when
+      is_list(Name) ->
+    lists:filter(fun(ListF) ->
+                        maybe_filter_by_keyword(name, Name, ListF, UseRegex) 
+                end, 
+                List);
+%% For backwards compatibility with the other API(s), which don't filter the data
+maybe_filter_context(List, _) ->
+    List.
+
+
+match_value({_, Value}, ValueTag, UseRegex) when UseRegex =:= "true" ->
+    case re:run(Value, ValueTag, [caseless]) of
+        {match, _} -> true;
+        nomatch ->  false
+    end;
+match_value({_, Value}, ValueTag, _) ->
+    Pos = string:str(string:to_lower(binary_to_list(Value)),
+        string:to_lower(ValueTag)),
+    case Pos of
+        Pos  when Pos > 0 -> true;
+        _ -> false
+    end.
+
+maybe_filter_by_keyword(KeyTag, ValueTag, List, UseRegex) when
+      is_list(ValueTag), length(ValueTag) > 0 ->
+    match_value(lists:keyfind(KeyTag, 1, List), ValueTag, UseRegex);
+maybe_filter_by_keyword(_, _, _, _) ->
+    true.
+
+check_request_param(V, ReqData) ->
+    case wrq:get_qs_value(V, ReqData) of
+       undefined -> undefined;
+       Str       -> list_to_integer(Str)
+    end.
+
+get_value_param(V, ReqData) ->
+    wrq:get_qs_value(V, ReqData).
+
+%% Validates and returns pagination parameters:
+%% Page is assumed to be > 0, PageSize > 0 and PageSize <= ?MAX_PAGE_SIZE
+pagination_params(ReqData) ->
+    PageNum  = check_request_param("page", ReqData),
+    PageSize = check_request_param("page_size", ReqData),
+    Name = get_value_param("name", ReqData),
+    UseRegex = get_value_param("use_regex", ReqData),
+    case {PageNum, PageSize} of
+        {undefined, _} ->
+            undefined;
+       {PageNum, undefined} when is_integer(PageNum) andalso PageNum > 0 ->
+            #pagination{page = PageNum, page_size = ?DEFAULT_PAGE_SIZE,
+                name =  Name, use_regex = UseRegex};
+        {PageNum, PageSize}  when is_integer(PageNum) 
+                                  andalso is_integer(PageSize)
+                                  andalso (PageNum > 0)
+                                  andalso (PageSize > 0)
+                                  andalso (PageSize =< ?MAX_PAGE_SIZE) ->
+            #pagination{page = PageNum, page_size = PageSize,
+                name =  Name, use_regex = UseRegex};
+        _ -> throw({error, invalid_pagination_parameters,
+                    io_lib:format("Invalid pagination parameters: page number ~p, page size ~p",
+                                  [PageNum, PageSize])})
     end.
 
+maybe_reverse([], _) ->
+    [];
+maybe_reverse(RangeList, "true") when is_list(RangeList) ->
+    lists:reverse(RangeList);
+maybe_reverse(RangeList, true) when is_list(RangeList) ->
+    lists:reverse(RangeList);
+maybe_reverse(RangeList, _) ->
+    RangeList.
+
+%% for backwards compatibility, does not filter the list
+range_filter(List, undefined, _)
+      -> List;
+
+range_filter(List, RP = #pagination{page = PageNum, page_size = PageSize}, 
+            TotalElements) ->
+    Offset = (PageNum - 1) * PageSize + 1,
+    try
+        range_response(lists:sublist(List, Offset, PageSize), RP, List, 
+                      TotalElements)
+    catch
+        error:function_clause ->
+            Reason = io_lib:format(
+                      "Page out of range, page: ~p page size: ~p, len: ~p",
+                      [PageNum, PageSize, length(List)]),
+            throw({error, page_out_of_range, Reason})
+    end.
+
+%% Injects pagination information into the response
+range_response([], #pagination{page = PageNum, page_size = PageSize},
+    TotalFiltered, TotalElements) ->
+    TotalPages = trunc((length(TotalFiltered) + PageSize - 1) / PageSize),
+    [{total_count, length(TotalElements)},
+     {item_count, 0},
+     {filtered_count, length(TotalFiltered)},
+     {page, PageNum},
+     {page_size, PageSize},
+     {page_count, TotalPages},
+     {items, []}
+    ];
+range_response(List, #pagination{page = PageNum, page_size = PageSize},
+    TotalFiltered, TotalElements) ->
+    TotalPages = trunc((length(TotalFiltered) + PageSize - 1) / PageSize),
+    [{total_count, length(TotalElements)},
+     {item_count, length(List)},
+     {filtered_count, length(TotalFiltered)},
+     {page, PageNum},
+     {page_size, PageSize},
+     {page_count, TotalPages},
+     {items, List}
+    ].
+
 sort_key(_Item, []) ->
     [];
 sort_key(Item, [Sort | Sorts]) ->
@@ -229,6 +392,8 @@ get_dotted_value0([Key], Item) ->
 get_dotted_value0([Key | Keys], Item) ->
     get_dotted_value0(Keys, pget_bin(list_to_binary(Key), Item, [])).
 
+pget_bin(Key, {struct, List}, Default) ->
+    pget_bin(Key, List, Default);
 pget_bin(Key, List, Default) ->
     case lists:partition(fun ({K, _V}) -> a2b(K) =:= Key end, List) of
         {[{_K, V}], _} -> V;
@@ -285,11 +450,15 @@ internal_server_error(Error, Reason, ReqData, Context) ->
     rabbit_log:error("~s~n~s~n", [Error, Reason]),
     halt_response(500, Error, Reason, ReqData, Context).
 
+invalid_pagination(Type, Reason, ReqData, Context) ->
+    halt_response(400, Type, Reason, ReqData, Context).
+
 halt_response(Code, Type, Reason, ReqData, Context) ->
     Json = {struct, [{error, Type},
                      {reason, rabbit_mgmt_format:tuple(Reason)}]},
     ReqData1 = wrq:append_to_response_body(mochijson2:encode(Json), ReqData),
-    {{halt, Code}, ReqData1, Context}.
+    {{halt, Code}, set_resp_header(
+             "Content-Type", "application/json", ReqData1), Context}.
 
 id(Key, ReqData) when Key =:= exchange;
                       Key =:= source;
@@ -374,12 +543,12 @@ http_to_amqp(MethodName, ReqData, Context, Transformers, Extra) ->
 props_to_method(MethodName, Props, Transformers, Extra) ->
     Props1 = [{list_to_atom(binary_to_list(K)), V} || {K, V} <- Props],
     props_to_method(
-      MethodName, rabbit_mgmt_format:format(Props1 ++ Extra, Transformers)).
+      MethodName, rabbit_mgmt_format:format(Props1 ++ Extra, {Transformers, true})).
 
 props_to_method(MethodName, Props) ->
     Props1 = rabbit_mgmt_format:format(
                Props,
-               [{fun (Args) -> [{arguments, args(Args)}] end, [arguments]}]),
+               {fun rabbit_mgmt_format:format_args/1, true}),
     FieldNames = ?FRAMING:method_fieldnames(MethodName),
     {Res, _Idx} = lists:foldl(
                     fun (K, {R, Idx}) ->
@@ -580,6 +749,10 @@ range(ReqData) -> {range("lengths",    fun floor/2, ReqData),
 %% we use ceil() we stand a 50:50 chance of looking up the last sample
 %% in the range before we get it, and thus deriving an instantaneous
 %% rate of 0.0.
+%%
+%% Age is assumed to be > 0, Incr > 0 and (Age div Incr) <= ?MAX_RANGE.
+%% The latter condition allows us to limit the number of samples that
+%% will be sent to the client.
 range_ceil(ReqData) -> {range("lengths",    fun ceil/2,  ReqData),
                         range("msg_rates",  fun floor/2, ReqData),
                         range("data_rates", fun floor/2,  ReqData),
@@ -589,16 +762,18 @@ range(Prefix, Round, ReqData) ->
     Age0 = int(Prefix ++ "_age", ReqData),
     Incr0 = int(Prefix ++ "_incr", ReqData),
     if
-        is_integer(Age0) andalso is_integer(Incr0) ->
+        is_atom(Age0) orelse is_atom(Incr0) -> no_range;
+        (Age0 > 0) andalso (Incr0 > 0) andalso ((Age0 div Incr0) =< ?MAX_RANGE) ->
             Age = Age0 * 1000,
             Incr = Incr0 * 1000,
-            Now = rabbit_mgmt_format:now_to_ms(os:timestamp()),
+            Now = time_compat:os_system_time(milli_seconds),
             Last = Round(Now, Incr),
             #range{first = (Last - Age),
                    last  = Last,
                    incr  = Incr};
-        true ->
-            no_range
+        true -> throw({error, invalid_range_parameters,
+                    io_lib:format("Invalid range parameters: age ~p, incr ~p",
+                                  [Age0, Incr0])})
     end.
 
 floor(TS, Interval) -> (TS div Interval) * Interval.
@@ -616,3 +791,4 @@ int(Name, ReqData) ->
                          Integer     -> Integer
                      end
     end.
+
similarity index 82%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_aliveness_test.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_aliveness_test.erl
index fec619609a2eb66dcca7768723ebd641381310ad..c019ea4d8927dff5de7423464bd43ac9deafd4a9 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_aliveness_test).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 -export([resource_exists/2]).
 
 -include("rabbit_mgmt.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case rabbit_mgmt_util:vhost(ReqData) of
          not_found -> false;
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_binding.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_binding.erl
index 16e42ae3b97e851d92344db5fa0f16496122a4aa..5c533ebb5c3b6798a960722ab8aa840bc5fa06c8 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_binding).
@@ -20,6 +20,8 @@
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, delete_resource/2,
          args_hash/1]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     Binding = binding(ReqData),
similarity index 91%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_bindings.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_bindings.erl
index 4f8077819491ff6d72ee5c6ba77ecaabcb7dd515..887291a50d40e1d978f8d3a7364b5b70aaeec417 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_bindings).
@@ -20,6 +20,8 @@
 -export([allowed_methods/2, post_is_create/2, create_path/2]).
 -export([content_types_accepted/2, accept_content/2, resource_exists/2]).
 -export([basic/1, augmented/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 init([Mode]) ->
     {ok, {Mode, #context{}}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, {Mode, Context}) ->
     {case list_bindings(Mode, ReqData) of
          vhost_not_found -> false;
@@ -44,8 +53,8 @@ content_types_accepted(ReqData, Context) ->
 
 allowed_methods(ReqData, {Mode, Context}) ->
     {case Mode of
-         source_destination -> ['HEAD', 'GET', 'POST'];
-         _                  -> ['HEAD', 'GET']
+         source_destination -> ['HEAD', 'GET', 'POST', 'OPTIONS'];
+         _                  -> ['HEAD', 'GET', 'OPTIONS']
      end, ReqData, {Mode, Context}}.
 
 post_is_create(ReqData, Context) ->
similarity index 70%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_channel.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_channel.erl
index 641043380c9b479e2a24c110b2e7490f21ef9ecc..e80769a10477e549346c46bc0772be3ce81b66fe 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_channel).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 -export([resource_exists/2]).
 
 -include("rabbit_mgmt.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     case channel(ReqData) of
         not_found -> {false, ReqData, Context};
@@ -42,7 +54,12 @@ to_json(ReqData, Context) ->
       ReqData, Context).
 
 is_authorized(ReqData, Context) ->
-    rabbit_mgmt_util:is_authorized_user(ReqData, Context, channel(ReqData)).
+    try
+        rabbit_mgmt_util:is_authorized_user(ReqData, Context, channel(ReqData))
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 %%--------------------------------------------------------------------
 
similarity index 65%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_channels.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_channels.erl
index 6675fa2977e70585b11c703c14a6d7d203991d16..15b33a6c32b65d132ba8440f282d6fa5616703c9 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_channels).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
          augmented/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2]).
 
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply_list(augmented(ReqData, Context), ReqData, Context).
+    try
+        rabbit_mgmt_util:reply_list_or_paginate(augmented(ReqData, Context),
+            ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 is_authorized(ReqData, Context) ->
     rabbit_mgmt_util:is_authorized(ReqData, Context).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl
new file mode 100644 (file)
index 0000000..357c2a6
--- /dev/null
@@ -0,0 +1,66 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Plugin.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_wm_channels_vhost).
+
+%% Lists channels in a vhost
+
+-export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
+         augmented/2, resource_exists/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
+
+-import(rabbit_misc, [pget/2]).
+
+-include("rabbit_mgmt.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+%%--------------------------------------------------------------------
+
+init(_Config) -> {ok, #context{}}.
+
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+    {rabbit_vhost:exists(rabbit_mgmt_util:id(vhost, ReqData)), ReqData, Context}.
+
+to_json(ReqData, Context) ->
+    try
+        rabbit_mgmt_util:reply_list(augmented(ReqData, Context), ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
+
+is_authorized(ReqData, Context) ->
+    rabbit_mgmt_util:is_authorized_vhost(ReqData, Context).
+
+augmented(ReqData, Context) ->
+    rabbit_mgmt_util:filter_conn_ch_list(
+      rabbit_mgmt_db:get_all_channels(
+        rabbit_mgmt_util:range(ReqData)), ReqData, Context).
similarity index 77%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_cluster_name.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_cluster_name.erl
index f74182175be1f37318058b8984d4dd3c26f0e0cd..28ac785e08d09cc17c0cbfb92a80a63377e89299 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_cluster_name).
@@ -19,6 +19,8 @@
 -export([init/1, resource_exists/2, to_json/2,
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {true, ReqData, Context}.
@@ -46,7 +55,8 @@ to_json(ReqData, Context) ->
 accept_content(ReqData, Context) ->
     rabbit_mgmt_util:with_decode(
       [name], ReqData, Context, fun([Name], _) ->
-                                        rabbit_nodes:set_cluster_name(Name),
+                                        rabbit_nodes:set_cluster_name(
+                                          as_binary(Name)),
                                         {true, ReqData, Context}
                                 end).
 
@@ -55,3 +65,8 @@ is_authorized(ReqData, Context) ->
         'PUT' -> rabbit_mgmt_util:is_authorized_admin(ReqData, Context);
         _     -> rabbit_mgmt_util:is_authorized(ReqData, Context)
     end.
+
+as_binary(Val) when is_binary(Val) ->
+    Val;
+as_binary(Val) when is_list(Val) ->
+    list_to_binary(Val).
similarity index 77%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_connection.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_connection.erl
index 523bec2fa7202decf046c1ec8c713eecfce02575..dae78e848ee82a7744464fe63885c9551839e893 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_connection).
 
 -export([init/1, resource_exists/2, to_json/2, content_types_provided/2,
          is_authorized/2, allowed_methods/2, delete_resource/2, conn/1]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     case conn(ReqData) of
@@ -57,7 +66,12 @@ delete_resource(ReqData, Context) ->
     {true, ReqData, Context}.
 
 is_authorized(ReqData, Context) ->
-    rabbit_mgmt_util:is_authorized_user(ReqData, Context, conn(ReqData)).
+    try
+        rabbit_mgmt_util:is_authorized_user(ReqData, Context, conn(ReqData))
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 %%--------------------------------------------------------------------
 
similarity index 71%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_connection_channels.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_connection_channels.erl
index 9becc91e49ef31caeda889bdd9e8a237858ce1ab..51b0b20a11ed672dec9c2a4f1a7d1c3f7f158bfd 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_connection_channels).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 -export([resource_exists/2]).
 
 -include("rabbit_mgmt.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     case rabbit_mgmt_wm_connection:conn(ReqData) of
         error -> {false, ReqData, Context};
@@ -45,8 +57,13 @@ to_json(ReqData, Context) ->
       ReqData, Context).
 
 is_authorized(ReqData, Context) ->
-    rabbit_mgmt_util:is_authorized_user(
-      ReqData, Context, rabbit_mgmt_wm_connection:conn(ReqData)).
+    try
+        rabbit_mgmt_util:is_authorized_user(
+          ReqData, Context, rabbit_mgmt_wm_connection:conn(ReqData))
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 %%--------------------------------------------------------------------
 
similarity index 65%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_connections.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_connections.erl
index 1263d9103df7883f55e0a67af3f5f0047608f915..b118b6f49b8a850060efc7555e5d9d86d68bdeaa 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_connections).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
          augmented/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2]).
 
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply_list(augmented(ReqData, Context), ReqData, Context).
+    try
+        rabbit_mgmt_util:reply_list_or_paginate(augmented(ReqData, Context),
+            ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 is_authorized(ReqData, Context) ->
     rabbit_mgmt_util:is_authorized(ReqData, Context).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl
new file mode 100644 (file)
index 0000000..d158f4f
--- /dev/null
@@ -0,0 +1,66 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Plugin.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_wm_connections_vhost).
+
+%% Lists connections in a vhost
+
+-export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
+         augmented/2, resource_exists/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
+
+-import(rabbit_misc, [pget/2]).
+
+-include("rabbit_mgmt.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+%%--------------------------------------------------------------------
+
+init(_Config) -> {ok, #context{}}.
+
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+    {rabbit_vhost:exists(rabbit_mgmt_util:id(vhost, ReqData)), ReqData, Context}.
+
+to_json(ReqData, Context) ->
+    try
+        rabbit_mgmt_util:reply_list(augmented(ReqData, Context), ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
+
+is_authorized(ReqData, Context) ->
+    rabbit_mgmt_util:is_authorized_vhost(ReqData, Context).
+
+augmented(ReqData, Context) ->
+    rabbit_mgmt_util:filter_conn_ch_list(
+      rabbit_mgmt_db:get_all_connections(
+        rabbit_mgmt_util:range_ceil(ReqData)), ReqData, Context).
similarity index 80%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_consumers.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_consumers.erl
index 3d791d0b034e68cbe1c16deaae0e0452c4d540e2..c36d0ac6e2e7171af66430d2dbd6a94dd9d95d2a 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 
 -module(rabbit_mgmt_wm_consumers).
 
 -export([init/1, to_json/2, content_types_provided/2, resource_exists/2,
          is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2]).
 
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case rabbit_mgmt_util:vhost(ReqData) of
          vhost_not_found -> false;
similarity index 66%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_definitions.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_definitions.erl
index a33d6a1cf16bd5f12e431bbc1cf448adef1cf851..6d02f3e102d67af7b8521a98ce776558ac093a71 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_definitions).
@@ -19,6 +19,8 @@
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
 -export([content_types_accepted/2, allowed_methods/2, accept_json/2]).
 -export([post_is_create/2, create_path/2, accept_multipart/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -export([apply_defs/3]).
 
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_json},
      {"multipart/form-data", accept_multipart}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'POST'], ReqData, Context}.
+    {['HEAD', 'GET', 'POST', 'OPTIONS'], ReqData, Context}.
 
 post_is_create(ReqData, Context) ->
     {true, ReqData, Context}.
@@ -48,6 +57,17 @@ create_path(ReqData, Context) ->
     {"dummy", ReqData, Context}.
 
 to_json(ReqData, Context) ->
+    case rabbit_mgmt_util:vhost(ReqData) of
+        none ->
+            all_definitions(ReqData, Context);
+        not_found ->
+            rabbit_mgmt_util:bad_request(list_to_binary("vhost_not_found"),
+                                         ReqData, Context);
+        _VHost ->
+            vhost_definitions(ReqData, Context)
+    end.
+
+all_definitions(ReqData, Context) ->
     Xs = [X || X <- rabbit_mgmt_wm_exchanges:basic(ReqData),
                export_exchange(X)],
     Qs = [Q || Q <- rabbit_mgmt_wm_queues:basic(ReqData),
@@ -76,6 +96,32 @@ to_json(ReqData, Context) ->
       end,
       Context).
 
+vhost_definitions(ReqData, Context) ->
+    %% rabbit_mgmt_wm_<>:basic/1 filters by VHost if it is available
+    Xs = [strip_vhost(X) || X <- rabbit_mgmt_wm_exchanges:basic(ReqData),
+               export_exchange(X)],
+    VQs = [Q || Q <- rabbit_mgmt_wm_queues:basic(ReqData), export_queue(Q)],
+    Qs = [strip_vhost(Q) || Q <- VQs],
+    QNames = [{pget(name, Q), pget(vhost, Q)} || Q <- VQs],
+    Bs = [strip_vhost(B) || B <- rabbit_mgmt_wm_bindings:basic(ReqData),
+                            export_binding(B, QNames)],
+    {ok, Vsn} = application:get_key(rabbit, vsn),
+    rabbit_mgmt_util:reply(
+      [{rabbit_version, list_to_binary(Vsn)}] ++
+          filter(
+            [{policies,    rabbit_mgmt_wm_policies:basic(ReqData)},
+             {queues,      Qs},
+             {exchanges,   Xs},
+             {bindings,    Bs}]),
+      case wrq:get_qs_value("download", ReqData) of
+          undefined -> ReqData;
+          Filename  -> rabbit_mgmt_util:set_resp_header(
+                         "Content-Disposition",
+                         "attachment; filename=" ++
+                             mochiweb_util:unquote(Filename), ReqData)
+      end,
+      Context).
+
 accept_json(ReqData, Context) ->
     accept(wrq:req_body(ReqData), ReqData, Context).
 
@@ -116,16 +162,31 @@ is_authorized_qs(ReqData, Context, Auth) ->
 %%--------------------------------------------------------------------
 
 accept(Body, ReqData, Context) ->
-    apply_defs(Body, fun() -> {true, ReqData, Context} end,
-               fun(E) -> rabbit_mgmt_util:bad_request(E, ReqData, Context) end).
+    case rabbit_mgmt_util:vhost(ReqData) of
+        none ->
+            apply_defs(Body, fun() -> {true, ReqData, Context} end,
+                       fun(E) -> rabbit_mgmt_util:bad_request(E, ReqData, Context) end);
+        not_found ->
+            rabbit_mgmt_util:bad_request(list_to_binary("vhost_not_found"),
+                                         ReqData, Context);
+        VHost ->
+            apply_defs(Body, fun() -> {true, ReqData, Context} end,
+                       fun(E) -> rabbit_mgmt_util:bad_request(E, ReqData, Context) end,
+                       VHost)
+    end.
 
 apply_defs(Body, SuccessFun, ErrorFun) ->
     case rabbit_mgmt_util:decode([], Body) of
         {error, E} ->
             ErrorFun(E);
         {ok, _, All} ->
+            Version = pget(rabbit_version, All),
             try
-                for_all(users,       All, fun add_user/1),
+                for_all(users,       All, fun(User) -> 
+                                              rabbit_mgmt_wm_user:put_user(
+                                                  User, 
+                                                  Version) 
+                                          end),
                 for_all(vhosts,      All, fun add_vhost/1),
                 for_all(permissions, All, fun add_permission/1),
                 for_all(parameters,  All, fun add_parameter/1),
@@ -139,6 +200,22 @@ apply_defs(Body, SuccessFun, ErrorFun) ->
             end
     end.
 
+apply_defs(Body, SuccessFun, ErrorFun, VHost) ->
+    case rabbit_mgmt_util:decode([], Body) of
+        {error, E} ->
+            ErrorFun(E);
+        {ok, _, All} ->
+            try
+                for_all(policies,    All, VHost, fun add_policy/2),
+                for_all(queues,      All, VHost, fun add_queue/2),
+                for_all(exchanges,   All, VHost, fun add_exchange/2),
+                for_all(bindings,    All, VHost, fun add_binding/2),
+                SuccessFun()
+            catch {error, E} -> ErrorFun(format(E));
+                  exit:E     -> ErrorFun(format(E))
+            end
+    end.
+
 format(#amqp_error{name = Name, explanation = Explanation}) ->
     list_to_binary(rabbit_misc:format("~s: ~s", [Name, Explanation]));
 format(E) ->
@@ -175,7 +252,7 @@ export_name(_Name)                -> true.
 %%--------------------------------------------------------------------
 
 rw_state() ->
-    [{users,       [name, password_hash, tags]},
+    [{users,       [name, password_hash, hashing_algorithm, tags]},
      {vhosts,      [name]},
      {permissions, [user, vhost, configure, write, read]},
      {parameters,  [vhost, component, name, value]},
@@ -195,6 +272,9 @@ filter_items(Name, List, Allowed) ->
 filter_item(Item, Allowed) ->
     [{K, Fact} || {K, Fact} <- Item, lists:member(K, Allowed)].
 
+strip_vhost(Item) ->
+    lists:keydelete(vhost, 1, Item).
+
 %%--------------------------------------------------------------------
 
 for_all(Name, All, Fun) ->
@@ -204,6 +284,13 @@ for_all(Name, All, Fun) ->
                          {struct, I} <- List]
     end.
 
+for_all(Name, All, VHost, Fun) ->
+    case pget(Name, All) of
+        undefined -> ok;
+        List      -> [Fun(VHost, [{atomise_name(K), V} || {K, V} <- I]) ||
+                         {struct, I} <- List]
+    end.
+
 atomise_name(N) -> list_to_atom(binary_to_list(N)).
 
 %%--------------------------------------------------------------------
@@ -222,6 +309,9 @@ add_parameter(Param) ->
 
 add_policy(Param) ->
     VHost = pget(vhost, Param),
+    add_policy(VHost, Param).
+
+add_policy(VHost, Param) ->
     Key   = pget(name,  Param),
     case rabbit_policy:set(
            VHost, Key, pget(pattern, Param),
@@ -233,9 +323,6 @@ add_policy(Param) ->
                              exit(list_to_binary(E ++ S))
     end.
 
-add_user(User) ->
-    rabbit_mgmt_wm_user:put_user(User).
-
 add_vhost(VHost) ->
     VHostName = pget(name, VHost),
     VHostTrace = pget(tracing, VHost),
@@ -249,18 +336,30 @@ add_permission(Permission) ->
                                                  pget(read,      Permission)).
 
 add_queue(Queue) ->
-    rabbit_amqqueue:declare(r(queue,                              Queue),
+    add_queue_int(Queue, r(queue, Queue)).
+
+add_queue(VHost, Queue) ->
+    add_queue_int(Queue, rv(VHost, queue, Queue)).
+
+add_queue_int(Queue, Name) ->
+    rabbit_amqqueue:declare(Name,
                             pget(durable,                         Queue),
                             pget(auto_delete,                     Queue),
                             rabbit_mgmt_util:args(pget(arguments, Queue)),
                             none).
 
 add_exchange(Exchange) ->
+    add_exchange_int(Exchange, r(exchange, Exchange)).
+
+add_exchange(VHost, Exchange) ->
+    add_exchange_int(Exchange, rv(VHost, exchange, Exchange)).
+
+add_exchange_int(Exchange, Name) ->
     Internal = case pget(internal, Exchange) of
                    undefined -> false; %% =< 2.2.0
                    I         -> I
                end,
-    rabbit_exchange:declare(r(exchange,                           Exchange),
+    rabbit_exchange:declare(Name,
                             rabbit_exchange:check_type(pget(type, Exchange)),
                             pget(durable,                         Exchange),
                             pget(auto_delete,                     Exchange),
@@ -268,14 +367,31 @@ add_exchange(Exchange) ->
                             rabbit_mgmt_util:args(pget(arguments, Exchange))).
 
 add_binding(Binding) ->
-    DestType = list_to_atom(binary_to_list(pget(destination_type, Binding))),
+    DestType = dest_type(Binding),
+    add_binding_int(Binding, r(exchange, source, Binding),
+                    r(DestType, destination, Binding)).
+
+add_binding(VHost, Binding) ->
+    DestType = dest_type(Binding),
+    add_binding_int(Binding, rv(VHost, exchange, source, Binding),
+                    rv(VHost, DestType, destination, Binding)).
+
+add_binding_int(Binding, Source, Destination) ->
     rabbit_binding:add(
-      #binding{source       = r(exchange, source,                   Binding),
-               destination  = r(DestType, destination,              Binding),
-               key          = pget(routing_key,                     Binding),
+      #binding{source       = Source,
+               destination  = Destination,
+               key          = pget(routing_key, Binding),
                args         = rabbit_mgmt_util:args(pget(arguments, Binding))}).
 
+dest_type(Binding) ->
+    list_to_atom(binary_to_list(pget(destination_type, Binding))).
+
 r(Type, Props) -> r(Type, name, Props).
 
 r(Type, Name, Props) ->
     rabbit_misc:r(pget(vhost, Props), Type, pget(Name, Props)).
+
+rv(VHost, Type, Props) -> rv(VHost, Type, name, Props).
+
+rv(VHost, Type, Name, Props) ->
+    rabbit_misc:r(VHost, Type, pget(Name, Props)).
similarity index 76%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_exchange.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange.erl
index 374280ab7c00dcfc584a85ef4836460b919953fc..7f7ba767ab01203d5c12c06573f7a8d0e3e1654b 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_exchange).
@@ -20,6 +20,8 @@
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2,
          delete_resource/2, exchange/1, exchange/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case exchange(ReqData) of
@@ -44,14 +53,19 @@ resource_exists(ReqData, Context) ->
      end, ReqData, Context}.
 
 to_json(ReqData, Context) ->
-    [X] = rabbit_mgmt_db:augment_exchanges(
-            [exchange(ReqData)], rabbit_mgmt_util:range(ReqData), full),
-    rabbit_mgmt_util:reply(X, ReqData, Context).
+    try
+        [X] = rabbit_mgmt_db:augment_exchanges(
+                [exchange(ReqData)], rabbit_mgmt_util:range(ReqData), full),
+        rabbit_mgmt_util:reply(X, ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 accept_content(ReqData, Context) ->
     rabbit_mgmt_util:http_to_amqp(
       'exchange.declare', ReqData, Context,
-      [{fun rabbit_mgmt_util:parse_bool/1, [durable, auto_delete, internal]}],
+      fun rabbit_mgmt_format:format_accept_content/1,
       [{exchange, rabbit_mgmt_util:id(exchange, ReqData)}]).
 
 delete_resource(ReqData, Context) ->
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_exchange_publish.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange_publish.erl
index 467c055be76f3274f96893030083c810f3321c81..ff2dbb1d11fb4d00e7ee9e4799ac1e97a02e39f7 100644 (file)
@@ -18,6 +18,8 @@
 
 -export([init/1, resource_exists/2, post_is_create/2, is_authorized/2,
          allowed_methods/2,  content_types_provided/2, process_post/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 allowed_methods(ReqData, Context) ->
-    {['POST'], ReqData, Context}.
+    {['POST', 'OPTIONS'], ReqData, Context}.
 
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case rabbit_mgmt_wm_exchange:exchange(ReqData) of
          not_found -> false;
similarity index 71%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_exchanges.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_exchanges.erl
index f1df46a0838c6bceda397d5221f32c073561cddc..529acbc13c30dd693b688297fd03d2bf121016ca 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_exchanges).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
          resource_exists/2, basic/1, augmented/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case exchanges0(ReqData) of
          vhost_not_found -> false;
@@ -37,7 +49,13 @@ resource_exists(ReqData, Context) ->
      end, ReqData, Context}.
 
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply_list(augmented(ReqData, Context), ReqData, Context).
+    try
+        rabbit_mgmt_util:reply_list_or_paginate(augmented(ReqData, Context),
+            ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 is_authorized(ReqData, Context) ->
     rabbit_mgmt_util:is_authorized_vhost(ReqData, Context).
similarity index 77%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_extensions.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_extensions.erl
index b38bf1d795a436a103423c6e4f83500030aca167..5de1b1674791158c7ea245067e647390db1de4d5 100644 (file)
@@ -17,6 +17,8 @@
 -module(rabbit_mgmt_wm_extensions).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context) ->
     Modules = rabbit_mgmt_dispatcher:modules([]),
     rabbit_mgmt_util:reply(
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl
new file mode 100644 (file)
index 0000000..eb71a3f
--- /dev/null
@@ -0,0 +1,79 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+-module(rabbit_mgmt_wm_healthchecks).
+
+-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
+-export([resource_exists/2]).
+
+-include("rabbit_mgmt.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+
+%%--------------------------------------------------------------------
+
+init(_Config) -> {ok, #context{}}.
+
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+    {case node0(ReqData) of
+         not_found -> false;
+         _         -> true
+     end, ReqData, Context}.
+
+to_json(ReqData, Context) ->
+    Node = node0(ReqData),
+    try
+        Timeout = case wrq:get_req_header("timeout", ReqData) of
+                      undefined -> 70000;
+                      Val       -> list_to_integer(Val)
+                  end,
+        rabbit_health_check:node(Node, Timeout),
+        rabbit_mgmt_util:reply([{status, ok}], ReqData, Context)
+    catch
+        {node_is_ko, ErrorMsg, _ErrorCode} ->
+            rabbit_mgmt_util:reply([{status, failed},
+                                    {reason, rabbit_mgmt_format:print(ErrorMsg)}],
+                                   ReqData, Context)
+    end.
+
+is_authorized(ReqData, Context) ->
+    rabbit_mgmt_util:is_authorized(ReqData, Context).
+
+node0(ReqData) ->
+    Node = case rabbit_mgmt_util:id(node, ReqData) of
+               none ->
+                   node();
+               Node0 ->
+                   list_to_atom(binary_to_list(Node0))
+           end,
+    case [N || N <- rabbit_mgmt_wm_nodes:all_nodes(ReqData),
+               proplists:get_value(name, N) == Node] of
+        []     -> not_found;
+        [_] -> Node
+    end.
similarity index 83%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_node.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_node.erl
index 646d1838a0f10675e1fa0aff4f5103c66b5d3aa5..1bc41f3db152ec12e04eaa93fbc5f8e1718572e1 100644 (file)
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_node).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 -export([resource_exists/2]).
 
 -include("rabbit_mgmt.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case node0(ReqData) of
          not_found -> false;
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl
new file mode 100644 (file)
index 0000000..37ed784
--- /dev/null
@@ -0,0 +1,92 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Console.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_wm_node_memory).
+
+-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
+-export([resource_exists/2]).
+
+-include("rabbit_mgmt.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+%%--------------------------------------------------------------------
+
+init([Mode]) -> {ok, {Mode, #context{}}}.
+
+finish_request(ReqData, {Mode, Context}) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), {Mode, Context}}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+    {node_exists(ReqData, get_node(ReqData)), ReqData, Context}.
+
+to_json(ReqData, {Mode, Context}) ->
+    rabbit_mgmt_util:reply(augment(Mode, ReqData), ReqData, {Mode, Context}).
+
+is_authorized(ReqData, {Mode, Context}) ->
+    {Res, RD, C} = rabbit_mgmt_util:is_authorized_monitor(ReqData, Context),
+    {Res, RD, {Mode, C}}.
+
+%%--------------------------------------------------------------------
+get_node(ReqData) ->
+    list_to_atom(binary_to_list(rabbit_mgmt_util:id(node, ReqData))).
+
+node_exists(ReqData, Node) ->
+    case [N || N <- rabbit_mgmt_wm_nodes:all_nodes(ReqData),
+               proplists:get_value(name, N) == Node] of
+        [] -> false;
+        [_] -> true
+    end.
+
+augment(Mode, ReqData) ->
+    Node = get_node(ReqData),
+    case node_exists(ReqData, Node) of
+        false ->
+            not_found;
+        true ->
+            case rpc:call(Node, rabbit_vm, memory, [], infinity) of
+                {badrpc, _} -> [{memory, not_available}];
+                Result      -> [{memory, format(Mode, Result)}]
+            end
+    end.
+
+format(absolute, Result) ->
+    Result;
+format(relative, Result) ->
+    {[{total, Total}], Rest} = lists:splitwith(fun({Key, _}) ->
+                                                       Key == total
+                                               end, Result),
+    [{total, 100} | [{K, percentage(V, Total)} || {K, V} <- Rest]].
+
+percentage(Part, Total) ->
+    case round((Part/Total) * 100) of
+        0 when Part =/= 0 ->
+            1;
+        Int ->
+            Int
+    end.
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl
new file mode 100644 (file)
index 0000000..e7ce6dd
--- /dev/null
@@ -0,0 +1,102 @@
+%%   The contents of this file are subject to the Mozilla Public License
+%%   Version 1.1 (the "License"); you may not use this file except in
+%%   compliance with the License. You may obtain a copy of the License at
+%%   http://www.mozilla.org/MPL/
+%%
+%%   Software distributed under the License is distributed on an "AS IS"
+%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+%%   License for the specific language governing rights and limitations
+%%   under the License.
+%%
+%%   The Original Code is RabbitMQ Management Console.
+%%
+%%   The Initial Developer of the Original Code is GoPivotal, Inc.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mgmt_wm_node_memory_ets).
+
+-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
+-export([resource_exists/2]).
+
+-include("rabbit_mgmt.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+%%--------------------------------------------------------------------
+
+init([Mode]) -> {ok, {Mode, #context{}}}.
+
+finish_request(ReqData, {Mode, Context}) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), {Mode, Context}}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+    {node_exists(ReqData, get_node(ReqData)), ReqData, Context}.
+
+to_json(ReqData, {Mode, Context}) ->
+    rabbit_mgmt_util:reply(augment(Mode, ReqData), ReqData, {Mode, Context}).
+
+is_authorized(ReqData, {Mode, Context}) ->
+    {Res, RD, C} = rabbit_mgmt_util:is_authorized_monitor(ReqData, Context),
+    {Res, RD, {Mode, C}}.
+
+%%--------------------------------------------------------------------
+get_node(ReqData) ->
+    list_to_atom(binary_to_list(rabbit_mgmt_util:id(node, ReqData))).
+
+get_filter(ReqData) ->
+    case rabbit_mgmt_util:id(filter, ReqData) of
+        none                        -> all;
+        <<"management">>            -> rabbit_mgmt_event_collector;
+        Other when is_binary(Other) -> list_to_atom(binary_to_list(Other));
+        _                           -> all
+    end.
+
+node_exists(ReqData, Node) ->
+    case [N || N <- rabbit_mgmt_wm_nodes:all_nodes(ReqData),
+               proplists:get_value(name, N) == Node] of
+        [] -> false;
+        [_] -> true
+    end.
+
+augment(Mode, ReqData) ->
+    Node = get_node(ReqData),
+    Filter = get_filter(ReqData),
+    case node_exists(ReqData, Node) of
+        false ->
+            not_found;
+        true ->
+            case rpc:call(Node, rabbit_vm, ets_tables_memory,
+                          [Filter], infinity) of
+                {badrpc, _} -> [{ets_tables_memory, not_available}];
+                []          -> [{ets_tables_memory, no_tables}];
+                Result      -> [{ets_tables_memory, format(Mode, Result)}]
+            end
+    end.
+
+format(absolute, Result) ->
+    Total = lists:sum([V || {_K,V} <- Result]),
+    [{total, Total} | Result];
+format(relative, Result) ->
+    Total = lists:sum([V || {_K,V} <- Result]),
+    [{total, 100} | [{K, percentage(V, Total)} || {K, V} <- Result]].
+
+percentage(Part, Total) ->
+    case round((Part/Total) * 100) of
+        0 when Part =/= 0 ->
+            1;
+        Int ->
+            Int
+    end.
similarity index 71%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_nodes.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_nodes.erl
index fb86e1efb3a56597301d9064b2326aecb3be8744..e1cbf089af0e0de10d348852dd2f9c42db997697 100644 (file)
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_nodes).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
 -export([all_nodes/1, all_nodes_raw/0]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply_list(all_nodes(ReqData), ReqData, Context).
+    try
+        rabbit_mgmt_util:reply_list(all_nodes(ReqData), ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 is_authorized(ReqData, Context) ->
     rabbit_mgmt_util:is_authorized_monitor(ReqData, Context).
similarity index 67%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_overview.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_overview.erl
index 486016ca1777034965175c1e93fe419bf56f5c84..fe36948b2230fac94a25dbb2114d51438ccfc765 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_overview).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2, pget/3]).
 
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, ?MODULE), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context = #context{user = User = #user{tags = Tags}}) ->
     {ok, RatesMode} = application:get_env(rabbitmq_management, rates_mode),
     %% NB: this duplicates what's in /nodes but we want a global idea
@@ -43,23 +55,28 @@ to_json(ReqData, Context = #context{user = User = #user{tags = Tags}}) ->
                  {cluster_name,        rabbit_nodes:cluster_name()},
                  {erlang_version,      erlang_version()},
                  {erlang_full_version, erlang_full_version()}],
-    Range = rabbit_mgmt_util:range(ReqData),
-    Overview =
-        case rabbit_mgmt_util:is_monitor(Tags) of
-            true ->
-                Overview0 ++
-                    [{K, maybe_struct(V)} ||
-                        {K,V} <- rabbit_mgmt_db:get_overview(Range)] ++
-                    [{node,               node()},
-                     {statistics_db_node, stats_db_node()},
-                     {listeners,          listeners()},
-                     {contexts,           web_contexts(ReqData)}];
-            _ ->
-                Overview0 ++
-                    [{K, maybe_struct(V)} ||
-                        {K, V} <- rabbit_mgmt_db:get_overview(User, Range)]
-        end,
-    rabbit_mgmt_util:reply(Overview, ReqData, Context).
+    try
+        Range = rabbit_mgmt_util:range(ReqData),
+        Overview =
+            case rabbit_mgmt_util:is_monitor(Tags) of
+                true ->
+                    Overview0 ++
+                        [{K, maybe_struct(V)} ||
+                            {K,V} <- rabbit_mgmt_db:get_overview(Range)] ++
+                        [{node,               node()},
+                         {statistics_db_node, stats_db_node()},
+                         {listeners,          listeners()},
+                         {contexts,           web_contexts(ReqData)}];
+                _ ->
+                    Overview0 ++
+                        [{K, maybe_struct(V)} ||
+                            {K, V} <- rabbit_mgmt_db:get_overview(User, Range)]
+            end,
+        rabbit_mgmt_util:reply(Overview, ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 is_authorized(ReqData, Context) ->
     rabbit_mgmt_util:is_authorized(ReqData, Context).
similarity index 84%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_parameter.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_parameter.erl
index 5af5d38bf30bfaf21645db87460d6963649b103b..a72693a57d1ff2822b0821ec5d4dab380d02802f 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_parameter).
@@ -20,6 +20,8 @@
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2,
          delete_resource/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2]).
 
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case parameter(ReqData) of
@@ -47,7 +56,8 @@ resource_exists(ReqData, Context) ->
      end, ReqData, Context}.
 
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply(rabbit_mgmt_format:parameter(parameter(ReqData)),
+    rabbit_mgmt_util:reply(rabbit_mgmt_format:parameter(
+        rabbit_mgmt_wm_parameters:fix_shovel_publish_properties(parameter(ReqData))),
                            ReqData, Context).
 
 accept_content(ReqData, Context = #context{user = User}) ->
similarity index 61%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_parameters.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_parameters.erl
index 0664bafe181577b03695c7eebc27712eec8b4b8c..696879d3efa2f553b19f579b10106e8c6c2f9691 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_parameters).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
          resource_exists/2, basic/1]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
+-export([fix_shovel_publish_properties/1]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case basic(ReqData) of
          not_found -> false;
@@ -59,5 +72,24 @@ basic(ReqData) ->
           end,
     case Raw of
         not_found -> not_found;
-        _         -> [rabbit_mgmt_format:parameter(P) || P <- Raw]
+        _         -> [rabbit_mgmt_format:parameter(fix_shovel_publish_properties(P)) || P <- Raw]
+    end.
+
+%% Hackish fix to make sure we return a JSON object instead of an empty list
+%% when the publish-properties value is empty. Should be removed in 3.7.0
+%% when we switch to a new JSON library.
+fix_shovel_publish_properties(P) ->
+    case lists:keyfind(component, 1, P) of
+        {_, <<"shovel">>} ->
+            case lists:keytake(value, 1, P) of
+                {value, {_, Values}, P2} ->
+                    case lists:keytake(<<"publish-properties">>, 1, Values) of
+                        {_, {_, []}, Values2} ->
+                            P2 ++ [{value, Values2 ++ [{<<"publish-properties">>, empty_struct}]}];
+                        _ ->
+                            P
+                    end;
+                _ -> P
+            end;
+        _ -> P
     end.
similarity index 88%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permission.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_permission.erl
index ec1046b64f30ab3c95043b7c799bc5601fb1d3e9..21cc29408d7792a976967d7a536e735741f0fb68 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_permission).
@@ -20,6 +20,8 @@
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2,
          delete_resource/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case perms(ReqData) of
similarity index 75%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permissions.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions.erl
index 52f4771ef2e5decc3f8c12f79cccfedc6d2ab766..46ad475ce202b8682c492dd75f973a8eb1e9c6c0 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_permissions).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
 -export([permissions/0]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context) ->
     rabbit_mgmt_util:reply_list(permissions(), ["vhost", "user"],
                                 ReqData, Context).
similarity index 77%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permissions_user.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_user.erl
index df19045209c58a223b5d48f6afe785ce9946db40..bcf8d383dea15ee718d808e682de5c679aa815f9 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_permissions_user).
 
 -export([init/1, to_json/2, content_types_provided/2, resource_exists/2,
          is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case rabbit_mgmt_wm_user:user(ReqData) of
          {ok, _}    -> true;
similarity index 77%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_permissions_vhost.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_vhost.erl
index 062a902222b44c974be59ee840cd3691f4d8cb8c..ba220b8945f8dbe9ebd28bfebbd4077287784127 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_permissions_vhost).
 
 -export([init/1, to_json/2, content_types_provided/2, resource_exists/2,
          is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {rabbit_vhost:exists(rabbit_mgmt_wm_vhost:id(ReqData)), ReqData, Context}.
 
similarity index 76%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_policies.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_policies.erl
index 1236bcdb80934641e88876749eef52a9f9118059..e51074565993e950c9b206072f492fd3904e90ea 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_policies).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
          resource_exists/2, basic/1]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case basic(ReqData) of
          not_found -> false;
@@ -42,7 +54,7 @@ to_json(ReqData, Context) ->
       ["priority"], ReqData, Context).
 
 is_authorized(ReqData, Context) ->
-    rabbit_mgmt_util:is_authorized_policies(ReqData, Context).
+    rabbit_mgmt_util:is_authorized_vhost(ReqData, Context).
 
 %%--------------------------------------------------------------------
 
similarity index 87%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_policy.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_policy.erl
index fa9e1aaa10d4b446a084067b9cf615f69140f8a3..3460a87a521b737f30aa78667cfc554ab446f14c 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_policy).
@@ -20,6 +20,8 @@
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2,
          delete_resource/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2]).
 
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case policy(ReqData) of
similarity index 74%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_queue.erl
index fb7e8ab48c0c03134be5e8929a76aeb6f70a9375..d1e87a59d28e0ac48b35a4399fd23968872f9b35 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_queue).
@@ -20,6 +20,8 @@
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2,
          delete_resource/2, queue/1, queue/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case queue(ReqData) of
@@ -44,14 +53,19 @@ resource_exists(ReqData, Context) ->
      end, ReqData, Context}.
 
 to_json(ReqData, Context) ->
-    [Q] = rabbit_mgmt_db:augment_queues(
-            [queue(ReqData)], rabbit_mgmt_util:range_ceil(ReqData), full),
-    rabbit_mgmt_util:reply(rabbit_mgmt_format:strip_pids(Q), ReqData, Context).
+    try
+        [Q] = rabbit_mgmt_db:augment_queues(
+                [queue(ReqData)], rabbit_mgmt_util:range_ceil(ReqData), full),
+        rabbit_mgmt_util:reply(rabbit_mgmt_format:strip_pids(Q), ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 accept_content(ReqData, Context) ->
-   rabbit_mgmt_util:http_to_amqp(
+    rabbit_mgmt_util:http_to_amqp(
       'queue.declare', ReqData, Context,
-      [{fun rabbit_mgmt_util:parse_bool/1, [durable, auto_delete]}],
+      fun rabbit_mgmt_format:format_accept_content/1,
       [{queue, rabbit_mgmt_util:id(queue, ReqData)}]).
 
 delete_resource(ReqData, Context) ->
similarity index 87%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue_actions.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_actions.erl
index a6017643dccefc7248724ea708ead4010c3f62d0..b75e2dc4899a538373ab14dc973875c29cd94b71 100644 (file)
@@ -18,6 +18,8 @@
 
 -export([init/1, resource_exists/2, post_is_create/2, is_authorized/2,
          allowed_methods/2, process_post/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 allowed_methods(ReqData, Context) ->
-    {['POST'], ReqData, Context}.
+    {['POST', 'OPTIONS'], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case rabbit_mgmt_wm_queue:queue(ReqData) of
similarity index 90%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue_get.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_get.erl
index 8998bfcc8145931ca59993dabf8cd42a9ab112bd..85cd5c8cd696254f6bf3cfb50880f339f5cedcfa 100644 (file)
@@ -17,7 +17,9 @@
 -module(rabbit_mgmt_wm_queue_get).
 
 -export([init/1, resource_exists/2, post_is_create/2, is_authorized/2,
-         allowed_methods/2, process_post/2]).
+  allowed_methods/2, process_post/2, content_types_provided/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 allowed_methods(ReqData, Context) ->
-    {['POST'], ReqData, Context}.
+    {['POST', 'OPTIONS'], ReqData, Context}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case rabbit_mgmt_wm_queue:queue(ReqData) of
similarity index 78%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queue_purge.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_purge.erl
index 42aeb953212a120a3307a3c2685eec422fb73003..b4d94badb2fab0b8cfd2cea0c5861642eeb27115 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_queue_purge).
 
 -export([init/1, resource_exists/2, is_authorized/2, allowed_methods/2,
          delete_resource/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 allowed_methods(ReqData, Context) ->
-    {['DELETE'], ReqData, Context}.
+    {['DELETE', 'OPTIONS'], ReqData, Context}.
+
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case rabbit_mgmt_wm_queue:queue(ReqData) of
similarity index 67%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_queues.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_queues.erl
index 51265c7301d70c7f91e7702e1a25e11e1fd9d7ff..2a67890d8f85d6d12c21223b89e1011a172913ee 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_queues).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
          resource_exists/2, basic/1, augmented/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 resource_exists(ReqData, Context) ->
     {case queues0(ReqData) of
          vhost_not_found -> false;
          _               -> true
      end, ReqData, Context}.
 
+
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply_list(augmented(ReqData, Context), ReqData, Context).
+    try
+        rabbit_mgmt_util:reply_list_or_paginate(
+          augmented(ReqData, Context), ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 is_authorized(ReqData, Context) ->
     rabbit_mgmt_util:is_authorized_vhost(ReqData, Context).
@@ -45,10 +64,9 @@ is_authorized(ReqData, Context) ->
 %%--------------------------------------------------------------------
 
 augmented(ReqData, Context) ->
-    rabbit_mgmt_format:strip_pids(
-      rabbit_mgmt_db:augment_queues(
-        rabbit_mgmt_util:filter_vhost(basic(ReqData), ReqData, Context),
-        rabbit_mgmt_util:range_ceil(ReqData), basic)).
+    rabbit_mgmt_db:augment_queues(
+      rabbit_mgmt_util:filter_vhost(basic(ReqData), ReqData, Context),
+      rabbit_mgmt_util:range_ceil(ReqData), basic).
 
 basic(ReqData) ->
     [rabbit_mgmt_format:queue(Q) || Q <- queues0(ReqData)] ++
similarity index 57%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_user.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_user.erl
index b8b65291b19a4217ff2b859ea9b309ff941fae8c..b62f17a8055033dae7a4bcb0d2323b963f29e611 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_user).
@@ -19,7 +19,9 @@
 -export([init/1, resource_exists/2, to_json/2,
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2,
-         delete_resource/2, user/1, put_user/1]).
+         delete_resource/2, user/1, put_user/1, put_user/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2]).
 
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {case user(ReqData) of
@@ -72,19 +81,30 @@ is_authorized(ReqData, Context) ->
 user(ReqData) ->
     rabbit_auth_backend_internal:lookup_user(rabbit_mgmt_util:id(user, ReqData)).
 
-put_user(User) ->
-    CP = fun rabbit_auth_backend_internal:change_password/2,
-    CPH = fun rabbit_auth_backend_internal:change_password_hash/2,
-    case {proplists:is_defined(password, User),
-          proplists:is_defined(password_hash, User)} of
-        {true, _} -> put_user(User, pget(password, User), CP);
-        {_, true} -> Hash = rabbit_mgmt_util:b64decode_or_throw(
-                              pget(password_hash, User)),
-                     put_user(User, Hash, CPH);
-        _         -> put_user(User, <<>>, CPH)
-    end.
-
-put_user(User, PWArg, PWFun) ->
+put_user(User) -> put_user(User, undefined).
+
+put_user(User, Version) ->
+    PasswordUpdateFun = 
+        fun(Username) ->
+                case {proplists:is_defined(password, User),
+                      proplists:is_defined(password_hash, User)} of
+                    {true, _} ->
+                        rabbit_auth_backend_internal:change_password(
+                          Username, pget(password, User));
+                    {_, true} ->
+                        HashingAlgorithm = hashing_algorithm(User, Version),
+
+                        Hash = rabbit_mgmt_util:b64decode_or_throw(
+                                 pget(password_hash, User)),
+                        rabbit_auth_backend_internal:change_password_hash(
+                          Username, Hash, HashingAlgorithm);
+                    _         ->
+                        rabbit_auth_backend_internal:clear_password(Username)
+                end
+        end,
+    put_user0(User, PasswordUpdateFun).
+
+put_user0(User, PasswordUpdateFun) ->
     Username = pget(name, User),
     Tags = case {pget(tags, User), pget(administrator, User)} of
                {undefined, undefined} ->
@@ -105,5 +125,23 @@ put_user(User, PWArg, PWFun) ->
         _ ->
             ok
     end,
-    PWFun(Username, PWArg),
+    PasswordUpdateFun(Username),
     ok = rabbit_auth_backend_internal:set_tags(Username, Tags).
+
+hashing_algorithm(User, Version) ->
+    case pget(hashing_algorithm, User) of
+        undefined ->
+            case Version of
+                %% 3.6.1 and later versions are supposed to have
+                %% the algorithm exported and thus not need a default
+                <<"3.6.0">>          -> rabbit_password_hashing_sha256;
+                <<"3.5.", _/binary>> -> rabbit_password_hashing_md5;
+                <<"3.4.", _/binary>> -> rabbit_password_hashing_md5;
+                <<"3.3.", _/binary>> -> rabbit_password_hashing_md5;
+                <<"3.2.", _/binary>> -> rabbit_password_hashing_md5;
+                <<"3.1.", _/binary>> -> rabbit_password_hashing_md5;
+                <<"3.0.", _/binary>> -> rabbit_password_hashing_md5;
+                _                    -> rabbit_password:hashing_mod()
+            end;
+        Alg       -> binary_to_atom(Alg, utf8)
+    end.
similarity index 77%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_users.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_users.erl
index 7bf07bf180094be68e30d0f75c450ccab2604a5b..c6ba4f531ed060c120c34d37824c7c65214c2e88 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_users).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 -export([users/0]).
 
 -import(rabbit_misc, [pget/2]).
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context) ->
     rabbit_mgmt_util:reply_list(users(), ReqData, Context).
 
similarity index 75%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_vhost.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost.erl
index 0908d90d48046a33a94e7223521bd8e8c28974fb..09c8c526808a7484b792154aececd3c416f50736 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_vhost).
@@ -20,6 +20,8 @@
          content_types_provided/2, content_types_accepted/2,
          is_authorized/2, allowed_methods/2, accept_content/2,
          delete_resource/2, id/1, put_vhost/2]).
+-export([finish_request/2]).
+-export([encodings_provided/2]).
 
 -import(rabbit_misc, [pget/2]).
 
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 content_types_accepted(ReqData, Context) ->
    {[{"application/json", accept_content}], ReqData, Context}.
 
 allowed_methods(ReqData, Context) ->
-    {['HEAD', 'GET', 'PUT', 'DELETE'], ReqData, Context}.
+    {['HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS'], ReqData, Context}.
 
 resource_exists(ReqData, Context) ->
     {rabbit_vhost:exists(id(ReqData)), ReqData, Context}.
 
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply(
-      hd(rabbit_mgmt_db:augment_vhosts(
-           [rabbit_vhost:info(id(ReqData))], rabbit_mgmt_util:range(ReqData))),
-      ReqData, Context).
+    try
+        rabbit_mgmt_util:reply(
+          hd(rabbit_mgmt_db:augment_vhosts(
+               [rabbit_vhost:info(id(ReqData))], rabbit_mgmt_util:range(ReqData))),
+          ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 accept_content(ReqData, Context) ->
     Name = id(ReqData),
similarity index 67%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_vhosts.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_vhosts.erl
index 961b6f71b51027fe502285daed6727333b3aed2b..faeb0f51d758d545372cd2c7f572056d00791655 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_vhosts).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 -export([basic/0, augmented/2]).
 
 -include("rabbit_mgmt.hrl").
 
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context) ->
-    rabbit_mgmt_util:reply_list(augmented(ReqData, Context), ReqData, Context).
+    try
+        rabbit_mgmt_util:reply_list_or_paginate(
+          augmented(ReqData, Context),ReqData, Context)
+    catch
+        {error, invalid_range_parameters, Reason} ->
+            rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData, Context)
+    end.
 
 is_authorized(ReqData, Context) ->
     rabbit_mgmt_util:is_authorized(ReqData, Context).
similarity index 73%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_whoami.erl
rename to deps/rabbitmq_management/src/rabbit_mgmt_wm_whoami.erl
index 564b394681039c55dc6ecc299dcbf651d8ffdca2..5d262a7fb3099fe75a24aee2d58a1fc9b2411627 100644 (file)
 %%   The Original Code is RabbitMQ Management Plugin.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_wm_whoami).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+-export([finish_request/2, allowed_methods/2]).
+-export([encodings_provided/2]).
 
 -include("rabbit_mgmt.hrl").
 -include_lib("webmachine/include/webmachine.hrl").
 %%--------------------------------------------------------------------
 init(_Config) -> {ok, #context{}}.
 
+finish_request(ReqData, Context) ->
+    {ok, rabbit_mgmt_cors:set_headers(ReqData, Context), Context}.
+
+allowed_methods(ReqData, Context) ->
+    {['HEAD', 'GET', 'OPTIONS'], ReqData, Context}.
+
 content_types_provided(ReqData, Context) ->
    {[{"application/json", to_json}], ReqData, Context}.
 
+encodings_provided(ReqData, Context) ->
+    {[{"identity", fun(X) -> X end},
+     {"gzip", fun(X) -> zlib:gzip(X) end}], ReqData, Context}.
+
 to_json(ReqData, Context = #context{user = User}) ->
     rabbit_mgmt_util:reply(rabbit_mgmt_format:user(User), ReqData, Context).
 
similarity index 67%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbitmq_management.app.src
rename to deps/rabbitmq_management/src/rabbitmq_management.app.src
index bf542776e129fecf5df2d8e4828b283cfed002bb..6319f0fe1663e9793f7972b0952a72cea97a3e22 100644 (file)
@@ -1,6 +1,6 @@
 {application, rabbitmq_management,
  [{description, "RabbitMQ Management Console"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {mod, {rabbit_mgmt_app, []}},
           %% List of {MaxAgeInSeconds, SampleEveryNSeconds}
           [{global,   [{605, 5}, {3660, 60}, {29400, 600}, {86400, 1800}]},
            {basic,    [{605, 5}, {3600, 60}]},
-           {detailed, [{10, 5}]}]}
+           {detailed, [{10, 5}]}]},
+         {process_stats_gc_timeout, 300000},
+         {stats_event_max_backlog, 250},
+         {cors_allow_origins, []},
+         {cors_max_age, 1800}
         ]},
-  {applications, [kernel, stdlib, rabbit, xmerl, rabbitmq_web_dispatch,
+  {applications, [kernel, stdlib, rabbit_common, rabbit, xmerl, rabbitmq_web_dispatch,
                   amqp_client, rabbitmq_management_agent]}]}.
diff --git a/deps/rabbitmq_management_agent/CODE_OF_CONDUCT.md b/deps/rabbitmq_management_agent/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_management_agent/CONTRIBUTING.md b/deps/rabbitmq_management_agent/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_management_agent/LICENSE b/deps/rabbitmq_management_agent/LICENSE
new file mode 100644 (file)
index 0000000..32fbd2a
--- /dev/null
@@ -0,0 +1,5 @@
+This package, the RabbitMQ Management Plugin is licensed under the MPL. For the
+MPL, please see LICENSE-MPL-RabbitMQ.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbitmq_management_agent/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_management_agent/LICENSE-MPL-RabbitMQ
new file mode 100644 (file)
index 0000000..e163fcc
--- /dev/null
@@ -0,0 +1,455 @@
+                          MOZILLA PUBLIC LICENSE
+                                Version 1.1
+
+                              ---------------
+
+1. Definitions.
+
+     1.0.1. "Commercial Use" means distribution or otherwise making the
+     Covered Code available to a third party.
+
+     1.1. "Contributor" means each entity that creates or contributes to
+     the creation of Modifications.
+
+     1.2. "Contributor Version" means the combination of the Original
+     Code, prior Modifications used by a Contributor, and the Modifications
+     made by that particular Contributor.
+
+     1.3. "Covered Code" means the Original Code or Modifications or the
+     combination of the Original Code and Modifications, in each case
+     including portions thereof.
+
+     1.4. "Electronic Distribution Mechanism" means a mechanism generally
+     accepted in the software development community for the electronic
+     transfer of data.
+
+     1.5. "Executable" means Covered Code in any form other than Source
+     Code.
+
+     1.6. "Initial Developer" means the individual or entity identified
+     as the Initial Developer in the Source Code notice required by Exhibit
+     A.
+
+     1.7. "Larger Work" means a work which combines Covered Code or
+     portions thereof with code not governed by the terms of this License.
+
+     1.8. "License" means this document.
+
+     1.8.1. "Licensable" means having the right to grant, to the maximum
+     extent possible, whether at the time of the initial grant or
+     subsequently acquired, any and all of the rights conveyed herein.
+
+     1.9. "Modifications" means any addition to or deletion from the
+     substance or structure of either the Original Code or any previous
+     Modifications. When Covered Code is released as a series of files, a
+     Modification is:
+          A. Any addition to or deletion from the contents of a file
+          containing Original Code or previous Modifications.
+
+          B. Any new file that contains any part of the Original Code or
+          previous Modifications.
+
+     1.10. "Original Code" means Source Code of computer software code
+     which is described in the Source Code notice required by Exhibit A as
+     Original Code, and which, at the time of its release under this
+     License is not already Covered Code governed by this License.
+
+     1.10.1. "Patent Claims" means any patent claim(s), now owned or
+     hereafter acquired, including without limitation,  method, process,
+     and apparatus claims, in any patent Licensable by grantor.
+
+     1.11. "Source Code" means the preferred form of the Covered Code for
+     making modifications to it, including all modules it contains, plus
+     any associated interface definition files, scripts used to control
+     compilation and installation of an Executable, or source code
+     differential comparisons against either the Original Code or another
+     well known, available Covered Code of the Contributor's choice. The
+     Source Code can be in a compressed or archival form, provided the
+     appropriate decompression or de-archiving software is widely available
+     for no charge.
+
+     1.12. "You" (or "Your")  means an individual or a legal entity
+     exercising rights under, and complying with all of the terms of, this
+     License or a future version of this License issued under Section 6.1.
+     For legal entities, "You" includes any entity which controls, is
+     controlled by, or is under common control with You. For purposes of
+     this definition, "control" means (a) the power, direct or indirect,
+     to cause the direction or management of such entity, whether by
+     contract or otherwise, or (b) ownership of more than fifty percent
+     (50%) of the outstanding shares or beneficial ownership of such
+     entity.
+
+2. Source Code License.
+
+     2.1. The Initial Developer Grant.
+     The Initial Developer hereby grants You a world-wide, royalty-free,
+     non-exclusive license, subject to third party intellectual property
+     claims:
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Initial Developer to use, reproduce,
+          modify, display, perform, sublicense and distribute the Original
+          Code (or portions thereof) with or without Modifications, and/or
+          as part of a Larger Work; and
+
+          (b) under Patents Claims infringed by the making, using or
+          selling of Original Code, to make, have made, use, practice,
+          sell, and offer for sale, and/or otherwise dispose of the
+          Original Code (or portions thereof).
+
+          (c) the licenses granted in this Section 2.1(a) and (b) are
+          effective on the date Initial Developer first distributes
+          Original Code under the terms of this License.
+
+          (d) Notwithstanding Section 2.1(b) above, no patent license is
+          granted: 1) for code that You delete from the Original Code; 2)
+          separate from the Original Code;  or 3) for infringements caused
+          by: i) the modification of the Original Code or ii) the
+          combination of the Original Code with other software or devices.
+
+     2.2. Contributor Grant.
+     Subject to third party intellectual property claims, each Contributor
+     hereby grants You a world-wide, royalty-free, non-exclusive license
+
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Contributor, to use, reproduce, modify,
+          display, perform, sublicense and distribute the Modifications
+          created by such Contributor (or portions thereof) either on an
+          unmodified basis, with other Modifications, as Covered Code
+          and/or as part of a Larger Work; and
+
+          (b) under Patent Claims infringed by the making, using, or
+          selling of  Modifications made by that Contributor either alone
+          and/or in combination with its Contributor Version (or portions
+          of such combination), to make, use, sell, offer for sale, have
+          made, and/or otherwise dispose of: 1) Modifications made by that
+          Contributor (or portions thereof); and 2) the combination of
+          Modifications made by that Contributor with its Contributor
+          Version (or portions of such combination).
+
+          (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
+          effective on the date Contributor first makes Commercial Use of
+          the Covered Code.
+
+          (d)    Notwithstanding Section 2.2(b) above, no patent license is
+          granted: 1) for any code that Contributor has deleted from the
+          Contributor Version; 2)  separate from the Contributor Version;
+          3)  for infringements caused by: i) third party modifications of
+          Contributor Version or ii)  the combination of Modifications made
+          by that Contributor with other software  (except as part of the
+          Contributor Version) or other devices; or 4) under Patent Claims
+          infringed by Covered Code in the absence of Modifications made by
+          that Contributor.
+
+3. Distribution Obligations.
+
+     3.1. Application of License.
+     The Modifications which You create or to which You contribute are
+     governed by the terms of this License, including without limitation
+     Section 2.2. The Source Code version of Covered Code may be
+     distributed only under the terms of this License or a future version
+     of this License released under Section 6.1, and You must include a
+     copy of this License with every copy of the Source Code You
+     distribute. You may not offer or impose any terms on any Source Code
+     version that alters or restricts the applicable version of this
+     License or the recipients' rights hereunder. However, You may include
+     an additional document offering the additional rights described in
+     Section 3.5.
+
+     3.2. Availability of Source Code.
+     Any Modification which You create or to which You contribute must be
+     made available in Source Code form under the terms of this License
+     either on the same media as an Executable version or via an accepted
+     Electronic Distribution Mechanism to anyone to whom you made an
+     Executable version available; and if made available via Electronic
+     Distribution Mechanism, must remain available for at least twelve (12)
+     months after the date it initially became available, or at least six
+     (6) months after a subsequent version of that particular Modification
+     has been made available to such recipients. You are responsible for
+     ensuring that the Source Code version remains available even if the
+     Electronic Distribution Mechanism is maintained by a third party.
+
+     3.3. Description of Modifications.
+     You must cause all Covered Code to which You contribute to contain a
+     file documenting the changes You made to create that Covered Code and
+     the date of any change. You must include a prominent statement that
+     the Modification is derived, directly or indirectly, from Original
+     Code provided by the Initial Developer and including the name of the
+     Initial Developer in (a) the Source Code, and (b) in any notice in an
+     Executable version or related documentation in which You describe the
+     origin or ownership of the Covered Code.
+
+     3.4. Intellectual Property Matters
+          (a) Third Party Claims.
+          If Contributor has knowledge that a license under a third party's
+          intellectual property rights is required to exercise the rights
+          granted by such Contributor under Sections 2.1 or 2.2,
+          Contributor must include a text file with the Source Code
+          distribution titled "LEGAL" which describes the claim and the
+          party making the claim in sufficient detail that a recipient will
+          know whom to contact. If Contributor obtains such knowledge after
+          the Modification is made available as described in Section 3.2,
+          Contributor shall promptly modify the LEGAL file in all copies
+          Contributor makes available thereafter and shall take other steps
+          (such as notifying appropriate mailing lists or newsgroups)
+          reasonably calculated to inform those who received the Covered
+          Code that new knowledge has been obtained.
+
+          (b) Contributor APIs.
+          If Contributor's Modifications include an application programming
+          interface and Contributor has knowledge of patent licenses which
+          are reasonably necessary to implement that API, Contributor must
+          also include this information in the LEGAL file.
+
+               (c)    Representations.
+          Contributor represents that, except as disclosed pursuant to
+          Section 3.4(a) above, Contributor believes that Contributor's
+          Modifications are Contributor's original creation(s) and/or
+          Contributor has sufficient rights to grant the rights conveyed by
+          this License.
+
+     3.5. Required Notices.
+     You must duplicate the notice in Exhibit A in each file of the Source
+     Code.  If it is not possible to put such notice in a particular Source
+     Code file due to its structure, then You must include such notice in a
+     location (such as a relevant directory) where a user would be likely
+     to look for such a notice.  If You created one or more Modification(s)
+     You may add your name as a Contributor to the notice described in
+     Exhibit A.  You must also duplicate this License in any documentation
+     for the Source Code where You describe recipients' rights or ownership
+     rights relating to Covered Code.  You may choose to offer, and to
+     charge a fee for, warranty, support, indemnity or liability
+     obligations to one or more recipients of Covered Code. However, You
+     may do so only on Your own behalf, and not on behalf of the Initial
+     Developer or any Contributor. You must make it absolutely clear than
+     any such warranty, support, indemnity or liability obligation is
+     offered by You alone, and You hereby agree to indemnify the Initial
+     Developer and every Contributor for any liability incurred by the
+     Initial Developer or such Contributor as a result of warranty,
+     support, indemnity or liability terms You offer.
+
+     3.6. Distribution of Executable Versions.
+     You may distribute Covered Code in Executable form only if the
+     requirements of Section 3.1-3.5 have been met for that Covered Code,
+     and if You include a notice stating that the Source Code version of
+     the Covered Code is available under the terms of this License,
+     including a description of how and where You have fulfilled the
+     obligations of Section 3.2. The notice must be conspicuously included
+     in any notice in an Executable version, related documentation or
+     collateral in which You describe recipients' rights relating to the
+     Covered Code. You may distribute the Executable version of Covered
+     Code or ownership rights under a license of Your choice, which may
+     contain terms different from this License, provided that You are in
+     compliance with the terms of this License and that the license for the
+     Executable version does not attempt to limit or alter the recipient's
+     rights in the Source Code version from the rights set forth in this
+     License. If You distribute the Executable version under a different
+     license You must make it absolutely clear that any terms which differ
+     from this License are offered by You alone, not by the Initial
+     Developer or any Contributor. You hereby agree to indemnify the
+     Initial Developer and every Contributor for any liability incurred by
+     the Initial Developer or such Contributor as a result of any such
+     terms You offer.
+
+     3.7. Larger Works.
+     You may create a Larger Work by combining Covered Code with other code
+     not governed by the terms of this License and distribute the Larger
+     Work as a single product. In such a case, You must make sure the
+     requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+
+     If it is impossible for You to comply with any of the terms of this
+     License with respect to some or all of the Covered Code due to
+     statute, judicial order, or regulation then You must: (a) comply with
+     the terms of this License to the maximum extent possible; and (b)
+     describe the limitations and the code they affect. Such description
+     must be included in the LEGAL file described in Section 3.4 and must
+     be included with all distributions of the Source Code. Except to the
+     extent prohibited by statute or regulation, such description must be
+     sufficiently detailed for a recipient of ordinary skill to be able to
+     understand it.
+
+5. Application of this License.
+
+     This License applies to code to which the Initial Developer has
+     attached the notice in Exhibit A and to related Covered Code.
+
+6. Versions of the License.
+
+     6.1. New Versions.
+     Netscape Communications Corporation ("Netscape") may publish revised
+     and/or new versions of the License from time to time. Each version
+     will be given a distinguishing version number.
+
+     6.2. Effect of New Versions.
+     Once Covered Code has been published under a particular version of the
+     License, You may always continue to use it under the terms of that
+     version. You may also choose to use such Covered Code under the terms
+     of any subsequent version of the License published by Netscape. No one
+     other than Netscape has the right to modify the terms applicable to
+     Covered Code created under this License.
+
+     6.3. Derivative Works.
+     If You create or use a modified version of this License (which you may
+     only do in order to apply it to code which is not already Covered Code
+     governed by this License), You must (a) rename Your license so that
+     the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
+     "MPL", "NPL" or any confusingly similar phrase do not appear in your
+     license (except to note that your license differs from this License)
+     and (b) otherwise make it clear that Your version of the license
+     contains terms which differ from the Mozilla Public License and
+     Netscape Public License. (Filling in the name of the Initial
+     Developer, Original Code or Contributor in the notice described in
+     Exhibit A shall not of themselves be deemed to be modifications of
+     this License.)
+
+7. DISCLAIMER OF WARRANTY.
+
+     COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+     WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+     DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
+     THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
+     IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
+     YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
+     COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
+     OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
+     ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+
+     8.1.  This License and the rights granted hereunder will terminate
+     automatically if You fail to comply with terms herein and fail to cure
+     such breach within 30 days of becoming aware of the breach. All
+     sublicenses to the Covered Code which are properly granted shall
+     survive any termination of this License. Provisions which, by their
+     nature, must remain in effect beyond the termination of this License
+     shall survive.
+
+     8.2.  If You initiate litigation by asserting a patent infringement
+     claim (excluding declatory judgment actions) against Initial Developer
+     or a Contributor (the Initial Developer or Contributor against whom
+     You file such action is referred to as "Participant")  alleging that:
+
+     (a)  such Participant's Contributor Version directly or indirectly
+     infringes any patent, then any and all rights granted by such
+     Participant to You under Sections 2.1 and/or 2.2 of this License
+     shall, upon 60 days notice from Participant terminate prospectively,
+     unless if within 60 days after receipt of notice You either: (i)
+     agree in writing to pay Participant a mutually agreeable reasonable
+     royalty for Your past and future use of Modifications made by such
+     Participant, or (ii) withdraw Your litigation claim with respect to
+     the Contributor Version against such Participant.  If within 60 days
+     of notice, a reasonable royalty and payment arrangement are not
+     mutually agreed upon in writing by the parties or the litigation claim
+     is not withdrawn, the rights granted by Participant to You under
+     Sections 2.1 and/or 2.2 automatically terminate at the expiration of
+     the 60 day notice period specified above.
+
+     (b)  any software, hardware, or device, other than such Participant's
+     Contributor Version, directly or indirectly infringes any patent, then
+     any rights granted to You by such Participant under Sections 2.1(b)
+     and 2.2(b) are revoked effective as of the date You first made, used,
+     sold, distributed, or had made, Modifications made by that
+     Participant.
+
+     8.3.  If You assert a patent infringement claim against Participant
+     alleging that such Participant's Contributor Version directly or
+     indirectly infringes any patent where such claim is resolved (such as
+     by license or settlement) prior to the initiation of patent
+     infringement litigation, then the reasonable value of the licenses
+     granted by such Participant under Sections 2.1 or 2.2 shall be taken
+     into account in determining the amount or value of any payment or
+     license.
+
+     8.4.  In the event of termination under Sections 8.1 or 8.2 above,
+     all end user license agreements (excluding distributors and resellers)
+     which have been validly granted by You or any distributor hereunder
+     prior to termination shall survive termination.
+
+9. LIMITATION OF LIABILITY.
+
+     UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
+     (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
+     DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
+     OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
+     ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
+     CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
+     WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
+     COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
+     INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
+     LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
+     RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+     PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
+     EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
+     THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
+
+10. U.S. GOVERNMENT END USERS.
+
+     The Covered Code is a "commercial item," as that term is defined in
+     48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
+     software" and "commercial computer software documentation," as such
+     terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
+     C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
+     all U.S. Government End Users acquire Covered Code with only those
+     rights set forth herein.
+
+11. MISCELLANEOUS.
+
+     This License represents the complete agreement concerning subject
+     matter hereof. If any provision of this License is held to be
+     unenforceable, such provision shall be reformed only to the extent
+     necessary to make it enforceable. This License shall be governed by
+     California law provisions (except to the extent applicable law, if
+     any, provides otherwise), excluding its conflict-of-law provisions.
+     With respect to disputes in which at least one party is a citizen of,
+     or an entity chartered or registered to do business in the United
+     States of America, any litigation relating to this License shall be
+     subject to the jurisdiction of the Federal Courts of the Northern
+     District of California, with venue lying in Santa Clara County,
+     California, with the losing party responsible for costs, including
+     without limitation, court costs and reasonable attorneys' fees and
+     expenses. The application of the United Nations Convention on
+     Contracts for the International Sale of Goods is expressly excluded.
+     Any law or regulation which provides that the language of a contract
+     shall be construed against the drafter shall not apply to this
+     License.
+
+12. RESPONSIBILITY FOR CLAIMS.
+
+     As between Initial Developer and the Contributors, each party is
+     responsible for claims and damages arising, directly or indirectly,
+     out of its utilization of rights under this License and You agree to
+     work with Initial Developer and Contributors to distribute such
+     responsibility on an equitable basis. Nothing herein is intended or
+     shall be deemed to constitute any admission of liability.
+
+13. MULTIPLE-LICENSED CODE.
+
+     Initial Developer may designate portions of the Covered Code as
+     "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
+     Developer permits you to utilize portions of the Covered Code under
+     Your choice of the NPL or the alternative licenses, if any, specified
+     by the Initial Developer in the file described in Exhibit A.
+
+EXHIBIT A -Mozilla Public License.
+
+     ``The contents of this file are subject to the Mozilla Public License
+     Version 1.1 (the "License"); you may not use this file except in
+     compliance with the License. You may obtain a copy of the License at
+     http://www.mozilla.org/MPL/
+
+     Software distributed under the License is distributed on an "AS IS"
+     basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+     License for the specific language governing rights and limitations
+     under the License.
+
+     The Original Code is RabbitMQ Management Plugin.
+
+     The Initial Developer of the Original Code is GoPivotal, Inc.
+     Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.''
+
+     [NOTE: The text of this Exhibit A may differ slightly from the text of
+     the notices in the Source Code files of the Original Code. You should
+     use the text of this Exhibit A rather than the text found in the
+     Original Code Source Code for Your Modifications.]
diff --git a/deps/rabbitmq_management_agent/Makefile b/deps/rabbitmq_management_agent/Makefile
new file mode 100644 (file)
index 0000000..79f4f1c
--- /dev/null
@@ -0,0 +1,14 @@
+PROJECT = rabbitmq_management_agent
+
+DEPS = rabbit_common rabbit
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_management_agent/erlang.mk b/deps/rabbitmq_management_agent/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards «and» for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang OAuth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_management_agent/rabbitmq-components.mk b/deps/rabbitmq_management_agent/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround is:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#     target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_agent_app.erl
rename to deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_app.erl
index 6220ac671258538e28778aa89c92777e85e59266..11c181a74a8429549054d2b1dd0c1c01aecdb452 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_agent_app).
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_agent_sup.erl
rename to deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup.erl
index cd0635fbe49ce669400a0220ab246a76d37d3c5f..26adfe8fb312113dd53e6c799d38e3959bf9be3f 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_agent_sup).
similarity index 81%
rename from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_db_handler.erl
rename to deps/rabbitmq_management_agent/src/rabbit_mgmt_db_handler.erl
index 5bd9bc01d15dc56d1ce4229aaa8a5f8a854a524a..95545a3d0c23d6dcade4b0030a7bcc96a1802dc1 100644 (file)
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_db_handler).
 
+-include_lib("rabbit_common/include/rabbit.hrl").
+
 %% Make sure our database is hooked in *before* listening on the network or
 %% recovering queues (i.e. so there can't be any events fired before it starts).
 -rabbit_boot_step({rabbit_mgmt_db_handler,
@@ -93,8 +95,17 @@ init([]) ->
 handle_call(_Request, State) ->
     {ok, not_understood, State}.
 
+handle_event(#event{type = Type} = Event, State) when Type == channel_stats;
+                                                      Type == channel_created;
+                                                      Type == channel_closed ->
+    gen_server:cast({global, rabbit_mgmt_channel_stats_collector}, {event, Event}),
+    {ok, State};
+handle_event(#event{type = Type} = Event, State) when Type == queue_stats;
+                                                      Type == queue_deleted ->
+    gen_server:cast({global, rabbit_mgmt_queue_stats_collector}, {event, Event}),
+    {ok, State};
 handle_event(Event, State) ->
-    gen_server:cast({global, rabbit_mgmt_db}, {event, Event}),
+    gen_server:cast({global, rabbit_mgmt_event_collector}, {event, Event}),
     {ok, State}.
 
 handle_info(_Info, State) ->
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbit_mgmt_external_stats.erl
rename to deps/rabbitmq_management_agent/src/rabbit_mgmt_external_stats.erl
index 62f783bfdf41a39ad309b955bb819a609726f331..294cf6782559cd1309821916cfb3d2a5c29dd1d7 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Management Console.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mgmt_external_stats).
                uptime, run_queue, processors, exchange_types,
                auth_mechanisms, applications, contexts,
                log_file, sasl_log_file, db_dir, config_files, net_ticktime,
-               enabled_plugins, persister_stats]).
+               enabled_plugins, persister_stats, gc_num, gc_bytes_reclaimed,
+               context_switches]).
 
 %%--------------------------------------------------------------------
 
--record(state, {fd_total, fhc_stats, fhc_stats_derived, node_owners}).
+-record(state, {
+    fd_total,
+    fhc_stats,
+    node_owners,
+    last_ts
+}).
 
 %%--------------------------------------------------------------------
 
@@ -196,7 +202,16 @@ i(auth_mechanisms, _State) ->
       fun (N) -> lists:member(list_to_atom(binary_to_list(N)), Mechanisms) end);
 i(applications,    _State) ->
     [format_application(A) ||
-        A <- lists:keysort(1, rabbit_misc:which_applications())].
+        A <- lists:keysort(1, rabbit_misc:which_applications())];
+i(gc_num, _State) ->
+    {GCs, _, _} = erlang:statistics(garbage_collection),
+    GCs;
+i(gc_bytes_reclaimed, _State) ->
+    {_, Words, _} = erlang:statistics(garbage_collection),
+    Words * erlang:system_info(wordsize);
+i(context_switches, _State) ->
+    {Sw, 0} = erlang:statistics(context_switches),
+    Sw.
 
 log_location(Type) ->
     case rabbit:log_location(Type) of
@@ -227,11 +242,8 @@ set_plugin_name(Name, Module) ->
     [{name, list_to_binary(atom_to_list(Name))} |
      proplists:delete(name, Module:description())].
 
-persister_stats(#state{fhc_stats         = FHC,
-                       fhc_stats_derived = FHCD}) ->
-    [{flatten_key(K), V} || {{_Op, Type} = K, V} <- FHC,
-                            Type =/= time] ++
-        [{flatten_key(K), V} || {K, V} <- FHCD].
+persister_stats(#state{fhc_stats = FHC}) ->
+    [{flatten_key(K), V} || {{_Op, _Type} = K, V} <- FHC].
 
 flatten_key({A, B}) ->
     list_to_atom(atom_to_list(A) ++ "_" ++ atom_to_list(B)).
@@ -345,7 +357,8 @@ code_change(_, State, _) -> {ok, State}.
 
 emit_update(State0) ->
     State = update_state(State0),
-    rabbit_event:notify(node_stats, infos(?KEYS, State)),
+    Stats = infos(?KEYS, State),
+    rabbit_event:notify(node_stats, Stats),
     erlang:send_after(?REFRESH_RATIO, self(), emit_update),
     emit_node_node_stats(State).
 
@@ -362,20 +375,8 @@ emit_node_node_stats(State = #state{node_owners = Owners}) ->
         {Node, _Owner, Stats} <- Links],
     State#state{node_owners = NewOwners}.
 
-update_state(State0 = #state{fhc_stats = FHC0}) ->
+update_state(State0) ->
+    %% Store raw data; the average operation time is calculated during querying
+    %% from the accumulated total
     FHC = file_handle_cache_stats:get(),
-    Avgs = [{{Op, avg_time}, avg_op_time(Op, V, FHC, FHC0)}
-            || {{Op, time}, V} <- FHC],
-    State0#state{fhc_stats         = FHC,
-                 fhc_stats_derived = Avgs}.
-
--define(MICRO_TO_MILLI, 1000).
-
-avg_op_time(Op, Time, FHC, FHC0) ->
-    Time0 = pget({Op, time}, FHC0),
-    TimeDelta = Time - Time0,
-    OpDelta = pget({Op, count}, FHC) - pget({Op, count}, FHC0),
-    case OpDelta of
-        0 -> 0;
-        _ -> (TimeDelta / OpDelta) / ?MICRO_TO_MILLI
-    end.
+    State0#state{fhc_stats = FHC}.
similarity index 68%
rename from rabbitmq-server/plugins-src/rabbitmq-management-agent/src/rabbitmq_management_agent.app.src
rename to deps/rabbitmq_management_agent/src/rabbitmq_management_agent.app.src
index bd4b8261f7a22fc4ee56d1f153ac6533d399042a..79901577943861f54a2e6cee7dd4870b6ce40c4a 100644 (file)
@@ -1,8 +1,8 @@
 {application, rabbitmq_management_agent,
  [{description, "RabbitMQ Management Agent"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {mod, {rabbit_mgmt_agent_app, []}},
   {env, []},
-  {applications, [kernel, stdlib, rabbit]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit]}]}.
diff --git a/deps/rabbitmq_management_visualiser/CODE_OF_CONDUCT.md b/deps/rabbitmq_management_visualiser/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_management_visualiser/CONTRIBUTING.md b/deps/rabbitmq_management_visualiser/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/LICENSE-MPL-RabbitMQ
rename to deps/rabbitmq_management_visualiser/LICENSE-MPL-RabbitMQ
index bbb7f541713fb7009cb4515b1d36d8da7f14d78c..5c045b1d3074f9b10fee60dff3060bf2efc5b22a 100644 (file)
@@ -447,7 +447,7 @@ EXHIBIT A -Mozilla Public License.
      The Original Code is RabbitMQ Visualiser.
 
      The Initial Developer of the Original Code is GoPivotal, Inc.
-     Copyright (c) 2011-2014 GoPivotal, Inc.  All rights reserved.''
+     Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.''
 
      [NOTE: The text of this Exhibit A may differ slightly from the text of
      the notices in the Source Code files of the Original Code. You should
diff --git a/deps/rabbitmq_management_visualiser/Makefile b/deps/rabbitmq_management_visualiser/Makefile
new file mode 100644 (file)
index 0000000..600e846
--- /dev/null
@@ -0,0 +1,14 @@
+PROJECT = rabbitmq_management_visualiser
+
+DEPS = rabbit_common rabbit rabbitmq_management webmachine
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_management_visualiser/erlang.mk b/deps/rabbitmq_management_visualiser/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simply writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - an Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = Guard expressions ('and'/'or') for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decoding and encoding of JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang OAuth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print($(1)): print a human-readable metadata listing for package $(1).
+# The "Pkg name" line is emitted only when the index key differs from the
+# app name (core_eq check); all other fields come from the pkg_$(1)_* vars.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# 'make search q=term': print every package whose name or description
+# contains the term (case-insensitive via core_lc); without q, list all
+# packages in the index.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Rebar-built deps are fetched into the same directory as erlang.mk deps.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Resolve a dependency's effective name, repository and commit: an explicit
+# dep_* user override wins over the built-in package index (pkg_*_*);
+# git:// GitHub URLs are rewritten to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# First-level application directories under APPS_DIR, and the on-disk
+# directory of every non-ignored build/runtime dependency.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Put APPS_DIR and DEPS_DIR on ERL_LIBS (so -include_lib and code loading
+# find them) unless one of them is already present.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# V=0: terse " DEP <name>" progress line; V=2: trace recipe shell commands
+# with 'set -x'; other V values: no prefix.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# Build every application under APPS_DIR. IS_APP=1 marks a recursive
+# invocation (no-op there); apps.log records already-built apps so each is
+# built at most once across the recursion.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+# Recurse into each app exactly once, appending visited apps to apps.log.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# Build all fetched dependencies (after apps). SKIP_DEPS turns this into a
+# no-op; deps.log ensures each dep is built at most once across recursive
+# invocations. A dep without any kind of Makefile is a hard error (exit 2).
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch($(1)): choose how to patch a freshly fetched dependency so
+# it builds under erlang.mk:
+#   - dep ships its own erlang.mk  -> regenerate .app.src, replace erlang.mk;
+#   - dep has a Makefile           -> if it includes ../*.mk or mentions
+#                                     rebar (in the Makefile or any *.mk),
+#                                     run the full dep_autopatch2 treatment,
+#                                     otherwise just regenerate the app file;
+#   - no Makefile, no src/         -> install a no-op Makefile;
+#   - no Makefile but src/ exists  -> full dep_autopatch2 treatment.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
endef
+
+# dep_autopatch2($(1)): evaluate any .app.src.script, regenerate the dep's
+# .app.src, then either drive the build through rebar (when rebar or a
+# rebar.config[.script] is present) or generate a minimal erlang.mk Makefile.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop($(1)): overwrite the dep's Makefile with a single
+# do-nothing 'noop' target, for deps that contain no buildable source.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# Set NO_AUTOPATCH_ERLANG_MK to keep the dep's own bundled erlang.mk
+# (the else-branch define expands to the shell no-op ':').
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen($(1)): write a minimal Makefile that builds the dep with
+# erlang.mk itself (debug_info only), for deps lacking a usable build system.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build a pinned rebar (fixed commit, for reproducibility) into
+# $(ERLANG_MK_TMP)/rebar, only when not already present; it is used to
+# autopatch rebar-based dependencies.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar($(1)): preserve the dep's original Makefile as
+# Makefile.orig.mk, translate its rebar configuration into erlang.mk
+# variables (via dep_autopatch_rebar.erl), and delete any prebuilt .app
+# file so it gets regenerated.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Rewrite the {modules, ...} entry of an already-built ebin/$1.app so it
+# lists exactly the .erl files found under the dep's src/ directory.
+# NOTE(review): this body appears to be inlined into a single `erl -eval`
+# string by the erlang macro (newlines collapsed) — confirm before adding
+# any '%' Erlang comments inside the define. "\\\\.erl$$" is the regex
+# ".erl$" after Make's $$ and backslash escaping.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# If the dep ships src/$1.app.src.script (dynamic app-src, rebar style),
+# evaluate it with file:script/2 under empty bindings and write the
+# resulting term back as a plain src/$1.app.src. A badmatch here (script
+# missing or failing) aborts the autopatch step.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# Normalize the dep's application resource file for an Erlang.mk build:
+#   - fall back to ebin/$1.app when src/$1.app.src does not exist;
+#   - empty the modules list (regenerated later by dep_autopatch_app.erl);
+#   - turn rebar's `{vsn, git}` into the literal string "git";
+#   - ensure a `registered` entry exists;
+#   - when the input was ebin/$1.app, delete it after writing .app.src.
+# '$(1)' is substituted unquoted here, so the dep name must form a valid
+# unquoted Erlang atom.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch-method macros. Each expands inside a single recipe line of
+# dep_target, so commands are chained with `;` and `\` continuations rather
+# than written as separate recipe lines — do not add lines inside them.
+
+# git: clone without checkout (-n), then check out the pinned
+# commit/branch/tag resolved by dep_commit.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# git submodule already registered in the enclosing repository.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Mercurial: clone without working copy (-U), then update to the revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Subversion checkout; note no revision pinning, unlike git/hg above.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Local copy: dep_repo is a filesystem path rather than a URL.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Download package $(1) at version $(2) from the hex.pm tarball store and
+# unpack the inner contents.tar.gz into the dep directory. Runs inside an
+# `erl -eval` string — no '%' comments may be added inside the define.
+# NOTE(review): the hard-coded S3 endpoint is the hex.pm CDN of this
+# erlang.mk vintage; newer mirrors live under repo.hex.pm — confirm before
+# bumping. The {_, 200, _} match makes any non-200 response a hard crash.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# The version is the second word of the dep_$(1) specification.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Used when dep_fetch cannot resolve a method: fail the recipe with a
+# distinctive exit code so the cause is recognizable in build logs.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Old format: `dep_foo = <repo> [<commit>]` with no method word; git is
+# assumed and the commit defaults to master when absent.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch-method name for dependency $(1):
+#   - dep_$(1) defined and its first word names a dep_fetch_* macro -> it;
+#   - dep_$(1) defined with an unknown first word -> "legacy" when building
+#     as a dependency (IS_DEP), otherwise "fail";
+#   - no dep_$(1) but $(1) is a known package -> that package's method;
+#   - otherwise -> "fail".
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# Generate the fetch/bootstrap rule for one dependency. $(1)/$1 is the dep
+# as listed in DEPS/BUILD_DEPS; dep_name may map it to a different
+# directory name. The generated rule: refuses deps that shadow an
+# in-project application, fetches with the method resolved by dep_fetch,
+# runs autoreconf/configure for autotools-based deps (configure failures
+# are tolerated via the leading '-'), then autopatches the dep unless it is
+# in NO_AUTOPATCH. The amqp_client/rabbit branches carry RabbitMQ-specific
+# patch steps (cloning codegen/server repos) instead of generic autopatch.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+# POSIX marks `[ expr1 -o expr2 ]` obsolescent; use two test invocations
+# joined with `||` instead (grouped so the `&&` binds as before).
+	$(verbose) if { [ -f $(DEPS_DIR)/$(1)/configure.ac ] || [ -f $(DEPS_DIR)/$(1)/configure.in ]; } \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+# `[ a -a b ]` is likewise obsolescent; `[ a ] && [ b ]` is the portable form.
+	$(verbose) if [ "$(1)" = "amqp_client" ] && [ "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" ] && [ "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch/bootstrap rule per build-time and runtime dep.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into in-project applications (ALL_APPS_DIRS),
+# but only from the top-level invocation: IS_APP=1 is set on the recursive
+# make so the sub-make skips this section and does not recurse again.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean wipes the whole deps directory unless dep handling is skipped.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include a plugin makefile that lives inside a dependency. $(1) is the
+# makefile path relative to DEPS_DIR, $(2) the dep name; the empty-recipe
+# stub rule makes the included file "buildable" by fetching the dep first,
+# and -include tolerates its absence on the first pass.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# DEP_PLUGINS entries are either "dep/path/to/file.mk" or a bare dep name,
+# which is shorthand for "dep/plugins.mk".
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL template compilation plugin.
+
+# Configuration.
+
+# DTL_FULL_PATH: non-empty to encode template subdirectories into module
+# names; DTL_PATH: where templates live; DTL_SUFFIX: appended to module
+# names; DTL_OPTS: extra erlydtl:compile/3 options.
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH the beam name encodes the template's path relative to
+# DTL_PATH ('/' becomes '_'); otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+# The `test -f $@` guard skips the mass-touch on the very first run, when
+# the timestamp file does not exist yet.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Compile each template listed in $(1) with erlydtl into ebin/. Module
+# naming follows the same DTL_FULL_PATH rule used for BEAM_FILES. Runs
+# inside an `erl -eval` string — no '%' comments may be added in the body.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Only templates newer than the target ($?) are recompiled.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol buffers compilation (via protobuffs_compile).
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate .erl/.hrl from the given .proto files, compile the generated
+# sources into ebin/, then drop the intermediate .erl files.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Erlang driver for protobuffs_compile (runs inside `erl -eval`; no '%'
+# comments may be added in the body).
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; projects may override with ERLC_OPTS.
+# COMPILE_FIRST / ERLC_EXCLUDE hold bare module names (no dir, no .erl);
+# the *_PATHS variants expand them to src/ paths.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# Per-tool verbosity prefixes: with V=0 a terse " TOOL  files" line is
+# printed; with V=2 the shell command itself is traced via `set -x`;
+# any other V leaves the prefix empty (plain command echo by make).
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# NOTE(review): ebin/test presumably marks a previous test build; when it
+# exists, `app` forces a clean first so test-compiled beams are not shipped
+# — confirm against the test plugin that creates this marker.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# Template for the generated ebin/$(PROJECT).app. Two variants: a library
+# application (no src/$(PROJECT_MOD).erl: no mod entry, empty registered)
+# and a regular application with a start module. $(1) is the commit id,
+# written only when building as a dependency (IS_DEP); $(2) is the computed
+# module list. The define bodies are written verbatim into the .app file,
+# so no comment lines may be added inside them.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+# app-build is a thin alias for the .app target; the ':' no-op keeps the
+# recipe non-empty so make reports success explicitly.
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# erlc +noobj emits .erl/.hrl/.asn1db next to the .asn1 inputs; move the
+# generated sources and headers where the rest of the build expects them.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile changed MIBs ($?) into priv/mibs/, then derive .hrl headers from
+# the resulting .bin files.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+# .xrl/.yrl inputs generate same-named .erl files in src/, which then take
+# part in the regular Erlang compilation and dependency scanning.
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Dependency scanner (runs inside `erl -eval`; no '%' comments may be added
+# in the define body). It parses every project .erl file form by form and
+# records:
+#   - behaviour / parse_transform / import edges between project modules in
+#     an acyclic digraph, later flattened (reverse topological order) into
+#     a COMPILE_FIRST += line;
+#   - include / include_lib edges to headers found under include/ or src/
+#     in an ets bag, later emitted as per-file make prerequisites.
+# The combined result is written to $(1), i.e. $(PROJECT).d. Each fun takes
+# itself as its first argument to allow recursion inside -eval.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Regenerate $(PROJECT).d, the per-module Erlang dependency makefile, by
+# running the makedep.erl snippet (defined above) whenever a source file,
+# header or makefile changes. Skipped when NO_MAKEDEP is set and a .d file
+# already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+# Pull in the generated per-module dependency rules, if present.
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# Compile Erlang sources into ebin/. When building as a dependency (IS_DEP
+# set), -Werror is stripped so warnings in third-party code do not fail the
+# build.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build ebin/$(PROJECT).app: compile only the out-of-date sources ($?), then
+# write the .app file — either generated from scratch (no .app.src present)
+# or by filling the {modules, []} and {id, "git"} placeholders found in
+# src/$(PROJECT).app.src.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove every build artifact produced by this section, including generated
+# headers/sources from MIB/ASN.1/xrl/yrl inputs.
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# NOTE(review): this endif closes a conditional opened above this hunk.
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+# Documentation-only dependencies, checked out under $(DEPS_DIR).
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+# Generate a fetch rule for every DOC_DEPS entry.
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# doc-deps fetches and builds each documentation dependency; it becomes a
+# no-op when SKIP_DEPS is set.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+# Release-only dependencies, checked out under $(DEPS_DIR).
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+# Generate a fetch rule for every REL_DEPS entry.
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+# rel-deps fetches and builds each release dependency; it becomes a no-op
+# when SKIP_DEPS is set.
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+# Compiler options used only for test builds; -DTEST=1 lets sources compile
+# test-only sections conditionally.
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+# Fetch and build test-only dependencies unless SKIP_DEPS is set.
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+# Compile every .erl under TEST_DIR (when it exists) with TEST_ERLC_OPTS.
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# test-build compiles the project and its tests with TEST_ERLC_OPTS. Three
+# variants: no src/ at all (tests only); first test build (no ebin/test
+# marker yet) which forces a clean rebuild and drops the marker; later test
+# builds which are incremental.
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Normalize an ERLC_OPTS string so each option becomes a single word.
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Convert one erlc option to rebar erl_opts form: drop -Werror entirely and
+# strip the leading '+' from +option flags.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Template for a rebar.config equivalent to this Makefile's DEPS and
+# ERLC_OPTS, so the project can also be consumed as a rebar dependency.
+# Hex deps become {name,"version"}; everything else a git tuple.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export the rendered template through the environment so the recipe's
+# shell can write it out verbatim.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the PDF and chunked-HTML user guide when doc/src/guide/book.asciidoc
+# exists; otherwise asciidoc-guide is a no-op.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build man pages from doc/src/manual/*.asciidoc, one gzipped page per
+# configured section. install-docs/install-asciidoc are only defined when
+# manual sources exist.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# Copy the gzipped man pages into MAN_INSTALL_PATH.
+# Fix: install(1) takes the group via -g and the owner via -o. The previous
+# invocation passed `id -u` (the uid) to -g and `id -g` (the gid) to -o,
+# i.e. the two values were swapped; use the matching id for each flag.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# Extend the shared 'help' output with the bootstrap-related targets.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+# NOTE: text inside define...endef is written verbatim into generated files
+# by render_template, so comments are kept outside the bodies.
+
+# .app.src skeleton for an OTP application with a start module.
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+# .app.src skeleton for a library application (no start module).
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+# When SP is set, the generated Makefile records it so later template
+# rendering uses the same whitespace setting.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile skeleton for an application under $(APPS_DIR); it includes the
+# top-level erlang.mk via a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# application behaviour skeleton that starts the top-level supervisor.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+# relx configuration skeleton used by bootstrap-rel.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+# Empty sys.config skeleton.
+define bs_sys_config
+[
+].
+endef
+
+# vm.args skeleton: node name, cookie and heartbeat monitoring.
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+# supervisor skeleton with an empty child list.
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# gen_server skeleton with an empty state record and pass-through callbacks.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+# Minimal module skeleton.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# cowboy HTTP handler skeleton replying 200 to every request.
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# gen_fsm skeleton with a single state_name state and pass-through callbacks.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+# cowboy loop handler skeleton (hibernates between messages, 5000 ms timeout).
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# cowboy REST handler skeleton serving a static HTML body.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# cowboy websocket handler skeleton echoing text and binary frames.
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# ranch protocol skeleton: accepts the connection then loops forever.
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render the named template ($(1)) into the file $(2). Single quotes,
+# percent signs and newlines are escaped for printf; tabs are replaced
+# with WS.
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace written in place of template tabs: SP spaces when
+# SP is set, otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# bootstrap: create a skeleton OTP application in the current directory
+# (Makefile, src/, application module and top-level supervisor).
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# bootstrap-lib: like bootstrap, but without an application module or
+# supervisor.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# bootstrap-rel: generate relx.config plus the rel/ sys.config and vm.args
+# needed to build a release.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+# new-app: create a new OTP application skeleton under $(APPS_DIR)/$(in).
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# new-lib: like new-app, but without an application module or supervisor.
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# new: render template t=TPL into src/$(n).erl, delegating to the
+# application named by in= when given.
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# list-templates: print the names of all tpl_* templates defined above.
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+# "shared" builds a shared library; any other value builds an executable.
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+# Position-independent code for shared objects on every platform but Windows.
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+# ERTS/erl_interface paths come from the generated $(C_SRC_ENV) file,
+# included further below.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# C skeleton for a NIF library: load/upgrade/unload hooks plus a sample
+# hello/1 function that tags an atom argument.
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+# Erlang wrapper that loads the NIF shared object from the application's
+# priv dir at module load time; the hello/1 stub raises not_loaded if the
+# NIF could not be loaded.
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# new-nif: generate both halves of a new NIF named $n, optionally inside
+# the application given by in=.
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+# Space-separated OTP versions to test against; leaving it empty makes
+# 'ci' a no-op.
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+# Build/install every requested OTP version without running the tests.
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# ci-<version>: run 'make clean ci-setup tests' with that OTP installation
+# first in PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# Build and install an OTP version with kerl unless it is already present.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+# Download the kerl script on first use and make it executable.
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+# Auto-discover suites: every *_SUITE.erl under TEST_DIR, minus the suffix.
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+# Base ct_run invocation; suites are compiled by test-build beforehand, so
+# ct's own auto-compile is disabled.
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+# ct runs every discovered suite, plus the suites of each application in
+# apps/ (unless already running inside one, IS_APP=1).
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+# Run ct recursively in each application directory.
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# Optional t=group or t=group:case narrows a ct-SUITE run.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+# One ct-<suite> target per discovered suite.
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+# Per-project PLT file; exported so the dialyzer process picks it up.
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Erlang snippet run via 'erl ... -extra $(ERLC_OPTS)': keeps only the
+# -D/-I/-pa options (and their arguments) from the plain arguments so they
+# can be forwarded to dialyzer.
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# If the PLT already exists, dialyze does not depend on (rebuild) it;
+# otherwise the PLT is built first. The recipe line after the endif
+# attaches to whichever dialyze rule was just defined.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $$XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_management_visualiser/rabbitmq-components.mk b/deps/rabbitmq_management_visualiser/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#     target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/src/rabbit_mgmt_wm_all.erl
rename to deps/rabbitmq_management_visualiser/src/rabbit_mgmt_wm_all.erl
index 9ffa234262cadb61138bcfdb19a0abf2dcce0e79..d76da5b8a84a6bcf1b4c55bd49da51fe95fadd9e 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Visualiser.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2011-2014 GoPivotal, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 -module(rabbit_mgmt_wm_all).
 
 -export([init/1, to_json/2, content_types_provided/2, is_authorized/2,
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/src/rabbit_visualiser_mgmt.erl
rename to deps/rabbitmq_management_visualiser/src/rabbit_visualiser_mgmt.erl
index a5aecc6b01e930d5e91d5aca87a5a4686a88d6c4..27992dd864ada2914d565cd23a46387346ef5e0a 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ Visualiser.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2011-2014 GoPivotal, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_visualiser_mgmt).
similarity index 55%
rename from rabbitmq-server/plugins-src/rabbitmq-management-visualiser/src/rabbitmq_management_visualiser.app.src
rename to deps/rabbitmq_management_visualiser/src/rabbitmq_management_visualiser.app.src
index 82ec731405658a898db1fa20e6cca56d0791d972..28f70fec06154ab71f186148992dc67d32d51610 100644 (file)
@@ -1,6 +1,6 @@
 {application, rabbitmq_management_visualiser,
  [{description, "RabbitMQ Visualiser"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
-  {applications, [kernel, stdlib, rabbit, rabbitmq_management]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit, rabbitmq_management]}]}.
diff --git a/deps/rabbitmq_mqtt/CODE_OF_CONDUCT.md b/deps/rabbitmq_mqtt/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_mqtt/CONTRIBUTING.md b/deps/rabbitmq_mqtt/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_mqtt/Makefile b/deps/rabbitmq_mqtt/Makefile
new file mode 100644 (file)
index 0000000..218bd34
--- /dev/null
@@ -0,0 +1,22 @@
+PROJECT = rabbitmq_mqtt
+
+DEPS = ranch rabbit_common rabbit amqp_client
+TEST_DEPS = emqttc ct_helper rabbitmq_ct_helpers
+
+dep_ct_helper = git https://github.com/extend/ct_helper.git master
+dep_emqttc = git https://github.com/emqtt/emqttc.git master
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
+
+
+clean::
+       if test -d test/java_SUITE_data; then cd test/java_SUITE_data && make clean; fi
diff --git a/deps/rabbitmq_mqtt/README.md b/deps/rabbitmq_mqtt/README.md
new file mode 100644 (file)
index 0000000..bc119ff
--- /dev/null
@@ -0,0 +1,38 @@
+# RabbitMQ MQTT Plugin
+
+## Getting Started
+
+This is an MQTT plugin for RabbitMQ.
+
+The plugin is included in the RabbitMQ distribution.  To enable
+it, use <a href="http://www.rabbitmq.com/man/rabbitmq-plugins.1.man.html">rabbitmq-plugins</a>:
+
+    rabbitmq-plugins enable rabbitmq_mqtt
+
+Default port used by the plugin is `1883`.
+
+## Documentation
+
+[MQTT plugin documentation](http://www.rabbitmq.com/mqtt.html) is available
+from rabbitmq.com.
+
+## Contributing
+
+See [CONTRIBUTING.md](https://github.com/rabbitmq/rabbitmq-mqtt/blob/master/CONTRIBUTING.md).
+
+### Running Tests
+
+After cloning RabbitMQ umbrella repository, change into the `rabbitmq-mqtt` directory
+and run
+
+    make tests
+
+This will bring up a RabbitMQ node with the plugin enabled and run integration tests
+against it. Note that there must be no other MQTT server running on ports `1883` and `8883`.
+
+## Copyright and License
+
+(c) 2007 — 2016 Pivotal Software, Inc.
+
+Released under the [Mozilla Public License](http://www.rabbitmq.com/mpl.html),
+the same as RabbitMQ.
diff --git a/deps/rabbitmq_mqtt/erlang.mk b/deps/rabbitmq_mqtt/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards and for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies listed in IGNORE_DEPS are filtered out of ALL_DEPS_DIRS
+# below and thus never fetched or built.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Accessors resolving a dependency's name, repository and commit: a user
+# supplied dep_<name> = <fetch> <repo> <commit> takes precedence, otherwise
+# the pkg_<name>_* index above is consulted.  dep_repo also rewrites
+# git:// GitHub URLs to https:// ones.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Append APPS_DIR and DEPS_DIR to ERL_LIBS unless one of them is already
+# present in the colon-separated list.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# Build every application under APPS_DIR.  apps.log (under ERLANG_MK_TMP,
+# reset only by the top-level invocation where neither IS_APP nor IS_DEP is
+# set) records apps already built, so recursive makes do not rebuild them.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# Build every fetched dependency, again deduplicated via deps.log.  A
+# dependency without any flavor of Makefile is a hard error (exit 2).
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# Decide how to patch dependency $(1) so it builds under erlang.mk:
+# - already erlang.mk based: regenerate its .app.src and replace its
+#   erlang.mk with an include of ours;
+# - has a Makefile that includes ../*.mk or mentions rebar (directly or in
+#   any *.mk file): full conversion via dep_autopatch2;
+# - plain Makefile otherwise: just refresh the .app modules list;
+# - no Makefile: no-op Makefile if there is no src/, else full conversion.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# Full conversion of dependency $(1): evaluate src/$1.app.src.script when
+# present, normalize the .app.src, then either translate rebar configuration
+# into an erlang.mk Makefile (fetching a pinned rebar first) or generate a
+# minimal Makefile when no rebar files exist.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# Give dependency $(1) a Makefile whose default target does nothing, so the
+# deps loop above still finds a Makefile to run.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# (Set NO_AUTOPATCH_ERLANG_MK to keep the dependency's own copy; the
+# replacement is a one-line include of this erlang.mk, path-relativized.)
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# Generate a minimal erlang.mk Makefile for a dependency with no build
+# system of its own.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build rebar once, pinned to a fixed commit, under
+# ERLANG_MK_TMP/rebar; skipped when that directory already exists.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# Convert a rebar-based dependency: park its original Makefile as
+# Makefile.orig.mk (referenced by the generated hook rules), run the
+# dep_autopatch_rebar.erl converter, and drop any stale prebuilt .app file.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# Erlang snippet (run via $(call erlang,...)) that translates a dependency's
+# rebar.config / rebar.config.script into an erlang.mk Makefile: it emits
+# ERLC_OPTS from erl_opts, DEPS lines from the deps list, COMPILE_FIRST
+# from erl_first_files, pre-hook rules, a c_src/Makefile.erlang.mk for
+# port_specs/port_env (C NIF/driver builds), and finally runs any rebar
+# compile plugins.  The $$\(...) sequences produce literal $(...) in the
+# generated Makefile; do not edit the escaping.  No comments can be added
+# inside the define: its body is Erlang source, not make syntax.
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Erlang snippet: rewrite the modules list of an existing ebin/$1.app from
+# the *.erl files actually found (recursively) under the dependency's src/
+# directory.  A no-op when the .app file does not exist.
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+# Erlang snippet: evaluate src/$1.app.src.script (with empty bindings) and
+# write the resulting term back as a static src/$1.app.src.
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+# Erlang snippet: normalize the dependency's .app.src — empty the modules
+# list (erlang.mk regenerates it at build time), turn `{vsn, git}` into the
+# literal "git", and ensure a `registered` entry exists.  Falls back to
+# ebin/$1.app as input when src/$1.app.src is absent, deleting the input
+# afterwards in that case.
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+# Fetch method implementations.  Each dep_fetch_<method> define is invoked
+# by dep_target via $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1)).
+
+# git: shallow-free clone then checkout of the resolved commit/branch/tag.
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# git-submodule: dependency lives as a submodule of the current repository.
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# hg: Mercurial clone (no working copy) then update to the commit.
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# svn: plain checkout of the repository URL.
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# cp: copy a local directory given as the "repo".
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# hex.erl: download $(1)-$(2).tar from the hex.pm S3 mirror and unpack the
+# inner contents.tar.gz into the dependency directory.
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# fail: terminal method for unknown/invalid dependency specifications.
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve which fetch method to use for $(1): a recognized method from the
+# user's dep_<name> spec, else "legacy" (inside a dep) or "fail"; for
+# package-index entries, the index's fetch field.
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+# Rule template, instantiated via $(eval) below once per BUILD_DEPS/DEPS
+# entry: refuse deps that clash with an application in APPS_DIR, fetch the
+# dependency, run autoreconf/configure when autotools files are present,
+# then autopatch it (unless listed in NO_AUTOPATCH).  The amqp_client and
+# rabbit branches are RabbitMQ-specific patch hooks gated by
+# RABBITMQ_CLIENT_PATCH / RABBITMQ_SERVER_PATCH.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+# Instantiate the fetch rule above for every declared dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every application directory; only at the
+# top level (IS_APP unset) to avoid infinite recursion from within an app.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+# distclean removes the whole DEPS_DIR unless deps handling is disabled.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# -include plugin file $(1) from DEPS_DIR, and make it order-only depend on
+# dependency $(2) being fetched (empty recipe: fetching the dep provides it).
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# A DEP_PLUGINS entry is either "dep/path/file.mk" (used as-is, owner is the
+# first path component) or a bare "dep" (implying dep/plugins.mk).
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+# ErlyDTL template compilation: *.dtl files under DTL_PATH are compiled to
+# ebin/<name>$(DTL_SUFFIX).beam modules.  DTL_FULL_PATH keeps the path
+# (slashes mapped to underscores) in the module name.
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+# NOTE(review): this $(words ...) guard looks redundant — the enclosing
+# ifneq ($(DTL_FILES),) already guarantees a non-empty list.  Kept as-is
+# since this is vendored erlang.mk.
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet: compile each template in the space-separated list $(1)
+# with erlydtl, deriving the module name per DTL_FULL_PATH/DTL_SUFFIX.
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# Recompile only the templates newer than the target ($? holds them).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate Erlang sources from the .proto files in $(1), compile the
+# generated modules into ebin/, then remove the intermediate .erl files.
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+# Erlang snippet: protobuffs writes output two directory levels above
+# each .proto file (for src/foo.proto: ./include and ./ebin).
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+# Only .proto files newer than the .app file ($?) are recompiled.
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# COMPILE_FIRST/ERLC_EXCLUDE are module names; the *_PATHS variants map
+# them to src/<name>.erl (and mibs/<name>.mib for MIBs).
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# The *_verbose_0 variants print a short tag when V=0; the *_verbose_2
+# variants trace the recipe with `set -x` when V=2; for any other V the
+# variable expands to nothing.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# ebin/test is created by test-build: its presence means the previous
+# build used TEST_ERLC_OPTS, so a regular build must start from clean.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# .app file template used when there is no src/$(PROJECT).app.src.
+# Without a $(PROJECT_MOD) module the application is a library: empty
+# registered list and no mod entry.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# Generated .erl files are moved into src/ so the normal Erlang build
+# picks them up; headers and .asn1db go to include/.
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile MIBs into priv/mibs/, then derive include/ headers from the
+# resulting .bin files.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+# Leex/Yecc sources are translated into src/*.erl for the normal build.
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang snippet: scans every module in ERL_FILES for -behaviour,
+# parse_transform, -include/-include_lib and -import attributes, then
+# writes $(1) (the .d file) containing one dependency rule per module
+# plus a COMPILE_FIRST list obtained from a topological sort of the
+# behaviour/parse_transform graph.
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+# Regenerate the .d file unless NO_MAKEDEP is set and it already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# On the first run the marker file is only recorded; on later runs all
+# sources are touched so they recompile with the new Makefile settings.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# -Werror is stripped when building as a dependency (IS_DEP) so that
+# warnings in deps do not break the parent build.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Compile the changed sources, then produce ebin/$(PROJECT).app: without
+# src/$(PROJECT).app.src the app_file template above is rendered;
+# otherwise the .app.src gets its empty modules list and "git" id
+# substituted via sed.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+# Register a fetch target for every documentation-only dependency.
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# SKIP_DEPS turns doc-deps into a no-op; otherwise fetch and build each.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+# Register a fetch target for every release-only dependency.
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+# SKIP_DEPS turns rel-deps into a no-op; otherwise fetch and build each.
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+# Test builds always define the TEST macro for conditional test code.
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three variants: project with no src/ (tests only); first test build
+# after a regular build (clean first, then mark with ebin/test); and
+# incremental test builds while ebin/test already exists.
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Only +opt flags survive conversion (the leading + is cut off to form
+# an atom); -Werror and other flags are dropped.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Rebar-compatible config body: DEPS as hex or git dep tuples, plus the
+# converted erl_opts list.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Exported through the environment so the multi-line value reaches the
+# shell recipe intact.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the PDF and chunked-HTML user guide when its source exists;
+# otherwise the target is a no-op.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Render each manual page with a2x, then sort the generated pages into
+# doc/man<section>/ and gzip them.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# Fix: install(1) takes the owner via -o and the group via -g. The
+# original passed `id -u` (the uid) to -g and `id -g` (the gid) to -o,
+# installing the pages with owner and group swapped.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -o `id -u` -g `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# help:: is a double-colon rule, so this appends the bootstrap section
+# to the help output contributed elsewhere in erlang.mk.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+# These templates expand $p (project/app name), set via $(eval) in the
+# bootstrap/new-* recipes further below.
+
+# .app.src skeleton for an OTP application (rendered only under LEGACY).
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+# .app.src skeleton for a library application (no mod entry).
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+# The SP variant also records SP in the generated Makefile so templates
+# rendered there use the same whitespace setting.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile skeleton for apps under $(APPS_DIR); includes the top-level
+# erlang.mk through a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# application callback module skeleton; starts the $p_sup supervisor.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+# relx release configuration skeleton.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+# Empty sys.config skeleton.
+define bs_sys_config
+[
+].
+endef
+
+# vm.args skeleton: node name, cookie and heartbeat monitoring.
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+# These expand $(n), the module name passed to the `new` target.
+
+# Supervisor skeleton with an empty child list.
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# gen_server skeleton with all callbacks stubbed.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+# Minimal plain module skeleton.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy HTTP handler skeleton (cowboy 1.x callback set).
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# gen_fsm skeleton with a single state_name state.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+# Cowboy loop handler skeleton.
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Cowboy REST handler skeleton serving text/html.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy websocket handler skeleton echoing text/binary frames.
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Ranch protocol skeleton with a trivial accept loop.
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render template variable $(1) into file $(2). Single quotes, percents
+# and newlines are escaped for printf; tabs are written as $(WS).
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS: whitespace emitted for each tab in a template. With SP set it is
+# a run of $(SP) spaces (wordlist/subst trick), otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# $p (project/app name) and $n (module name) are eval'd so the bs_*/tpl_*
+# templates above can expand them.
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# With in=APP set, recurse into that app's directory; otherwise render
+# the template straight into src/.
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# Templates are discovered by their tpl_ variable-name prefix.
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+# env.mk caches the ERTS/erl_interface paths queried from erl (generated
+# by the $(C_SRC_ENV) rule further below).
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+# "shared" links with -shared; any other value produces an executable.
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+# Position-independent code everywhere except Windows.
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+# The *_DIR variables below come from the generated $(C_SRC_ENV) file.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three cases: no c_src/ at all (nothing to do); a user-provided
+# c_src/Makefile (delegate to it); or bare sources built with the
+# built-in rules below.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link all objects; -shared is added only for C_SRC_TYPE=shared.
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+# Ask the installed erl runtime for the ERTS and erl_interface paths
+# and cache them in $(C_SRC_ENV), which is -included below.
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 63%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/include/rabbit_mqtt.hrl
rename to deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl
index b620a31cd2b98e377670b00f4ae7d4c9540b2221..dbc99283e8953ca33b0a67b2cfd4e2117435ee1b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -define(CLIENT_ID_MAXLEN, 23).
 -record(state,      { socket,
                       conn_name,
                       await_recv,
+                      deferred_recv,
+                      received_connect_frame,
                       connection_state,
                       keepalive,
                       keepalive_sup,
                       conserve,
                       parse_state,
-                      proc_state }).
+                      proc_state,
+                      connection,
+                      stats_timer }).
 
 %% processor state
 -record(proc_state, { socket,
                       channels,
                       connection,
                       exchange,
-                      ssl_login_name }).
+                      adapter_info,
+                      ssl_login_name,
+                      %% Retained messages handler. See rabbit_mqtt_retainer_sup
+                      %% and rabbit_mqtt_retainer.
+                      retainer_pid,
+                      auth_state,
+                      send_fun}).
+
+-record(auth_state, {username,
+                     user,
+                     vhost}).
+
+%% does not include vhost: it is used in
+%% the table name
+-record(retained_message, {topic,
+                           mqtt_msg}).
similarity index 82%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/include/rabbit_mqtt_frame.hrl
rename to deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl
index 968f98641e915f72bbe118c0921352a563761b15..110199dfeeee3ce5dbbecedfa01ceaf32bbd9ee7 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -define(PROTOCOL_NAMES,  [{3, "MQIsdp"}, {4, "MQTT"}]).
@@ -48,6 +48,9 @@
 -define(QOS_1, 1).
 -define(QOS_2, 2).
 
+%% TODO
+-type message_id() :: any().
+
 -record(mqtt_frame, {fixed,
                      variable,
                      payload}).
@@ -69,7 +72,8 @@
                               username,
                               password}).
 
--record(mqtt_frame_connack,  {return_code}).
+-record(mqtt_frame_connack,  {session_present,
+                              return_code}).
 
 -record(mqtt_frame_publish,  {topic_name,
                               message_id}).
 
 -record(mqtt_frame_other,    {other}).
 
--record(mqtt_msg,            {retain,
-                              qos,
-                              topic,
-                              dup,
-                              message_id,
-                              payload}).
+-record(mqtt_msg,            {retain :: boolean(),
+                              qos :: ?QOS_0 | ?QOS_1 | ?QOS_2,
+                              topic :: string(),
+                              dup :: boolean(),
+                              message_id :: message_id(),
+                              payload :: binary()}).
+
+-type mqtt_msg() :: #mqtt_msg{}.
diff --git a/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl b/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl
new file mode 100644 (file)
index 0000000..8267ce5
--- /dev/null
@@ -0,0 +1,16 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
diff --git a/deps/rabbitmq_mqtt/rabbitmq-components.mk b/deps/rabbitmq_mqtt/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 68%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt.erl
index 92c2916e3f9198f667ae5cb4927b1940c65282dc..d69d9165f9aa343fc37585d440c15412f4de730b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt).
 start(normal, []) ->
     {ok, Listeners} = application:get_env(tcp_listeners),
     {ok, SslListeners} = application:get_env(ssl_listeners),
-    rabbit_mqtt_sup:start_link({Listeners, SslListeners}, []).
+    Result = rabbit_mqtt_sup:start_link({Listeners, SslListeners}, []),
+    EMPid = case rabbit_event:start_link() of
+              {ok, Pid}                       -> Pid;
+              {error, {already_started, Pid}} -> Pid
+            end,
+    gen_event:add_handler(EMPid, rabbit_mqtt_vhost_event_handler, []),
+    Result.
 
 stop(_State) ->
     ok.
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_collector.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl
index 52a3da9cf43574e35b12d1dd9cf65c74ccff5f4a..50361efa0851ec9fec6772eff02e3a8f9dc1925a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt_collector).
similarity index 74%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_connection_sup.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl
index fd083a4cf6303d1a99665b91ce9cdcaa6fcdcf9c..a0ffec232618bf122ff65d52944c805f1d2032ba 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt_connection_sup).
 
 -behaviour(supervisor2).
+-behaviour(ranch_protocol).
 
--define(MAX_WAIT, 16#ffffffff).
+-include_lib("rabbit_common/include/rabbit.hrl").
 
--export([start_link/0, start_keepalive_link/0]).
+-export([start_link/4, start_keepalive_link/0]).
 
 -export([init/1]).
 
 %%----------------------------------------------------------------------------
 
-start_link() ->
+start_link(Ref, Sock, _Transport, []) ->
     {ok, SupPid} = supervisor2:start_link(?MODULE, []),
-    {ok, ReaderPid} = supervisor2:start_child(
-                        SupPid,
-                        {rabbit_mqtt_reader,
-                         {rabbit_mqtt_reader, start_link, []},
-                         intrinsic, ?MAX_WAIT, worker, [rabbit_mqtt_reader]}),
     {ok, KeepaliveSup} = supervisor2:start_child(
                           SupPid,
-                          {rabbit_keepalive_sup,
+                          {rabbit_mqtt_keepalive_sup,
                            {rabbit_mqtt_connection_sup, start_keepalive_link, []},
                            intrinsic, infinity, supervisor, [rabbit_keepalive_sup]}),
-    {ok, SupPid, {KeepaliveSup, ReaderPid}}.
+    {ok, ReaderPid} = supervisor2:start_child(
+                        SupPid,
+                        {rabbit_mqtt_reader,
+                         {rabbit_mqtt_reader, start_link, [KeepaliveSup, Ref, Sock]},
+                         intrinsic, ?WORKER_WAIT, worker, [rabbit_mqtt_reader]}),
+    {ok, SupPid, ReaderPid}.
 
 start_keepalive_link() ->
     supervisor2:start_link(?MODULE, []).
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_frame.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl
index 628241144bac293ed8887779e1e490bd42536592..0b80925eb6fbfcffb9bd333fb3164b1e701843cb 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt_frame).
@@ -164,9 +164,10 @@ serialise_payload(undefined)           -> <<>>;
 serialise_payload(B) when is_binary(B) -> B.
 
 serialise_variable(#mqtt_frame_fixed   { type        = ?CONNACK } = Fixed,
-                   #mqtt_frame_connack { return_code = ReturnCode },
+                   #mqtt_frame_connack { session_present = SessionPresent,
+                                         return_code = ReturnCode },
                    <<>> = PayloadBin) ->
-    VariableBin = <<?RESERVED:8, ReturnCode:8>>,
+    VariableBin = <<?RESERVED:7, (opt(SessionPresent)):1, ReturnCode:8>>,
     serialise_fixed(Fixed, VariableBin, PayloadBin);
 
 serialise_variable(#mqtt_frame_fixed  { type       = SubAck } = Fixed,
similarity index 55%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_processor.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl
index 5c51a8bf0b0dca72be66072a55f2c067467701cb..f2047942d260b358bc3c214a1c370930d2ca648b 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt_processor).
 
--export([info/2, initial_state/2,
+-export([info/2, initial_state/2, initial_state/4,
          process_frame/2, amqp_pub/2, amqp_callback/2, send_will/1,
          close_connection/1]).
 
+%% for testing purposes
+-export([get_vhost_username/1]).
+
 -include_lib("amqp_client/include/amqp_client.hrl").
 -include("rabbit_mqtt_frame.hrl").
 -include("rabbit_mqtt.hrl").
 -define(FRAME_TYPE(Frame, Type),
         Frame = #mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }}).
 
-initial_state(Socket,SSLLoginName) ->
-    #proc_state{ unacked_pubs  = gb_trees:empty(),
-                 awaiting_ack  = gb_trees:empty(),
-                 message_id    = 1,
-                 subscriptions = dict:new(),
-                 consumer_tags = {undefined, undefined},
-                 channels      = {undefined, undefined},
-                 exchange      = rabbit_mqtt_util:env(exchange),
-                 socket        = Socket,
-                 ssl_login_name = SSLLoginName }.
+initial_state(Socket, SSLLoginName) ->
+    initial_state(Socket, SSLLoginName,
+        adapter_info(Socket, 'MQTT'),
+        fun send_client/2).
+
+initial_state(Socket, SSLLoginName,
+              AdapterInfo0 = #amqp_adapter_info{additional_info = Extra},
+              SendFun) ->
+    %% MQTT connections use exactly one channel. The frame max is not
+    %% applicable and there is no way to know what client is used.
+    AdapterInfo = AdapterInfo0#amqp_adapter_info{additional_info = [
+        {channels, 1},
+        {channel_max, 1},
+        {frame_max, 0},
+        {client_properties,
+         [{<<"product">>, longstr, <<"MQTT client">>}]} | Extra]},
+    #proc_state{ unacked_pubs   = gb_trees:empty(),
+                 awaiting_ack   = gb_trees:empty(),
+                 message_id     = 1,
+                 subscriptions  = dict:new(),
+                 consumer_tags  = {undefined, undefined},
+                 channels       = {undefined, undefined},
+                 exchange       = rabbit_mqtt_util:env(exchange),
+                 socket         = Socket,
+                 adapter_info   = AdapterInfo,
+                 ssl_login_name = SSLLoginName,
+                 send_fun       = SendFun }.
 
 info(client_id, #proc_state{ client_id = ClientId }) -> ClientId.
 
@@ -47,22 +67,26 @@ process_frame(#mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }},
     {error, connect_expected, PState};
 process_frame(Frame = #mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }},
               PState) ->
-    process_request(Type, Frame, PState).
+    case process_request(Type, Frame, PState) of
+        {ok, PState1} -> {ok, PState1, PState1#proc_state.connection};
+        Ret -> Ret
+    end.
 
 process_request(?CONNECT,
                 #mqtt_frame{ variable = #mqtt_frame_connect{
-                                          username   = Username,
-                                          password   = Password,
-                                          proto_ver  = ProtoVersion,
-                                          clean_sess = CleanSess,
-                                          client_id  = ClientId0,
-                                          keep_alive = Keepalive} = Var},
-                PState = #proc_state{ ssl_login_name = SSLLoginName }) ->
+                                           username   = Username,
+                                           password   = Password,
+                                           proto_ver  = ProtoVersion,
+                                           clean_sess = CleanSess,
+                                           client_id  = ClientId0,
+                                           keep_alive = Keepalive} = Var},
+                PState = #proc_state{ ssl_login_name = SSLLoginName,
+                                      send_fun = SendFun }) ->
     ClientId = case ClientId0 of
                    []    -> rabbit_mqtt_util:gen_client_id();
                    [_|_] -> ClientId0
                end,
-    {ReturnCode, PState1} =
+    {Return, PState1} =
         case {lists:member(ProtoVersion, proplists:get_keys(?PROTOCOL_NAMES)),
               ClientId0 =:= [] andalso CleanSess =:= false} of
             {false, _} ->
@@ -72,11 +96,19 @@ process_request(?CONNECT,
             _ ->
                 case creds(Username, Password, SSLLoginName) of
                     nocreds ->
-                        rabbit_log:error("MQTT login failed - no credentials~n"),
+                        rabbit_log:error("MQTT login failed: no credentials provided~n"),
+                        {?CONNACK_CREDENTIALS, PState};
+                    {invalid_creds, {undefined, Pass}} when is_list(Pass) ->
+                        rabbit_log:error("MQTT login failed: no user username is provided"),
+                        {?CONNACK_CREDENTIALS, PState};
+                    {invalid_creds, {User, undefined}} when is_list(User) ->
+                        rabbit_log:error("MQTT login failed for ~p: no password provided", [User]),
                         {?CONNACK_CREDENTIALS, PState};
                     {UserBin, PassBin} ->
                         case process_login(UserBin, PassBin, ProtoVersion, PState) of
-                            {?CONNACK_ACCEPT, Conn} ->
+                            {?CONNACK_ACCEPT, Conn, VHost, AState} ->
+                                 RetainerPid =
+                                   rabbit_mqtt_retainer_sup:child_for_vhost(VHost),
                                 link(Conn),
                                 {ok, Ch} = amqp_connection:open_channel(Conn),
                                 link(Ch),
@@ -87,21 +119,31 @@ process_request(?CONNECT,
                                 #'basic.qos_ok'{} = amqp_channel:call(
                                   Ch, #'basic.qos'{prefetch_count = Prefetch}),
                                 rabbit_mqtt_reader:start_keepalive(self(), Keepalive),
-                                {?CONNACK_ACCEPT,
-                                 maybe_clean_sess(
-                                   PState #proc_state{ will_msg   = make_will_msg(Var),
-                                                       clean_sess = CleanSess,
-                                                       channels   = {Ch, undefined},
-                                                       connection = Conn,
-                                                       client_id  = ClientId })};
+                                {SP, ProcState} =
+                                    maybe_clean_sess(
+                                        PState #proc_state{
+                                            will_msg   = make_will_msg(Var),
+                                            clean_sess = CleanSess,
+                                            channels   = {Ch, undefined},
+                                            connection = Conn,
+                                            client_id  = ClientId,
+                                            retainer_pid = RetainerPid,
+                                            auth_state = AState}),
+                                {{?CONNACK_ACCEPT, SP}, ProcState};
                             ConnAck ->
                                 {ConnAck, PState}
                         end
                 end
         end,
-    send_client(#mqtt_frame{ fixed    = #mqtt_frame_fixed{ type = ?CONNACK},
-                             variable = #mqtt_frame_connack{
-                                         return_code = ReturnCode }}, PState1),
+    {ReturnCode, SessionPresent} = case Return of
+        {?CONNACK_ACCEPT, _} = Return -> Return;
+        Return                        -> {Return, false}
+    end,
+    SendFun(#mqtt_frame{ fixed    = #mqtt_frame_fixed{ type = ?CONNACK},
+                         variable = #mqtt_frame_connack{
+                                     session_present = SessionPresent,
+                                     return_code = ReturnCode}},
+            PState1),
     {ok, PState1};
 
 process_request(?PUBACK,
@@ -109,10 +151,15 @@ process_request(?PUBACK,
                   variable = #mqtt_frame_publish{ message_id = MessageId }},
                 #proc_state{ channels     = {Channel, _},
                              awaiting_ack = Awaiting } = PState) ->
-    Tag = gb_trees:get(MessageId, Awaiting),
-    amqp_channel:cast(
-       Channel, #'basic.ack'{ delivery_tag = Tag }),
-    {ok, PState #proc_state{ awaiting_ack = gb_trees:delete( MessageId, Awaiting)}};
+    %% tag can be missing because of bogus clients and QoS downgrades
+    case gb_trees:is_defined(MessageId, Awaiting) of
+      false ->
+        {ok, PState};
+      true ->
+        Tag = gb_trees:get(MessageId, Awaiting),
+        amqp_channel:cast(Channel, #'basic.ack'{ delivery_tag = Tag }),
+        {ok, PState #proc_state{ awaiting_ack = gb_trees:delete( MessageId, Awaiting)}}
+    end;
 
 process_request(?PUBLISH,
                 #mqtt_frame{
@@ -125,43 +172,65 @@ process_request(?PUBLISH,
                                              dup    = Dup },
                   variable = #mqtt_frame_publish{ topic_name = Topic,
                                                   message_id = MessageId },
-                  payload = Payload }, PState) ->
-    {ok, amqp_pub(#mqtt_msg{ retain     = Retain,
-                             qos        = Qos,
-                             topic      = Topic,
-                             dup        = Dup,
-                             message_id = MessageId,
-                             payload    = Payload }, PState)};
+                  payload = Payload },
+                  PState = #proc_state{retainer_pid = RPid}) ->
+    check_publish_or_die(Topic, fun() ->
+        Msg = #mqtt_msg{retain     = Retain,
+                        qos        = Qos,
+                        topic      = Topic,
+                        dup        = Dup,
+                        message_id = MessageId,
+                        payload    = Payload},
+        Result = amqp_pub(Msg, PState),
+        case Retain of
+          false -> ok;
+          true  -> hand_off_to_retainer(RPid, Topic, Msg)
+        end,
+        {ok, Result}
+    end, PState);
 
 process_request(?SUBSCRIBE,
                 #mqtt_frame{
-                  variable = #mqtt_frame_subscribe{ message_id  = MessageId,
-                                                    topic_table = Topics },
-                  payload = undefined },
-                #proc_state{ channels = {Channel, _},
-                             exchange = Exchange} = PState0) ->
-    {QosResponse, PState1} =
-        lists:foldl(fun (#mqtt_topic{ name = TopicName,
-                                       qos  = Qos }, {QosList, PState}) ->
-                       SupportedQos = supported_subs_qos(Qos),
-                       {Queue, #proc_state{ subscriptions = Subs } = PState1} =
-                           ensure_queue(SupportedQos, PState),
-                       Binding = #'queue.bind'{
-                                   queue       = Queue,
-                                   exchange    = Exchange,
-                                   routing_key = rabbit_mqtt_util:mqtt2amqp(
-                                                   TopicName)},
-                       #'queue.bind_ok'{} = amqp_channel:call(Channel, Binding),
-                       {[SupportedQos | QosList],
-                        PState1 #proc_state{ subscriptions =
-                                             dict:append(TopicName, SupportedQos, Subs) }}
-                   end, {[], PState0}, Topics),
-    send_client(#mqtt_frame{ fixed    = #mqtt_frame_fixed{ type = ?SUBACK },
-                             variable = #mqtt_frame_suback{
-                                         message_id = MessageId,
-                                         qos_table  = QosResponse }}, PState1),
-
-    {ok, PState1};
+                  variable = #mqtt_frame_subscribe{
+                              message_id  = MessageId,
+                              topic_table = Topics},
+                  payload = undefined},
+                #proc_state{channels = {Channel, _},
+                            exchange = Exchange,
+                            retainer_pid = RPid,
+                            send_fun = SendFun } = PState0) ->
+    check_subscribe_or_die(Topics, fun() ->
+        {QosResponse, PState1} =
+            lists:foldl(fun (#mqtt_topic{name = TopicName,
+                                         qos  = Qos}, {QosList, PState}) ->
+                           SupportedQos = supported_subs_qos(Qos),
+                           {Queue, #proc_state{subscriptions = Subs} = PState1} =
+                               ensure_queue(SupportedQos, PState),
+                           Binding = #'queue.bind'{
+                                       queue       = Queue,
+                                       exchange    = Exchange,
+                                       routing_key = rabbit_mqtt_util:mqtt2amqp(
+                                                       TopicName)},
+                           #'queue.bind_ok'{} = amqp_channel:call(Channel, Binding),
+                           {[SupportedQos | QosList],
+                            PState1 #proc_state{subscriptions =
+                                                dict:append(TopicName, SupportedQos, Subs)}}
+                       end, {[], PState0}, Topics),
+        SendFun(#mqtt_frame{fixed    = #mqtt_frame_fixed{type = ?SUBACK},
+                            variable = #mqtt_frame_suback{
+                                        message_id = MessageId,
+                                        qos_table  = QosResponse}}, PState1),
+        %% we may need to send up to length(Topics) messages.
+        %% if QoS is > 0 then we need to generate a message id,
+        %% and increment the counter.
+        N = lists:foldl(fun (Topic, Acc) ->
+                          case maybe_send_retained_message(RPid, Topic, Acc, PState1) of
+                            {true, X} -> Acc + X;
+                            false     -> Acc
+                          end
+                        end, MessageId, Topics),
+        {ok, PState1#proc_state{message_id = N}}
+    end, PState0);
 
 process_request(?UNSUBSCRIBE,
                 #mqtt_frame{
@@ -170,7 +239,8 @@ process_request(?UNSUBSCRIBE,
                   payload = undefined }, #proc_state{ channels      = {Channel, _},
                                                       exchange      = Exchange,
                                                       client_id     = ClientId,
-                                                      subscriptions = Subs0} = PState) ->
+                                                      subscriptions = Subs0,
+                                                      send_fun      = SendFun } = PState) ->
     Queues = rabbit_mqtt_util:subcription_queue_name(ClientId),
     Subs1 =
     lists:foldl(
@@ -191,13 +261,13 @@ process_request(?UNSUBSCRIBE,
           end, QosSubs),
         dict:erase(TopicName, Subs)
       end, Subs0, Topics),
-    send_client(#mqtt_frame{ fixed    = #mqtt_frame_fixed { type       = ?UNSUBACK },
-                             variable = #mqtt_frame_suback{ message_id = MessageId }},
+    SendFun(#mqtt_frame{ fixed    = #mqtt_frame_fixed { type       = ?UNSUBACK },
+                         variable = #mqtt_frame_suback{ message_id = MessageId }},
                 PState),
     {ok, PState #proc_state{ subscriptions = Subs1 }};
 
-process_request(?PINGREQ, #mqtt_frame{}, PState) ->
-    send_client(#mqtt_frame{ fixed = #mqtt_frame_fixed{ type = ?PINGRESP }},
+process_request(?PINGREQ, #mqtt_frame{}, #proc_state{ send_fun = SendFun } = PState) ->
+    SendFun(#mqtt_frame{ fixed = #mqtt_frame_fixed{ type = ?PINGRESP }},
                 PState),
     {ok, PState};
 
@@ -206,6 +276,42 @@ process_request(?DISCONNECT, #mqtt_frame{}, PState) ->
 
 %%----------------------------------------------------------------------------
 
+hand_off_to_retainer(RetainerPid, Topic, #mqtt_msg{payload = <<"">>}) ->
+  rabbit_mqtt_retainer:clear(RetainerPid, Topic),
+  ok;
+hand_off_to_retainer(RetainerPid, Topic, Msg) ->
+  rabbit_mqtt_retainer:retain(RetainerPid, Topic, Msg),
+  ok.
+
+maybe_send_retained_message(RPid, #mqtt_topic{name = S, qos = SubscribeQos}, MsgId,
+                            #proc_state{ send_fun = SendFun } = PState) ->
+  case rabbit_mqtt_retainer:fetch(RPid, S) of
+    undefined -> false;
+    Msg       ->
+                %% calculate effective QoS as the lower value of SUBSCRIBE frame QoS
+                %% and retained message QoS. The spec isn't super clear on this, we
+                %% do what Mosquitto does, per user feedback.
+                Qos = erlang:min(SubscribeQos, Msg#mqtt_msg.qos),
+                Id = case Qos of
+                  ?QOS_0 -> undefined;
+                  ?QOS_1 -> MsgId
+                end,
+                SendFun(#mqtt_frame{fixed = #mqtt_frame_fixed{
+                    type = ?PUBLISH,
+                    qos  = Qos,
+                    dup  = false,
+                    retain = Msg#mqtt_msg.retain
+                 }, variable = #mqtt_frame_publish{
+                    message_id = Id,
+                    topic_name = S
+                 },
+                 payload = Msg#mqtt_msg.payload}, PState),
+                 case Qos of
+                   ?QOS_0 -> false;
+                   ?QOS_1 -> {true, 1}
+                 end
+  end.
+
 amqp_callback({#'basic.deliver'{ consumer_tag = ConsumerTag,
                                  delivery_tag = DeliveryTag,
                                  routing_key  = RoutingKey },
@@ -214,7 +320,8 @@ amqp_callback({#'basic.deliver'{ consumer_tag = ConsumerTag,
                DeliveryCtx} = Delivery,
               #proc_state{ channels      = {Channel, _},
                            awaiting_ack  = Awaiting,
-                           message_id    = MsgId } = PState) ->
+                           message_id    = MsgId,
+                           send_fun      = SendFun } = PState) ->
     amqp_channel:notify_received(DeliveryCtx),
     case {delivery_dup(Delivery), delivery_qos(ConsumerTag, Headers, PState)} of
         {true, {?QOS_0, ?QOS_1}} ->
@@ -224,7 +331,7 @@ amqp_callback({#'basic.deliver'{ consumer_tag = ConsumerTag,
         {true, {?QOS_0, ?QOS_0}} ->
             {ok, PState};
         {Dup, {DeliveryQos, _SubQos} = Qos}     ->
-            send_client(
+            SendFun(
               #mqtt_frame{ fixed = #mqtt_frame_fixed{
                                      type = ?PUBLISH,
                                      qos  = DeliveryQos,
@@ -256,11 +363,12 @@ amqp_callback({#'basic.deliver'{ consumer_tag = ConsumerTag,
     end;
 
 amqp_callback(#'basic.ack'{ multiple = true, delivery_tag = Tag } = Ack,
-              PState = #proc_state{ unacked_pubs = UnackedPubs }) ->
+              PState = #proc_state{ unacked_pubs = UnackedPubs,
+                                    send_fun     = SendFun }) ->
     case gb_trees:size(UnackedPubs) > 0 andalso
          gb_trees:take_smallest(UnackedPubs) of
         {TagSmall, MsgId, UnackedPubs1} when TagSmall =< Tag ->
-            send_client(
+            SendFun(
               #mqtt_frame{ fixed    = #mqtt_frame_fixed{ type = ?PUBACK },
                            variable = #mqtt_frame_publish{ message_id = MsgId }},
               PState),
@@ -270,8 +378,9 @@ amqp_callback(#'basic.ack'{ multiple = true, delivery_tag = Tag } = Ack,
     end;
 
 amqp_callback(#'basic.ack'{ multiple = false, delivery_tag = Tag },
-              PState = #proc_state{ unacked_pubs = UnackedPubs }) ->
-    send_client(
+              PState = #proc_state{ unacked_pubs = UnackedPubs,
+                                    send_fun     = SendFun }) ->
+    SendFun(
       #mqtt_frame{ fixed    = #mqtt_frame_fixed{ type = ?PUBACK },
                    variable = #mqtt_frame_publish{
                                 message_id = gb_trees:get(
@@ -302,9 +411,12 @@ delivery_qos(Tag, Headers,   #proc_state{ consumer_tags = {_, Tag} }) ->
         undefined   -> {?QOS_1, ?QOS_1}
     end.
 
-maybe_clean_sess(PState = #proc_state { clean_sess = false }) ->
+maybe_clean_sess(PState = #proc_state { clean_sess = false,
+                                        channels   = {Channel, _},
+                                        client_id  = ClientId }) ->
     {_Queue, PState1} = ensure_queue(?QOS_1, PState),
-    PState1;
+    SessionPresent = session_present(Channel, ClientId),
+    {SessionPresent, PState1};
 maybe_clean_sess(PState = #proc_state { clean_sess = true,
                                         connection = Conn,
                                         client_id  = ClientId }) ->
@@ -315,7 +427,16 @@ maybe_clean_sess(PState = #proc_state { clean_sess = true,
     catch
         exit:_Error -> ok
     end,
-    PState.
+    {false, PState}.
+
+session_present(Channel, ClientId)  ->
+    {_, QueueQ1} = rabbit_mqtt_util:subcription_queue_name(ClientId),
+    Declare = #'queue.declare'{queue   = QueueQ1,
+                               passive = true},
+    case amqp_channel:call(Channel, Declare) of
+        #'queue.declare_ok'{} -> true;
+        _                     -> false
+    end.
 
 %%----------------------------------------------------------------------------
 
@@ -332,23 +453,31 @@ make_will_msg(#mqtt_frame_connect{ will_retain = Retain,
                payload = Msg }.
 
 process_login(UserBin, PassBin, ProtoVersion,
-              #proc_state{ channels  = {undefined, undefined},
-                           socket    = Sock }) ->
+              #proc_state{ channels     = {undefined, undefined},
+                           socket       = Sock,
+                           adapter_info = AdapterInfo }) ->
     {VHost, UsernameBin} = get_vhost_username(UserBin),
     case amqp_connection:start(#amqp_params_direct{
                                   username     = UsernameBin,
                                   password     = PassBin,
                                   virtual_host = VHost,
-                                  adapter_info = adapter_info(Sock, ProtoVersion)}) of
+                                  adapter_info = set_proto_version(AdapterInfo, ProtoVersion)}) of
         {ok, Connection} ->
             case rabbit_access_control:check_user_loopback(UsernameBin, Sock) of
-                ok          -> {?CONNACK_ACCEPT, Connection};
-                not_allowed -> amqp_connection:close(Connection),
-                               rabbit_log:warning(
-                                 "MQTT login failed for ~p access_refused "
-                                 "(access must be from localhost)~n",
-                                 [binary_to_list(UsernameBin)]),
-                               ?CONNACK_AUTH
+                ok          ->
+                    [{internal_user, InternalUser}] = amqp_connection:info(
+                        Connection, [internal_user]),
+                    {?CONNACK_ACCEPT, Connection, VHost,
+                                      #auth_state{user = InternalUser,
+                                                  username = UsernameBin,
+                                                  vhost = VHost}};
+                not_allowed ->
+                    amqp_connection:close(Connection),
+                    rabbit_log:warning(
+                      "MQTT login failed for ~p access_refused "
+                      "(access must be from localhost)~n",
+                      [binary_to_list(UsernameBin)]),
+                    ?CONNACK_AUTH
             end;
         {error, {auth_failure, Explanation}} ->
             rabbit_log:error("MQTT login failed for ~p auth_failure: ~s~n",
@@ -362,10 +491,15 @@ process_login(UserBin, PassBin, ProtoVersion,
     end.
 
 get_vhost_username(UserBin) ->
-    %% split at the last colon, disallowing colons in username
-    case re:split(UserBin, ":(?!.*?:)") of
-        [Vhost, UserName] -> {Vhost,  UserName};
-        [UserBin]         -> {rabbit_mqtt_util:env(vhost), UserBin}
+    Default = {rabbit_mqtt_util:env(vhost), UserBin},
+    case application:get_env(?APP, ignore_colons_in_username) of
+        {ok, true} -> Default;
+        _ ->
+            %% split at the last colon, disallowing colons in username
+            case re:split(UserBin, ":(?!.*?:)") of
+                [Vhost, UserName] -> {Vhost,  UserName};
+                [UserBin]         -> Default
+            end
     end.
 
 creds(User, Pass, SSLLoginName) ->
@@ -373,23 +507,31 @@ creds(User, Pass, SSLLoginName) ->
     DefaultPass   = rabbit_mqtt_util:env(default_pass),
     {ok, Anon}    = application:get_env(?APP, allow_anonymous),
     {ok, TLSAuth} = application:get_env(?APP, ssl_cert_login),
-    U = case {User =/= undefined, is_binary(DefaultUser),
-              Anon =:= true, (TLSAuth andalso SSLLoginName =/= none)} of
-             {true,  _,    _,    _}     -> list_to_binary(User);
-             {false, _,    _,    true}  -> SSLLoginName;
-             {false, true, true, false} -> DefaultUser;
-             _                          -> nocreds
-        end,
-    case U of
-        nocreds ->
-            nocreds;
-        _ ->
-            case {Pass =/= undefined, is_binary(DefaultPass), Anon =:= true, SSLLoginName == U} of
-                 {true,  _,    _,    _} -> {U, list_to_binary(Pass)};
-                 {false, _,    _,    _} -> {U, none};
-                 {false, true, true, _} -> {U, DefaultPass};
-                 _                      -> {U, none}
-            end
+    HaveDefaultCreds = Anon =:= true andalso
+                       is_binary(DefaultUser) andalso
+                       is_binary(DefaultPass),
+
+    CredentialsProvided = User =/= undefined orelse
+                          Pass =/= undefined,
+
+    CorrectCredentials = is_list(User) andalso
+                         is_list(Pass),
+
+    SSLLoginProvided = TLSAuth =:= true andalso
+                       SSLLoginName =/= none,
+
+    case {CredentialsProvided, CorrectCredentials, SSLLoginProvided, HaveDefaultCreds} of
+        %% Username and password take priority
+        {true, true, _, _}          -> {list_to_binary(User),
+                                        list_to_binary(Pass)};
+        %% Either username or password is provided
+        {true, false, _, _}         -> {invalid_creds, {User, Pass}};
+        %% rabbitmq_mqtt.ssl_cert_login is true. SSL user name provided.
+        %% Authenticating using username only.
+        {false, false, true, _}     -> {SSLLoginName, none};
+        %% Anonymous connection uses default credentials
+        {false, false, false, true} -> {DefaultUser, DefaultPass};
+        _                           -> nocreds
     end.
 
 supported_subs_qos(?QOS_0) -> ?QOS_0;
@@ -429,7 +571,11 @@ ensure_queue(Qos, #proc_state{ channels      = {Channel, _},
                 {QueueQ1,
                  #'queue.declare'{ queue       = QueueQ1,
                                    durable     = true,
-                                   auto_delete = false,
+                                   %% Clean session means a transient connection,
+                                   %% translating into auto-delete.
+                                   %%
+                                   %% see rabbitmq/rabbitmq-mqtt#37
+                                   auto_delete = CleanSess,
                                    arguments   = Qos1Args },
                  #'basic.consume'{ queue  = QueueQ1,
                                    no_ack = false }};
@@ -493,9 +639,12 @@ amqp_pub(#mqtt_msg{ qos        = Qos,
     PState #proc_state{ unacked_pubs   = UnackedPubs1,
                         awaiting_seqno = SeqNo1 }.
 
-adapter_info(Sock, ProtoVer) ->
-    amqp_connection:socket_adapter_info(
-             Sock, {'MQTT', human_readable_mqtt_version(ProtoVer)}).
+adapter_info(Sock, ProtoName) ->
+    amqp_connection:socket_adapter_info(Sock, {ProtoName, "N/A"}).
+
+set_proto_version(AdapterInfo = #amqp_adapter_info{protocol = {Proto, _}}, Vsn) ->
+    AdapterInfo#amqp_adapter_info{protocol = {Proto,
+        human_readable_mqtt_version(Vsn)}}.
 
 human_readable_mqtt_version(3) ->
     "3.1.0";
@@ -521,3 +670,31 @@ close_connection(PState = #proc_state{ connection = Connection,
     catch amqp_connection:close(Connection),
     PState #proc_state{ channels   = {undefined, undefined},
                         connection = undefined }.
+
+% NB: check_*_or_die: MQTT spec says we should ack normally, ie pretend there
+% was no auth error, but here we are closing the connection with an error. This
+% is what happens anyway if there is an authorization failure at the AMQP level.
+
+check_publish_or_die(TopicName, Fn, PState) ->
+  case check_topic_access(TopicName, write, PState) of
+    ok -> Fn();
+    _ -> {err, unauthorized, PState}
+  end.
+
+check_subscribe_or_die([], Fn, _) ->
+  Fn();
+
+check_subscribe_or_die([#mqtt_topic{name = TopicName} | Topics], Fn, PState) ->
+  case check_topic_access(TopicName, read, PState) of
+    ok -> check_subscribe_or_die(Topics, Fn, PState);
+    _ -> {err, unauthorized, PState}
+  end.
+
+check_topic_access(TopicName, Access,
+                   #proc_state{
+                      auth_state = #auth_state{user = User,
+                                               vhost = VHost}}) ->
+  Resource = #resource{virtual_host = VHost,
+                       kind = topic,
+                       name = TopicName},
+  rabbit_access_control:check_resource_access(User, Resource, Access).
similarity index 62%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_reader.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl
index 30cf03288a612dfab7ff95ef24c4213a0707c821..7df1a14788d6e0954a03aa547fec1e23973672e0 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt_reader).
 -behaviour(gen_server2).
 
--export([start_link/0]).
+-export([start_link/3]).
 -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
          code_change/3, terminate/2]).
 
 -export([conserve_resources/3, start_keepalive/2]).
 
+-export([ssl_login_name/1]).
+
 -include_lib("amqp_client/include/amqp_client.hrl").
 -include("rabbit_mqtt.hrl").
 
 %%----------------------------------------------------------------------------
 
-start_link() ->
-    gen_server2:start_link(?MODULE, [], []).
+start_link(KeepaliveSup, Ref, Sock) ->
+    Pid = proc_lib:spawn_link(?MODULE, init,
+                              [[KeepaliveSup, Ref, Sock]]),
+
+    %% In the event that somebody floods us with connections, the
+    %% reader processes can spew log events at error_logger faster
+    %% than it can keep up, causing its mailbox to grow unbounded
+    %% until we eat all the memory available and crash. So here is a
+    %% meaningless synchronous call to the underlying gen_event
+    %% mechanism. When it returns the mailbox is drained, and we
+    %% return to our caller to accept more connections.
+    gen_event:which_handlers(error_logger),
+
+    {ok, Pid}.
 
-conserve_resources(Pid, _, Conserve) ->
+conserve_resources(Pid, _, {_, Conserve, _}) ->
     Pid ! {conserve_resources, Conserve},
     ok.
 
 %%----------------------------------------------------------------------------
 
-init([]) ->
-    {ok, undefined, hibernate, {backoff, 1000, 1000, 10000}}.
-
-handle_call(Msg, From, State) ->
-    {stop, {mqtt_unexpected_call, Msg, From}, State}.
-
-handle_cast({go, Sock0, SockTransform, KeepaliveSup}, undefined) ->
+init([KeepaliveSup, Ref, Sock]) ->
     process_flag(trap_exit, true),
-    case rabbit_net:connection_string(Sock0, inbound) of
+    rabbit_net:accept_ack(Ref, Sock),
+    case rabbit_net:connection_string(Sock, inbound) of
         {ok, ConnStr} ->
-            log(info, "accepting MQTT connection ~p (~s)~n", [self(), ConnStr]),
-            case SockTransform(Sock0) of
-                {ok, Sock} ->
-                    rabbit_alarm:register(
-                      self(), {?MODULE, conserve_resources, []}),
-                    ProcessorState = rabbit_mqtt_processor:initial_state(Sock,ssl_login_name(Sock)),
-                    {noreply,
-                     control_throttle(
-                       #state{socket           = Sock,
-                              conn_name        = ConnStr,
-                              await_recv       = false,
-                              connection_state = running,
-                              keepalive        = {none, none},
-                              keepalive_sup    = KeepaliveSup,
-                              conserve         = false,
-                              parse_state      = rabbit_mqtt_frame:initial_state(),
-                              proc_state       = ProcessorState }),
-                     hibernate};
-                {error, Reason} ->
-                    rabbit_net:fast_close(Sock0),
-                    {stop, {network_error, Reason, ConnStr}, undefined}
-            end;
+            log(debug, "MQTT accepting TCP connection ~p (~s)~n", [self(), ConnStr]),
+            rabbit_alarm:register(
+              self(), {?MODULE, conserve_resources, []}),
+            ProcessorState = rabbit_mqtt_processor:initial_state(Sock,ssl_login_name(Sock)),
+            gen_server2:enter_loop(?MODULE, [],
+             rabbit_event:init_stats_timer(
+              control_throttle(
+               #state{socket                 = Sock,
+                      conn_name              = ConnStr,
+                      await_recv             = false,
+                      connection_state       = running,
+                      received_connect_frame = false,
+                      keepalive              = {none, none},
+                      keepalive_sup          = KeepaliveSup,
+                      conserve               = false,
+                      parse_state            = rabbit_mqtt_frame:initial_state(),
+                      proc_state             = ProcessorState }), #state.stats_timer),
+             {backoff, 1000, 1000, 10000});
         {network_error, Reason} ->
-            rabbit_net:fast_close(Sock0),
-            {stop, {shutdown, Reason}, undefined};
+            rabbit_net:fast_close(Sock),
+            terminate({shutdown, Reason}, undefined);
         {error, enotconn} ->
-            rabbit_net:fast_close(Sock0),
-            {stop, shutdown, undefined};
+            rabbit_net:fast_close(Sock),
+            terminate(shutdown, undefined);
         {error, Reason} ->
-            rabbit_net:fast_close(Sock0),
-            {stop, {network_error, Reason}, undefined}
-    end;
+            rabbit_net:fast_close(Sock),
+            terminate({network_error, Reason}, undefined)
+    end.
+
+handle_call(Msg, From, State) ->
+    {stop, {mqtt_unexpected_call, Msg, From}, State}.
 
 handle_cast(duplicate_id,
             State = #state{ proc_state = PState,
@@ -111,7 +119,11 @@ handle_info({inet_reply, _Ref, ok}, State) ->
     {noreply, State, hibernate};
 
 handle_info({inet_async, Sock, _Ref, {ok, Data}},
-            State = #state{ socket = Sock }) ->
+            State = #state{ socket = Sock, connection_state = blocked }) ->
+    {noreply, State#state{ deferred_recv = Data }, hibernate};
+
+handle_info({inet_async, Sock, _Ref, {ok, Data}},
+            State = #state{ socket = Sock, connection_state = running }) ->
     process_received_bytes(
       Data, control_throttle(State #state{ await_recv = false }));
 
@@ -122,11 +134,12 @@ handle_info({inet_reply, _Sock, {error, Reason}}, State = #state {}) ->
     network_error(Reason, State);
 
 handle_info({conserve_resources, Conserve}, State) ->
-    {noreply, control_throttle(State #state{ conserve = Conserve }), hibernate};
+    maybe_process_deferred_recv(
+        control_throttle(State #state{ conserve = Conserve }));
 
 handle_info({bump_credit, Msg}, State) ->
     credit_flow:handle_bump_msg(Msg),
-    {noreply, control_throttle(State), hibernate};
+    maybe_process_deferred_recv(control_throttle(State));
 
 handle_info({start_keepalives, Keepalive},
             State = #state { keepalive_sup = KeepaliveSup, socket = Sock }) ->
@@ -143,49 +156,56 @@ handle_info(keepalive_timeout, State = #state {conn_name = ConnStr,
     log(error, "closing MQTT connection ~p (keepalive timeout)~n", [ConnStr]),
     send_will_and_terminate(PState, {shutdown, keepalive_timeout}, State);
 
+handle_info(emit_stats, State) ->
+    {noreply, emit_stats(State), hibernate};
+
 handle_info(Msg, State) ->
     {stop, {mqtt_unexpected_msg, Msg}, State}.
 
-terminate({network_error, {ssl_upgrade_error, closed}, ConnStr}, _State) ->
+terminate(Reason, State) ->
+    maybe_emit_stats(State),
+    do_terminate(Reason, State).
+
+do_terminate({network_error, {ssl_upgrade_error, closed}, ConnStr}, _State) ->
     log(error, "MQTT detected TLS upgrade error on ~s: connection closed~n",
        [ConnStr]);
 
-terminate({network_error,
+do_terminate({network_error,
            {ssl_upgrade_error,
             {tls_alert, "handshake failure"}}, ConnStr}, _State) ->
     log(error, "MQTT detected TLS upgrade error on ~s: handshake failure~n",
        [ConnStr]);
 
-terminate({network_error,
+do_terminate({network_error,
            {ssl_upgrade_error,
             {tls_alert, "unknown ca"}}, ConnStr}, _State) ->
     log(error, "MQTT detected TLS certificate verification error on ~s: alert 'unknown CA'~n",
        [ConnStr]);
 
-terminate({network_error,
+do_terminate({network_error,
            {ssl_upgrade_error,
             {tls_alert, Alert}}, ConnStr}, _State) ->
     log(error, "MQTT detected TLS upgrade error on ~s: alert ~s~n",
        [ConnStr, Alert]);
 
-terminate({network_error, {ssl_upgrade_error, Reason}, ConnStr}, _State) ->
+do_terminate({network_error, {ssl_upgrade_error, Reason}, ConnStr}, _State) ->
     log(error, "MQTT detected TLS upgrade error on ~s: ~p~n",
         [ConnStr, Reason]);
 
-terminate({network_error, Reason, ConnStr}, _State) ->
+do_terminate({network_error, Reason, ConnStr}, _State) ->
     log(error, "MQTT detected network error on ~s: ~p~n",
         [ConnStr, Reason]);
 
-terminate({network_error, Reason}, _State) ->
+do_terminate({network_error, Reason}, _State) ->
     log(error, "MQTT detected network error: ~p~n", [Reason]);
 
-terminate(normal, #state{proc_state = ProcState,
+do_terminate(normal, #state{proc_state = ProcState,
                          conn_name  = ConnName}) ->
     rabbit_mqtt_processor:close_connection(ProcState),
     log(info, "closing MQTT connection ~p (~s)~n", [self(), ConnName]),
     ok;
 
-terminate(_Reason, #state{proc_state = ProcState}) ->
+do_terminate(_Reason, #state{proc_state = ProcState}) ->
     rabbit_mqtt_processor:close_connection(ProcState),
     ok.
 
@@ -205,8 +225,19 @@ ssl_login_name(Sock) ->
 
 %%----------------------------------------------------------------------------
 
+log_new_connection(#state{conn_name = ConnStr}) ->
+    log(info, "accepting MQTT connection ~p (~s)~n", [self(), ConnStr]).
+
+process_received_bytes(<<>>, State = #state{proc_state = ProcState,
+                                            received_connect_frame = false}) ->
+    MqttConn = ProcState#proc_state.connection,
+    case MqttConn of
+        undefined -> ok;
+        _         -> log_new_connection(State)
+    end,
+    {noreply, ensure_stats_timer(State#state{ received_connect_frame = true }), hibernate};
 process_received_bytes(<<>>, State) ->
-    {noreply, State, hibernate};
+    {noreply, ensure_stats_timer(State), hibernate};
 process_received_bytes(Bytes,
                        State = #state{ parse_state = ParseState,
                                        proc_state  = ProcState,
@@ -214,16 +245,17 @@ process_received_bytes(Bytes,
     case rabbit_mqtt_frame:parse(Bytes, ParseState) of
         {more, ParseState1} ->
             {noreply,
-             control_throttle( State #state{ parse_state = ParseState1 }),
+             ensure_stats_timer(control_throttle( State #state{ parse_state = ParseState1 })),
              hibernate};
         {ok, Frame, Rest} ->
             case rabbit_mqtt_processor:process_frame(Frame, ProcState) of
-                {ok, ProcState1} ->
+                {ok, ProcState1, ConnPid} ->
                     PS = rabbit_mqtt_frame:initial_state(),
                     process_received_bytes(
                       Rest,
                       State #state{ parse_state = PS,
-                                    proc_state = ProcState1 });
+                                    proc_state = ProcState1,
+                                    connection = ConnPid });
                 {error, Reason, ProcState1} ->
                     log(info, "MQTT protocol error ~p for connection ~p~n",
                         [Reason, ConnStr]),
@@ -267,7 +299,12 @@ send_will_and_terminate(PState, Reason, State) ->
 network_error(closed,
               State = #state{ conn_name  = ConnStr,
                               proc_state = PState }) ->
-    log(info, "MQTT detected network error for ~p: peer closed TCP connection~n",
+    MqttConn = PState#proc_state.connection,
+    log(case MqttConn of
+            undefined  -> debug;
+            _          -> info
+        end,
+        "MQTT detected network error for ~p: peer closed TCP connection~n",
         [ConnStr]),
     send_will_and_terminate(PState, State);
 
@@ -279,6 +316,8 @@ network_error(Reason,
 
 run_socket(State = #state{ connection_state = blocked }) ->
     State;
+run_socket(State = #state{ deferred_recv = Data }) when Data =/= undefined ->
+    State;
 run_socket(State = #state{ await_recv = true }) ->
     State;
 run_socket(State = #state{ socket = Sock }) ->
@@ -297,3 +336,27 @@ control_throttle(State = #state{ connection_state = Flow,
                                                 connection_state = running });
         {_,            _} -> run_socket(State)
     end.
+
+maybe_process_deferred_recv(State = #state{ deferred_recv = undefined }) ->
+    {noreply, State, hibernate};
+maybe_process_deferred_recv(State = #state{ deferred_recv = Data, socket = Sock }) ->
+    handle_info({inet_async, Sock, noref, {ok, Data}},
+                State#state{ deferred_recv = undefined }).
+
+maybe_emit_stats(State) ->
+    rabbit_event:if_enabled(State, #state.stats_timer,
+                            fun() -> emit_stats(State) end).
+
+emit_stats(State=#state{socket=Sock, connection_state=ConnState, connection=Conn}) ->
+    SockInfos = case rabbit_net:getstat(Sock,
+            [recv_oct, recv_cnt, send_oct, send_cnt, send_pend]) of
+        {ok,    SI} -> SI;
+        {error,  _} -> []
+    end,
+    Infos = [{pid, Conn}, {state, ConnState}|SockInfos],
+    rabbit_event:notify(connection_stats, Infos),
+    State1 = rabbit_event:reset_stats_timer(State, #state.stats_timer),
+    ensure_stats_timer(State1).
+
+ensure_stats_timer(State = #state{}) ->
+    rabbit_event:ensure_stats_timer(State, #state.stats_timer, emit_stats).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl
new file mode 100644 (file)
index 0000000..05950f4
--- /dev/null
@@ -0,0 +1,32 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mqtt_retained_msg_store).
+
+-export([behaviour_info/1, table_name_for/1]).
+
+behaviour_info(callbacks) ->
+    [{new,       2},
+     {recover,   2},
+     {insert,    3},
+     {lookup,    2},
+     {delete,    2},
+     {terminate, 1}];
+behaviour_info(_Other) ->
+    undefined.
+
+table_name_for(VHost) ->
+  rabbit_mqtt_util:vhost_name_to_table_name(VHost).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl
new file mode 100644 (file)
index 0000000..ada0336
--- /dev/null
@@ -0,0 +1,63 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mqtt_retained_msg_store_dets).
+
+-behaviour(rabbit_mqtt_retained_msg_store).
+-include("rabbit_mqtt.hrl").
+
+-export([new/2, recover/2, insert/3, lookup/2, delete/2, terminate/1]).
+
+-record(store_state, {
+  %% DETS table name
+  table
+}).
+
+
+new(Dir, VHost) ->
+  Tid = open_table(Dir, VHost),
+  #store_state{table = Tid}.
+
+recover(Dir, VHost) ->
+  case open_table(Dir, VHost) of
+    {error, _} -> {error, uninitialized};
+    {ok, Tid}  -> {ok, #store_state{table = Tid}}
+  end.
+
+insert(Topic, Msg, #store_state{table = T}) ->
+  ok = dets:insert(T, #retained_message{topic = Topic, mqtt_msg = Msg}).
+
+lookup(Topic, #store_state{table = T}) ->
+  case dets:lookup(T, Topic) of
+    []      -> not_found;
+    [Entry] -> Entry
+  end.
+
+delete(Topic, #store_state{table = T}) ->
+  ok = dets:delete(T, Topic).
+
+terminate(#store_state{table = T}) ->
+  ok = dets:close(T).
+
+open_table(Dir, VHost) ->
+  dets:open_file(rabbit_mqtt_retained_msg_store:table_name_for(VHost),
+    table_options(rabbit_mqtt_util:path_for(Dir, VHost, ".dets"))).
+
+table_options(Path) ->
+  [{type, set}, {keypos, #retained_message.topic},
+    {file, Path}, {ram_file, true}, {repair, true},
+    {auto_save, rabbit_misc:get_env(rabbit_mqtt,
+                                    retained_message_store_dets_sync_interval, 2000)}].
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl
new file mode 100644 (file)
index 0000000..eaa769c
--- /dev/null
@@ -0,0 +1,63 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mqtt_retained_msg_store_ets).
+
+-behaviour(rabbit_mqtt_retained_msg_store).
+-include("rabbit_mqtt.hrl").
+
+-export([new/2, recover/2, insert/3, lookup/2, delete/2, terminate/1]).
+
+-record(store_state, {
+  %% ETS table ID
+  table,
+  %% where the table is stored on disk
+  filename
+}).
+
+
+new(Dir, VHost) ->
+  Path = rabbit_mqtt_util:path_for(Dir, VHost),
+  TableName = rabbit_mqtt_retained_msg_store:table_name_for(VHost),
+  file:delete(Path),
+  Tid = ets:new(TableName, [set, public, {keypos, #retained_message.topic}]),
+  #store_state{table = Tid, filename = Path}.
+
+recover(Dir, VHost) ->
+  Path = rabbit_mqtt_util:path_for(Dir, VHost),
+  case ets:file2tab(Path) of
+    {ok, Tid}  -> file:delete(Path),
+                  {ok, #store_state{table = Tid, filename = Path}};
+    {error, _} -> {error, uninitialized}
+  end.
+
+insert(Topic, Msg, #store_state{table = T}) ->
+  true = ets:insert(T, #retained_message{topic = Topic, mqtt_msg = Msg}),
+  ok.
+
+lookup(Topic, #store_state{table = T}) ->
+  case ets:lookup(T, Topic) of
+    []      -> not_found;
+    [Entry] -> Entry
+  end.
+
+delete(Topic, #store_state{table = T}) ->
+  true = ets:delete(T, Topic),
+  ok.
+
+terminate(#store_state{table = T, filename = Path}) ->
+  ok = ets:tab2file(T, Path,
+                    [{extended_info, [object_count]}]).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl
new file mode 100644 (file)
index 0000000..665e607
--- /dev/null
@@ -0,0 +1,107 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mqtt_retainer).
+
+-behaviour(gen_server2).
+-include("rabbit_mqtt.hrl").
+-include("rabbit_mqtt_frame.hrl").
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+         terminate/2, code_change/3, start_link/2]).
+
+-export([retain/3, fetch/2, clear/2, store_module/0]).
+
+-define(SERVER, ?MODULE).
+-define(TIMEOUT, 30000).
+
+-record(retainer_state, {store_mod,
+                         store}).
+
+-spec retain(pid(), string(), mqtt_msg()) ->
+    {noreply, NewState :: term()} |
+    {noreply, NewState :: term(), timeout() | hibernate} |
+    {stop, Reason :: term(), NewState :: term()}.
+
+%%----------------------------------------------------------------------------
+
+start_link(RetainStoreMod, VHost) ->
+    gen_server2:start_link(?MODULE, [RetainStoreMod, VHost], []).
+
+retain(Pid, Topic, Msg = #mqtt_msg{retain = true}) ->
+    gen_server2:cast(Pid, {retain, Topic, Msg});
+
+retain(_Pid, _Topic, Msg = #mqtt_msg{retain = false}) ->
+    throw({error, {retain_is_false, Msg}}).
+
+fetch(Pid, Topic) ->
+    gen_server2:call(Pid, {fetch, Topic}, ?TIMEOUT).
+
+clear(Pid, Topic) ->
+    gen_server2:cast(Pid, {clear, Topic}).
+
+%%----------------------------------------------------------------------------
+
+init([StoreMod, VHost]) ->
+    process_flag(trap_exit, true),
+    State = case StoreMod:recover(store_dir(), VHost) of
+                {ok, Store} -> #retainer_state{store = Store,
+                                               store_mod = StoreMod};
+                {error, _}  -> #retainer_state{store = StoreMod:new(store_dir(), VHost),
+                                               store_mod = StoreMod}
+            end,
+    {ok, State}.
+
+store_module() ->
+    case application:get_env(rabbitmq_mqtt, retained_message_store) of
+        {ok, Mod} -> Mod;
+        undefined -> undefined
+    end.
+
+%%----------------------------------------------------------------------------
+
+handle_cast({retain, Topic, Msg},
+    State = #retainer_state{store = Store, store_mod = Mod}) ->
+    ok = Mod:insert(Topic, Msg, Store),
+    {noreply, State};
+handle_cast({clear, Topic},
+    State = #retainer_state{store = Store, store_mod = Mod}) ->
+    ok = Mod:delete(Topic, Store),
+    {noreply, State}.
+
+handle_call({fetch, Topic}, _From,
+    State = #retainer_state{store = Store, store_mod = Mod}) ->
+    Reply = case Mod:lookup(Topic, Store) of
+                #retained_message{mqtt_msg = Msg} -> Msg;
+                not_found                         -> undefined
+            end,
+    {reply, Reply, State}.
+
+handle_info(stop, State) ->
+    {stop, normal, State};
+
+handle_info(Info, State) ->
+    {stop, {unknown_info, Info}, State}.
+
+store_dir() ->
+    rabbit_mnesia:dir().
+
+terminate(_Reason, #retainer_state{store = Store, store_mod = Mod}) ->
+    Mod:terminate(Store),
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl
new file mode 100644 (file)
index 0000000..17ee6d2
--- /dev/null
@@ -0,0 +1,60 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mqtt_retainer_sup).
+-behaviour(supervisor2).
+
+-export([start_link/1, init/1, start_child/2,start_child/1, child_for_vhost/1,
+         delete_child/1]).
+
+-define(ENCODING, utf8).
+
+-spec start_child(binary()) -> supervisor2:startchild_ret().
+-spec start_child(term(), binary()) -> supervisor2:startchild_ret().
+
+start_link(SupName) ->
+  supervisor2:start_link(SupName, ?MODULE, []).
+
+child_for_vhost(VHost) when is_binary(VHost) ->
+  case rabbit_mqtt_retainer_sup:start_child(VHost) of
+    {ok, Pid}                       -> Pid;
+    {error, {already_started, Pid}} -> Pid
+  end.
+
+start_child(VHost) when is_binary(VHost) ->
+  start_child(rabbit_mqtt_retainer:store_module(), VHost).
+
+start_child(RetainStoreMod, VHost) ->
+  supervisor2:start_child(?MODULE,
+    {binary_to_atom(VHost, ?ENCODING),
+      {rabbit_mqtt_retainer, start_link, [RetainStoreMod, VHost]},
+      permanent, 60, worker, [rabbit_mqtt_retainer]}).
+
+delete_child(VHost) ->
+  Id = binary_to_atom(VHost, ?ENCODING),
+  ok = supervisor2:terminate_child(?MODULE, Id),
+  ok = supervisor2:delete_child(?MODULE, Id).
+
+init([]) ->
+  Mod = rabbit_mqtt_retainer:store_module(),
+  rabbit_log:info("MQTT retained message store: ~p~n",
+    [Mod]),
+  {ok, {{one_for_one, 5, 5}, child_specs(Mod, rabbit_vhost:list())}}.
+
+child_specs(Mod, VHosts) ->
+  [{binary_to_atom(V, ?ENCODING),
+      {rabbit_mqtt_retainer, start_link, [Mod, V]},
+      permanent, infinity, worker, [rabbit_mqtt_retainer]} || V <- VHosts].
similarity index 52%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_sup.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl
index 38edcf9659ebf9599bfb364a1da36e23b00b531b..6a43cb583ca07a2e8c069b7ba0eb3e07ba6dbfeb 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt_sup).
 -behaviour(supervisor2).
 
--define(MAX_WAIT, 16#ffffffff).
+-include_lib("rabbit_common/include/rabbit.hrl").
 
 -export([start_link/2, init/1]).
 
--export([start_client/1, start_ssl_client/2]).
-
 start_link(Listeners, []) ->
     supervisor2:start_link({local, ?MODULE}, ?MODULE, [Listeners]).
 
 init([{Listeners, SslListeners0}]) ->
+    NumTcpAcceptors = application:get_env(rabbitmq_mqtt, num_tcp_acceptors, 10),
     {ok, SocketOpts} = application:get_env(rabbitmq_mqtt, tcp_listen_options),
-    {SslOpts, SslListeners}
+    {SslOpts, NumSslAcceptors, SslListeners}
         = case SslListeners0 of
-              [] -> {none, []};
+              [] -> {none, 0, []};
               _  -> {rabbit_networking:ensure_ssl(),
+                     application:get_env(rabbitmq_mqtt, num_ssl_acceptors, 1),
                      case rabbit_networking:poodle_check('MQTT') of
                          ok     -> SslListeners0;
                          danger -> []
@@ -40,47 +40,28 @@ init([{Listeners, SslListeners0}]) ->
     {ok, {{one_for_all, 10, 10},
           [{collector,
             {rabbit_mqtt_collector, start_link, []},
-            transient, ?MAX_WAIT, worker, [rabbit_mqtt_collector]},
-           {rabbit_mqtt_client_sup,
-            {rabbit_client_sup, start_link, [{local, rabbit_mqtt_client_sup},
-                                             {rabbit_mqtt_connection_sup, start_link, []}]},
-            transient, infinity, supervisor, [rabbit_client_sup]} |
+            transient, ?WORKER_WAIT, worker, [rabbit_mqtt_collector]},
+           {rabbit_mqtt_retainer_sup,
+            {rabbit_mqtt_retainer_sup, start_link, [{local, rabbit_mqtt_retainer_sup}]},
+             transient, ?SUPERVISOR_WAIT, supervisor, [rabbit_mqtt_retainer_sup]} |
            listener_specs(fun tcp_listener_spec/1,
-                          [SocketOpts], Listeners) ++
+                          [SocketOpts, NumTcpAcceptors], Listeners) ++
            listener_specs(fun ssl_listener_spec/1,
-                          [SocketOpts, SslOpts], SslListeners)]}}.
+                          [SocketOpts, SslOpts, NumSslAcceptors], SslListeners)]}}.
 
 listener_specs(Fun, Args, Listeners) ->
     [Fun([Address | Args]) ||
         Listener <- Listeners,
         Address  <- rabbit_networking:tcp_listener_addresses(Listener)].
 
-tcp_listener_spec([Address, SocketOpts]) ->
+tcp_listener_spec([Address, SocketOpts, NumAcceptors]) ->
     rabbit_networking:tcp_listener_spec(
       rabbit_mqtt_listener_sup, Address, SocketOpts,
-      mqtt, "MQTT TCP Listener",
-      {?MODULE, start_client, []}).
+      ranch_tcp, rabbit_mqtt_connection_sup, [],
+      mqtt, NumAcceptors, "MQTT TCP Listener").
 
-ssl_listener_spec([Address, SocketOpts, SslOpts]) ->
+ssl_listener_spec([Address, SocketOpts, SslOpts, NumAcceptors]) ->
     rabbit_networking:tcp_listener_spec(
-      rabbit_mqtt_listener_sup, Address, SocketOpts,
-      'mqtt/ssl', "MQTT SSL Listener",
-      {?MODULE, start_ssl_client, [SslOpts]}).
-
-start_client(Sock, SockTransform) ->
-    {ok, _, {KeepaliveSup, Reader}} =
-        supervisor2:start_child(rabbit_mqtt_client_sup, []),
-    ok = rabbit_net:controlling_process(Sock, Reader),
-    ok = gen_server2:cast(Reader, {go, Sock, SockTransform, KeepaliveSup}),
-
-    %% see comment in rabbit_networking:start_client/2
-    gen_event:which_handlers(error_logger),
-    Reader.
-
-start_client(Sock) ->
-    start_client(Sock, fun (S) -> {ok, S} end).
-
-start_ssl_client(SslOpts, Sock) ->
-    Transform = rabbit_networking:ssl_transform_fun(SslOpts),
-    start_client(Sock, Transform).
-
+      rabbit_mqtt_listener_sup, Address, SocketOpts ++ SslOpts,
+      ranch_ssl, rabbit_mqtt_connection_sup, [],
+      'mqtt/ssl', NumAcceptors, "MQTT SSL Listener").
similarity index 63%
rename from rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbit_mqtt_util.erl
rename to deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl
index 336e2a4062f70b5cc10d4dd837c4d96d5f2e8f52..542b68e4d343e817455bff169404955173a9e962 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_mqtt_util).
@@ -47,16 +47,31 @@ env(Key) ->
         undefined -> undefined
     end.
 
-coerce_env_value(default_pass, Val) -> to_binary(Val);
-coerce_env_value(default_user, Val) -> to_binary(Val);
-coerce_env_value(exchange, Val)     -> to_binary(Val);
-coerce_env_value(vhost, Val)        -> to_binary(Val);
+%% TODO: move to rabbit_common
+coerce_env_value(default_pass, Val) -> rabbit_data_coercion:to_binary(Val);
+coerce_env_value(default_user, Val) -> rabbit_data_coercion:to_binary(Val);
+coerce_env_value(exchange, Val)     -> rabbit_data_coercion:to_binary(Val);
+coerce_env_value(vhost, Val)        -> rabbit_data_coercion:to_binary(Val);
 coerce_env_value(_, Val)            -> Val.
 
-to_binary(Val) when is_list(Val) -> list_to_binary(Val);
-to_binary(Val)                   -> Val.
-
 table_lookup(undefined, _Key) ->
     undefined;
 table_lookup(Table, Key) ->
     rabbit_misc:table_lookup(Table, Key).
+
+vhost_name_to_dir_name(VHost) ->
+    vhost_name_to_dir_name(VHost, ".ets").
+vhost_name_to_dir_name(VHost, Suffix) ->
+    <<Num:128>> = erlang:md5(VHost),
+    "mqtt_retained_" ++ rabbit_misc:format("~36.16.0b", [Num]) ++ Suffix.
+
+path_for(Dir, VHost) ->
+  filename:join(Dir, vhost_name_to_dir_name(VHost)).
+
+path_for(Dir, VHost, Suffix) ->
+  filename:join(Dir, vhost_name_to_dir_name(VHost, Suffix)).
+
+
+vhost_name_to_table_name(VHost) ->
+  <<Num:128>> = erlang:md5(VHost),
+  list_to_atom("rabbit_mqtt_retained_" ++ rabbit_misc:format("~36.16.0b", [Num])).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_vhost_event_handler.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_vhost_event_handler.erl
new file mode 100644 (file)
index 0000000..7af7899
--- /dev/null
@@ -0,0 +1,49 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%
+
+-module(rabbit_mqtt_vhost_event_handler).
+
+-behaviour(gen_event).
+
+-export([init/1, handle_event/2, handle_call/2, handle_info/2, terminate/2, code_change/3]).
+
+-import(rabbit_misc, [pget/2]).
+
+init([]) ->
+  {ok, []}.
+
+handle_event({event, vhost_created, Info, _, _}, State) ->
+  Name = pget(name, Info),
+  rabbit_mqtt_retainer_sup:child_for_vhost(Name),
+  {ok, State};
+handle_event({event, vhost_deleted, Info, _, _}, State) ->
+  Name = pget(name, Info),
+  rabbit_mqtt_retainer_sup:delete_child(Name),
+  {ok, State};
+handle_event(_Event, State) ->
+  {ok, State}.
+
+handle_call(_Request, State) ->
+  {ok, State}.
+
+handle_info(_Info, State) ->
+  {ok, State}.
+
+terminate(_Reason, _State) ->
+  ok.
+
+code_change(_OldVsn, State, _Extra) ->
+  {ok, State}.
diff --git a/deps/rabbitmq_mqtt/src/rabbitmq_mqtt.app.src b/deps/rabbitmq_mqtt/src/rabbitmq_mqtt.app.src
new file mode 100644 (file)
index 0000000..a579999
--- /dev/null
@@ -0,0 +1,25 @@
+{application, rabbitmq_mqtt,
+ [{description, "RabbitMQ MQTT Adapter"},
+  {vsn, "3.6.6"},
+  {modules, []},
+  {registered, []},
+  {mod, {rabbit_mqtt, []}},
+  {env, [{default_user, <<"guest">>},
+         {default_pass, <<"guest">>},
+         {ssl_cert_login,false},
+         %% To satisfy an unfortunate expectation from popular MQTT clients.
+         {allow_anonymous, true},
+         {vhost, <<"/">>},
+         {exchange, <<"amq.topic">>},
+         {subscription_ttl, 86400000}, %% 24 hours
+         {retained_message_store, rabbit_mqtt_retained_msg_store_dets},
+         %% only used by DETS store
+         {retained_message_store_dets_sync_interval, 2000},
+         {prefetch, 10},
+         {ssl_listeners, []},
+         {num_ssl_acceptors, 1},
+         {tcp_listeners, [1883]},
+         {num_tcp_acceptors, 10},
+         {tcp_listen_options, [{backlog,   128},
+                               {nodelay,   true}]}]},
+  {applications, [kernel, stdlib, rabbit_common, rabbit, ranch, amqp_client]}]}.
diff --git a/deps/rabbitmq_recent_history_exchange/CODE_OF_CONDUCT.md b/deps/rabbitmq_recent_history_exchange/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md b/deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
similarity index 94%
rename from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-eldap
rename to deps/rabbitmq_recent_history_exchange/LICENSE.md
index 1f6200918f7b8b90047ae33eac3c175507dd76b1..2f364a147bdea6b2fcd4b57575f72b3dbcd0bf0d 100644 (file)
@@ -1,6 +1,7 @@
+The MIT License
+
+Copyright (c) 2011 Alvaro Videla
 
-Copyright (c) 2010, Torbjorn Tornkvist
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
@@ -17,5 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
+THE SOFTWARE.
\ No newline at end of file
diff --git a/deps/rabbitmq_recent_history_exchange/Makefile b/deps/rabbitmq_recent_history_exchange/Makefile
new file mode 100644 (file)
index 0000000..ac7f04f
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_recent_history_exchange
+
+DEPS = rabbit_common rabbit
+TEST_DEPS = rabbitmq_ct_helpers amqp_client
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_recent_history_exchange/README.md b/deps/rabbitmq_recent_history_exchange/README.md
new file mode 100644 (file)
index 0000000..1a262da
--- /dev/null
@@ -0,0 +1,68 @@
+# RabbitMQ Recent History Cache
+
+Keeps track of the last 20 messages that passed through the exchange. Every time a queue is bound to the exchange it delivers that last 20 messages to them. This is useful for implementing a very simple __Chat History__ where clients that join the conversation can get the latest messages.
+
+Exchange Type: `x-recent-history`
+
+## Installation ##
+
+Install the corresponding .ez files from our
+[Community Plugins page](http://www.rabbitmq.com/community-plugins.html).
+
+Then run the following command:
+
+```bash
+rabbitmq-plugins enable rabbitmq_recent_history_exchange
+```
+
+## Building from Source ##
+
+Install and setup the RabbitMQ Public Umbrella as explained here: [http://www.rabbitmq.com/plugin-development.html#getting-started](http://www.rabbitmq.com/plugin-development.html#getting-started).
+
+Then `cd` into the umbrella folder and type:
+
+    $ git clone git://github.com/videlalvaro/rabbitmq-recent-history-exchange.git
+    $ cd rabbitmq-recent-history-exchange
+    $ make
+
+Finally copy all the `*.ez` files inside the `dist` folder to the `$RABBITMQ_HOME/plugins` folder. Don't copy the file `rabbit_common-x.y.z` since it's not needed inside the broker installation.
+
+## Usage ##
+
+### Creating an exchange  ###
+
+To create a _recent history exchange_, just declare an exchange providing the type `"x-recent-history"`.
+
+```java
+channel.exchangeDeclare("logs", "x-recent-history");
+```
+
+### Providing a custom history length ###
+
+Typically this exchange will store the latest 20 messages sent over
+the exchange. If you want to set a different cache length, then you
+can pass a `"x-recent-history-length"` argument to `exchange.declare`.
+The argument must be an integer greater or equal to zero.
+
+For example in Java:
+
+```java
+Map<String, Object> args = new HashMap<String, Object>();
+args.put("x-recent-history-length", 60);
+channel.exchangeDeclare("rh", "x-recent-history", false, false, args);
+```
+
+### Preventing some messages from being stored ###
+
+In case you would like to not store certain messages, just
+add the header `"x-recent-history-no-store"` with the value `true` to
+the message.
+
+## Disabling the Plugin ##
+
+A future version of RabbitMQ will allow users to disable plugins. When
+you disable this plugin, it will delete all the cached messages.
+
+## License
+
+See LICENSE.md
diff --git a/deps/rabbitmq_recent_history_exchange/erlang.mk b/deps/rabbitmq_recent_history_exchange/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = Guard macros for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# Pretty-print one package's metadata block for the `search` target.
+# $(1) is the package key; the "Pkg name" line is printed only when the key
+# differs from the package's app name (pkg_$(1)_name). The trailing "" emits
+# a blank separator line between consecutive packages.
+# NOTE: no comments inside the define — its body is expanded verbatim into
+# a recipe.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# Usage: make search q=string
+# Case-insensitively matches q against each package's name and description
+# (core_lc lower-cases both sides); without q, prints the whole index.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+# OTP_DEPS was the old name for declaring OTP applications as dependencies.
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies listed here are excluded from the build.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Point rebar-based dependencies at the same deps directory we use.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Resolve a dependency's effective name/repo/commit: an explicit dep_* entry
+# in the user's Makefile wins over the built-in package index (pkg_*_*).
+# dep_repo also rewrites legacy git:// GitHub URLs to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# Immediate subdirectories of APPS_DIR / the resolved dependency directories.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Make apps/ and deps/ visible to the Erlang code loader, unless the user's
+# ERL_LIBS already contains them.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# V=0: terse " DEP " progress lines; V=2: trace recipe commands via set -x.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+# A sub-make building a single app must not recurse into its siblings.
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+# apps.log records apps already visited during this invocation so each one
+# is built at most once across recursive makes.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+# SKIP_DEPS set: provide an empty target so `make deps` is a no-op.
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# deps.log records dependencies already built during this invocation so each
+# one is visited at most once across recursive makes. A dependency without
+# any Makefile variant is a hard error (exit 2).
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+#
+# Decide how to patch a freshly fetched dependency $(1):
+#   - has its own erlang.mk: regenerate its .app.src and redirect its
+#     erlang.mk include at our copy;
+#   - has a Makefile that includes other .mk files, or that mentions rebar
+#     (directly or via any non-erlang.mk *.mk file): treat as rebar-based
+#     and hand off to dep_autopatch2;
+#   - has a plain Makefile: only regenerate the .app file;
+#   - no Makefile: stub one out (noop) when there is no src/ directory,
+#     otherwise fall back to the dep_autopatch2 path.
+# NOTE: no comments inside the define — its body is expanded verbatim into
+# a recipe.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# Patch a non-erlang.mk dependency $(1): regenerate its .app.src (evaluating
+# src/$(1).app.src.script first when present), then either run it through
+# rebar (when any rebar artifact exists) or generate a minimal erlang.mk
+# Makefile for it.
+# Fixes vs. previous revision: argument references normalized from $1 to
+# $(1) for consistency with the surrounding defines, and the obsolescent
+# `test -o` operator replaced with POSIX-safe `[ ... ] || [ ... ]` chains.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$(1)/src/$(1).app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar ] || [ -f $(DEPS_DIR)/$(1)/rebar.config ] || [ -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# Give dependency $(1) a do-nothing Makefile so recursive builds succeed.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+# Replace the dep's bundled erlang.mk with a one-line include of the
+# top-level copy, so every dependency builds with the same erlang.mk
+# version. Set NO_AUTOPATCH_ERLANG_MK to keep the bundled copy (the
+# fallback define below is a deliberate no-op).
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# Generate a minimal erlang.mk-based Makefile for dependency $(1), used when
+# the dep has sources but no usable build system of its own.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build rebar into $(ERLANG_MK_TMP)/rebar (only once per tmp dir),
+# pinned to a fixed commit for reproducible autopatching.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# Convert rebar-based dependency $(1) to erlang.mk: preserve its original
+# Makefile as Makefile.orig.mk, translate its rebar configuration via the
+# dep_autopatch_rebar.erl script, and drop any prebuilt ebin/$(1).app so it
+# is regenerated by the new build.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl: Erlang snippet (run via $(call erlang,...)) that
+# rewrites the dependency's ebin/$1.app so its {modules, ...} entry lists
+# exactly the modules found under the dep's src/ tree (recursive fold over
+# files matching \.erl$). No-op when the .app file is missing.
+# NOTE(review): the erlang helper collapses newlines when building the -eval
+# string, so '%' comments must never be added inside this define body.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluate the dep's src/$1.app.src.script
+# with an empty binding set and overwrite src/$1.app.src with the resulting
+# term, so later steps only ever deal with a plain .app.src file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalize the dep's .app.src (falling back to the
+# prebuilt ebin/$1.app when no .app.src exists): empty the modules list (it is
+# regenerated at build time), turn {vsn, git} into the literal "git" string,
+# make sure a registered entry exists, then write the result to
+# src/$1.app.src, deleting the input file when it was a different path.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch method 'git': clone without checking out a working tree (-n), then
+# check out the pinned branch/tag/ref recorded by dep_commit.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Fetch method 'git-submodule': the dependency already lives as a submodule
+# of this repository; just initialize/update it in place.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Fetch method 'hg': clone without updating a working copy (-U), then update
+# to the pinned revision recorded by dep_commit.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Fetch method 'svn': a plain checkout of the configured repository URL
+# (any revision pinning is expected to be part of the URL itself).
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Fetch method 'cp': recursively copy a local directory into DEPS_DIR, for
+# dependencies available on the local filesystem.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# dep_fetch_hex.erl: download package $(1) version $(2) from the hex.pm S3
+# tarball mirror over HTTPS, then unpack the inner contents.tar.gz into the
+# dep's directory. The HTTP status is pattern-matched against 200, so any
+# other response crashes the snippet — that is the intended failure mode.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# The version is the second word of the dep_$(1) spec ("hex <version>").
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback fetch method: report the unknown/invalid dependency on stderr and
+# abort. Exit code 78 matches sysexits(3) EX_CONFIG (configuration error).
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Old format: "dep_name = <repo> [<commit>]" with no explicit method word.
+# Emits a deprecation warning, clones the repo and checks out the second word
+# of the spec, defaulting to master when absent.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch method name for dependency $(1):
+#   - dep_$(1) is defined and its first word names a dep_fetch_* method
+#     -> use that method;
+#   - dep_$(1) is defined with an unknown first word -> 'legacy' when we are
+#     building as a dependency (IS_DEP), otherwise 'fail';
+#   - dep_$(1) is undefined -> look up the Erlang.mk package index
+#     ($(PACKAGES)) and use its recorded fetch method, else 'fail'.
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# dep_target: template (instantiated via $(eval $(call dep_target,<dep>)))
+# that generates the fetch/bootstrap rule for one dependency. The rule:
+#   1. refuses a dep that clashes with an application in $(APPS_DIR);
+#   2. fetches via the method resolved by dep_fetch;
+#   3. runs autoreconf when configure.ac/.in exists without a configure
+#      script, then ./configure (failure tolerated via the leading '-');
+#   4. autopatches the dep unless listed in $(NO_AUTOPATCH), with special
+#      compatibility shims for amqp_client/rabbit (cloning rabbitmq-codegen
+#      and rabbitmq-server instead of autopatching).
+# NOTE(review): '$$' escapes survive the outer $(eval) expansion; do not
+# "simplify" them.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch rule (dep_target) per build-time and runtime dep.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into each application in $(ALL_APPS_DIRS), unless
+# we are already running inside one of them (IS_APP guards the recursion).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean removes the whole dependency tree unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin: -include a makefile shipped inside dependency $(2) at
+# relative path $(1), and make that file an order-only product of the dep
+# being fetched first (empty-recipe rule).
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# A plugin spec containing a slash ("dep/path.mk") includes that exact file;
+# a bare dependency name includes the dep's default plugins.mk.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL template compilation plugin.
+
+# Configuration.
+
+# DTL_FULL_PATH non-empty: module names encode the template path (slashes
+# become underscores); otherwise only the basename is used.
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# erlydtl_compile.erl: compile each template in the space-separated list $(1)
+# into ebin/ as module <name>$(DTL_SUFFIX). Accepts both ok and {ok, _} from
+# erlydtl:compile; any other result crashes (intended failure mode).
+# NOTE(review): run via $(call erlang,...), which joins lines — no '%'
+# comments inside the define body.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Compile only the templates newer than the .app file ($? = out-of-date
+# prerequisites); erlydtl itself is loaded from DEPS_DIR via -pa.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin (erlang_protobuffs-based).
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate .erl/.hrl from the .proto files in $(1), compile the generated
+# sources into ebin/, then drop the intermediate .erl files.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# compile_proto.erl: for each .proto file, emit headers next to it under
+# <dir>/include and sources under <dir>/ebin. NOTE(review): run via
+# $(call erlang,...) — no '%' comments inside the define body.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Only .proto files newer than the .app file are recompiled ($?).
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Application build: compiler options and per-tool verbosity helpers.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+# Modules (basenames, no extension) that must be compiled before the rest,
+# e.g. behaviours; and modules to exclude from compilation entirely.
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# Pattern: <tool>_verbose_0 prints a terse line (V=0), <tool>_verbose_2
+# traces commands with 'set -x' (V=2), and V=1 leaves commands visible.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# When ebin/test exists the previous build was a test build, so force a
+# clean rebuild first; otherwise build incrementally.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file: template for the generated ebin/$(PROJECT).app resource file.
+# $(1) = commit id (only when built as a dependency), $(2) = module list.
+# Without a src/$(PROJECT_MOD).erl the app is a library application (no
+# {mod, ...} entry); with one, registered processes and the callback module
+# are included. The body is written verbatim into the .app file, so no
+# comments may be placed inside the define.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+# Generated .erl files land in src/ so the normal Erlang rules pick them up;
+# .hrl and .asn1db artifacts are moved into include/.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+# Two passes: .mib -> priv/mibs/*.bin, then .bin -> include/*.hrl.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+# Scanner/parser generators: .xrl/.yrl produce .erl files in src/ which are
+# then compiled by the regular Erlang rules.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl: build the inter-module dependency file $(1) (= $(PROJECT).d).
+# Scans every file in $(ERL_FILES) form-by-form, recording edges for
+# behaviours, parse_transforms, -include/-include_lib headers and -import'ed
+# local modules. Emits one "file:: deps; @touch $@" line per dependent file
+# plus a COMPILE_FIRST list derived from a topological sort (acyclic digraph)
+# of module dependencies. NOTE(review): run via $(call erlang,...), which
+# joins lines — no '%' comments inside the define body.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Generate $(PROJECT).d (makedep) unless NO_MAKEDEP is set and the file
+# already exists. The recipe runs the makedep.erl escript defined above.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# A timestamp file tracks the last Makefile change; when any makefile is
+# newer, every source file is touched so it recompiles on the next pass.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+# Dependency file generated above; absent on first run, hence "-include".
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# Compile Erlang sources into ebin/. When built as a dependency (IS_DEP),
+# -Werror is stripped so a dep's warnings cannot fail the parent build.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build the .app file. Only changed sources ($?) are recompiled; the
+# modules list and git-describe id are substituted into the .app.src.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+# No .app.src: generate the whole .app file from the app_file template.
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+# .app.src present: it must contain an (empty) modules entry to fill in.
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove all build products derived from Erlang/ASN.1/MIB/xrl/yrl sources.
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch and build the documentation-only dependencies (DOC_DEPS).
+# BUG FIX: the phony declaration said "docs-deps" while the actual target
+# is "doc-deps", so the target was never marked phony and a file named
+# doc-deps could mask it. Keep the old name for compatibility and add the
+# real one.
+.PHONY: docs-deps doc-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+# NOTE(review): unlike test-deps below, the sub-make is not passed
+# IS_DEP=1, so doc deps build with unfiltered ERLC_OPTS — confirm intended.
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch and build the release-only dependencies (REL_DEPS).
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+# NOTE(review): sub-make is not passed IS_DEP=1 (test-deps does) — confirm.
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+# Test builds always carry debug_info and define the TEST macro.
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+# IS_DEP=1 strips -Werror when building test dependencies.
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases for test-build:
+#   - no src/: only compile the test directory;
+#   - src/ but ebin/ was last built without TEST: clean and rebuild all,
+#     then mark ebin/ with an ebin/test stamp file;
+#   - ebin/test stamp present: incremental rebuild is safe.
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+# NOTE(review): wildcard is evaluated at parse time, so beams created by
+# this same make invocation are not removed — confirm intended.
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Generate a rebar.config equivalent to this erlang.mk configuration so
+# the project can also be consumed by rebar-based builds.
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Normalize ", " separators so each erlc option is a single make word.
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Convert one erlc option: drop -Werror, strip the leading '+' from
+# +option style flags; everything else is discarded.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Render each DEPS entry: hex deps become {name,"vsn"}, everything else
+# a {git, Repo, Commit} tuple.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Exported through the environment so the multi-line value survives the
+# shell invocation in the recipe below.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Build user guide (PDF + chunked HTML) and man pages from AsciiDoc
+# sources using a2x. Each sub-target is a no-op when its sources are absent.
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+# Render each manual page with a2x, then sort the generated pages into
+# doc/manN/ by section and gzip them.
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+# Install the gzipped man pages under MAN_INSTALL_PATH.
+# BUG FIX: install(1) takes a GROUP after -g and an OWNER after -o, but
+# the arguments were swapped (`-g id -u` / `-o id -g`), passing the uid
+# as a group and the gid as an owner. That only worked by coincidence on
+# systems where uid == gid.
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove all asciidoc build products (guide HTML/PDF and man pages).
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# Extend the global help output with the bootstrap/template targets.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+# Bootstrap file templates. $p is the project name, set via $(eval p := …)
+# by the bootstrap targets before rendering. Comments must stay outside
+# the define bodies: anything inside becomes part of the generated file.
+
+# .app.src skeleton for an application (has an application callback module).
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+# .app.src skeleton for a library (no {mod, …} entry, nothing to start).
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile for an app living under $(APPS_DIR); includes the top-level
+# erlang.mk via a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# Application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+# relx release configuration skeleton.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+# Normal templates, rendered by the "new" target. $(n) is the module
+# name passed as n=NAME. Comments must stay outside the define bodies.
+
+# OTP supervisor skeleton (one_for_one, no children).
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# OTP gen_server skeleton with all callbacks stubbed.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+# Empty module skeleton.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy HTTP handler skeleton (cowboy 1.x callback set).
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# OTP gen_fsm skeleton with all callbacks stubbed.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+# Cowboy loop handler skeleton (5s timeout, hibernates between messages).
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Cowboy REST handler skeleton serving text/html.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy websocket handler skeleton (echoes text/binary frames).
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Ranch protocol skeleton (spawned acceptor with trivial loop).
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render template $(1) into file $(2) through printf, escaping newlines,
+# percent signs and single quotes, and replacing tabs with $(WS).
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace written for each template tab: SP spaces when SP
+# is set, otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Create a fresh OTP application skeleton in the current directory.
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Create a fresh OTP library skeleton (no app/supervisor modules).
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config plus rel/sys.config and rel/vm.args.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application skeleton under $(APPS_DIR)/$(in).
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library skeleton under $(APPS_DIR)/$(in).
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Render a single module from template t=TPL as src/$(n).erl; with
+# in=APP, recurse into that application's directory instead.
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# List every tpl_* variable currently defined.
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+# "shared" builds a NIF/driver library; anything else an executable.
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+# Position-independent code everywhere except Windows.
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+# ERTS/erl_interface paths are provided by the generated $(C_SRC_ENV).
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three cases: no c_src/ at all (nothing to do); c_src/Makefile present
+# (delegate to it); otherwise compile and link the C/C++ sources here.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link step; -shared only when building a shared library.
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# Ask the local Erlang VM for its ERTS/erl_interface paths and cache
+# them in $(C_SRC_ENV), which is then included into this make run.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# C-side NIF skeleton: load/upgrade/unload lifecycle plus a hello/1 NIF.
+# (No comments inside the define: they would end up in the generated .c.)
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+# Erlang-side NIF stub: loads the shared object from priv/ on module load.
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Render both halves of a new NIF named n=NAME; with in=APP, recurse
+# into that application's directory instead.
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# CI support: build each Erlang/OTP version listed in CI_OTP with kerl
+# and run the test suite against it.
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+# NOTE(review): points at the yrashk fork of kerl, not the upstream repo
+# — confirm this is intentional.
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+# Hook for projects to run extra setup before each per-OTP test run.
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# Per-OTP-version target: put that version first on PATH and run tests.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# Build and install an OTP version with kerl unless already installed.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# common_test plugin: discover *_SUITE.erl files and generate one
+# ct-<suite> target per suite, plus an aggregate "ct" target.
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+# No suites here: only recurse into apps (unless already inside one).
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+# Run ct in every application directory; IS_APP=1 prevents re-recursion.
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=group or t=group:case narrows the run to a group and optional case.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Escript that keeps only -D/-I/-pa options (and their arguments) from
+# the plain arguments, so ERLC_OPTS can be forwarded to dialyzer.
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# The recipe after the endif attaches to whichever dialyze rule was
+# defined above: with a PLT present no prerequisite, otherwise build it.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_recent_history_exchange/etc/rabbit-hare.config b/deps/rabbitmq_recent_history_exchange/etc/rabbit-hare.config
new file mode 100644 (file)
index 0000000..3574dfb
--- /dev/null
@@ -0,0 +1,3 @@
+%% -*- erlang -*-
+%% Note - we still need this for rabbit_exchange_type_recent_history_test_util:plugin_dir/0 to work...
+[].
diff --git a/deps/rabbitmq_recent_history_exchange/etc/rabbit-test.config b/deps/rabbitmq_recent_history_exchange/etc/rabbit-test.config
new file mode 100644 (file)
index 0000000..3574dfb
--- /dev/null
@@ -0,0 +1,3 @@
+%% -*- erlang -*-
+%% Note - we still need this for rabbit_exchange_type_recent_history_test_util:plugin_dir/0 to work...
+[].
diff --git a/deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl b/deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl
new file mode 100644 (file)
index 0000000..1640154
--- /dev/null
@@ -0,0 +1,19 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 2.0 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ Delayed Message
+%%
+%%  The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-define(KEEP_NB, 20).
+-define(RH_TABLE, rh_exchange_table).
+-record(cached, {key, content}).
diff --git a/deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk b/deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+# NOTE(review): assumes each dep_<name> value is of the form
+# "git_rmq <repo-name> <ref>...", so word 2 is the repository name —
+# confirm against the dep_* definitions (declared elsewhere in this
+# file).
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+# Words 3..N of dep_<name>: the refs to try, in order, when checking
+# out the component; falls back to pkg_<name>_commit.
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+# Fetch a component: try the URL derived from this clone's "origin"
+# first (unless it is identical to the current project's own URL),
+# then fall back to the upstream GitHub URL; check out the first ref
+# from dep_rmq_commits that exists, and finally set a distinct push
+# URL when it differs from the fetch URL. ($$$$ is needed because the
+# macro is expanded twice before the shell sees a literal "$".)
+define dep_fetch_git_rmq
+	fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+	fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+	if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+	 git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+	    fetch_url="$$$$fetch_url1"; \
+	    push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+	elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+	    fetch_url="$$$$fetch_url2"; \
+	    push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+	fi; \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+	$(foreach ref,$(call dep_rmq_commits,$(1)), \
+	  git checkout -q $(ref) >/dev/null 2>&1 || \
+	  ) \
+	(echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+	  1>&2 && false) ) && \
+	(test "$$$$fetch_url" = "$$$$push_url" || \
+	 git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+# Double-colon no-op defaults so other makefiles can append their own
+# distribution steps.
+list-dist-deps::
+	@:
+
+prepare-dist::
+	@:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+# Canonical copy of this file, as shipped with rabbit_common.
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+# Fail when the local rabbitmq-components.mk drifts from the canonical
+# copy in rabbit_common.
+check-rabbitmq-components.mk:
+	$(verbose) cmp -s rabbitmq-components.mk \
+		$(UPSTREAM_RMQ_COMPONENTS_MK) || \
+		(echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+		  false)
+
+# Refresh the local copy from rabbit_common (a no-op inside
+# rabbit_common itself); commit the change when DO_COMMIT=yes.
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+	@:
+else
+rabbitmq-components-mk:
+	$(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+	$(verbose) git diff --quiet rabbitmq-components.mk \
+	|| git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
diff --git a/deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl b/deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl
new file mode 100644 (file)
index 0000000..7815a99
--- /dev/null
@@ -0,0 +1,212 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 2.0 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ Delayed Message
+%%
+%%  The Initial Developer of the Original Code is Pivotal Software, Inc.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+%% Implementation of the "x-recent-history" exchange type: messages
+%% routed through the exchange are also cached in an Mnesia table so
+%% they can be replayed to newly bound destinations.
+-module(rabbit_exchange_type_recent_history).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+-include("rabbit_recent_history.hrl").
+
+-behaviour(rabbit_exchange_type).
+
+-import(rabbit_misc, [table_lookup/2]).
+
+%% rabbit_exchange_type behaviour callbacks.
+-export([description/0, serialise_events/0, route/2]).
+-export([validate/1, validate_binding/2, create/2, delete/3, add_binding/3,
+         remove_bindings/3, assert_args_equivalence/2, policy_changed/2]).
+%% Boot-step / cleanup entry points.
+-export([setup_schema/0, disable_plugin/0]).
+
+%% Register the exchange type under <<"x-recent-history">> at boot;
+%% disable_plugin/0 runs as the cleanup step when the plugin is
+%% disabled.
+-rabbit_boot_step({?MODULE,
+                   [{description, "exchange type x-recent-history"},
+                    {mfa, {rabbit_registry, register,
+                           [exchange, <<"x-recent-history">>, ?MODULE]}},
+                    {cleanup, {?MODULE, disable_plugin, []}},
+                    {requires, rabbit_registry},
+                    {enables, kernel_ready}]}).
+
+%% Create the Mnesia cache table once the database boot step is done.
+-rabbit_boot_step({rabbit_exchange_type_recent_history_mnesia,
+                   [{description, "recent history exchange type: mnesia"},
+                    {mfa, {?MODULE, setup_schema, []}},
+                    {requires, database},
+                    {enables, external_infrastructure}]}).
+
+%% AMQP field types accepted as the 'x-recent-history-length' value.
+-define(INTEGER_ARG_TYPES, [byte, short, signedint, long]).
+
+%% Exchange-type metadata shown by management/CLI tooling.
+description() ->
+    [{name, <<"x-recent-history">>},
+     {description, <<"Recent History Exchange.">>}].
+
+%% Binding events need not be serialised for this exchange type.
+serialise_events() -> false.
+
+%% Cache the message (subject to the optional
+%% 'x-recent-history-length' exchange argument), then route; matching
+%% on ['_'] presumably delivers to every binding regardless of routing
+%% key (fanout-like) — confirm against rabbit_router.
+route(#exchange{name      = XName,
+                arguments = Args},
+      #delivery{message = Message}) ->
+    Length = table_lookup(Args, <<"x-recent-history-length">>),
+    maybe_cache_msg(XName, Message, Length),
+    rabbit_router:match_routing_key(XName, ['_']).
+
+%% Validate the optional 'x-recent-history-length' exchange argument:
+%% when present it must be an AMQP integer field type with a positive
+%% value, otherwise a precondition_failed protocol error is raised.
+validate(#exchange{arguments = Args}) ->
+    case table_lookup(Args, <<"x-recent-history-length">>) of
+        undefined   ->
+            ok;
+        {Type, Val} ->
+            case check_int_arg(Type) of
+                ok when Val > 0 ->
+                    ok;
+                _ ->
+                    rabbit_misc:protocol_error(precondition_failed,
+                                               "Invalid argument ~p, "
+                                               "'x-recent-history-length' "
+                                               "must be a positive integer",
+                                               [Val])
+            end
+    end.
+
+%% No binding-time, creation-time or policy-change work is required.
+validate_binding(_X, _B) -> ok.
+create(_Tx, _X) -> ok.
+policy_changed(_X1, _X2) -> ok.
+
+%% Drop this exchange's cache entry when the exchange is deleted; only
+%% the transaction phase does any work.
+delete(transaction, #exchange{ name = XName }, _Bs) ->
+    rabbit_misc:execute_mnesia_transaction(
+      fun() ->
+              mnesia:delete(?RH_TABLE, XName, write)
+      end),
+    ok;
+delete(none, _Exchange, _Bs) ->
+    ok.
+
+%% On a new binding (transaction phase), replay the cached messages to
+%% the bound destination: a queue receives the cache directly, while
+%% an exchange has every cached message re-routed through it to the
+%% resulting queues. The 'none' phase is a no-op.
+add_binding(transaction, #exchange{ name = XName },
+            #binding{ destination = #resource{kind = queue} = QName }) ->
+    case rabbit_amqqueue:lookup(QName) of
+        {error, not_found} ->
+            destination_not_found_error(QName);
+        {ok, Q} ->
+            Msgs = get_msgs_from_cache(XName),
+            deliver_messages([Q], Msgs)
+    end,
+    ok;
+add_binding(transaction, #exchange{ name = XName },
+            #binding{ destination = #resource{kind = exchange} = DestName }) ->
+    case rabbit_exchange:lookup(DestName) of
+        {error, not_found} ->
+            destination_not_found_error(DestName);
+        {ok, X} ->
+            Msgs = get_msgs_from_cache(XName),
+            [begin
+                 Delivery = rabbit_basic:delivery(false, false, Msg, undefined),
+                 Qs = rabbit_exchange:route(X, Delivery),
+                 %% NOTE(review): when the destination exchange routes a
+                 %% message to no queues, lookup/1 returns [] and this
+                 %% is reported as a missing destination — confirm that
+                 %% is intended.
+                 case rabbit_amqqueue:lookup(Qs) of
+                     [] ->
+                         destination_not_found_error(Qs);
+                     QPids ->
+                         deliver_messages(QPids, [Msg])
+                 end
+             end || Msg <- Msgs]
+    end,
+    ok;
+add_binding(none, _Exchange, _Binding) ->
+    ok.
+
+remove_bindings(_Tx, _X, _Bs) -> ok.
+
+%% Delegate argument-equivalence checks to the generic implementation.
+assert_args_equivalence(X, Args) ->
+    rabbit_exchange:assert_args_equivalence(X, Args).
+
+%%----------------------------------------------------------------------------
+
+%% Create the Mnesia cache table as a RAM copy on this node and wait
+%% up to 30s for it to become available. create_table/add_table_copy
+%% results are ignored, so an already-existing table is tolerated.
+setup_schema() ->
+    mnesia:create_table(?RH_TABLE,
+                             [{attributes, record_info(fields, cached)},
+                              {record_name, cached},
+                              {type, set}]),
+    mnesia:add_table_copy(?RH_TABLE, node(), ram_copies),
+    mnesia:wait_for_tables([?RH_TABLE], 30000),
+    ok.
+
+%% Boot-step cleanup: unregister the exchange type and drop the cache
+%% table.
+disable_plugin() ->
+    rabbit_registry:unregister(exchange, <<"x-recent-history">>),
+    mnesia:delete_table(?RH_TABLE),
+    ok.
+
+%%----------------------------------------------------------------------------
+%% Private helpers
+%% Cache the message unless the publisher set the
+%% 'x-recent-history-no-store' header to {bool, true}.
+maybe_cache_msg(XName,
+                #basic_message{content =
+                               #content{properties =
+                                        #'P_basic'{headers = Headers}}}
+                = Message,
+                Length) ->
+    case Headers of
+        undefined ->
+            cache_msg(XName, Message, Length);
+        _ ->
+            Store = table_lookup(Headers, <<"x-recent-history-no-store">>),
+            case Store of
+                {bool, true} ->
+                    ok;
+                _ ->
+                    cache_msg(XName, Message, Length)
+            end
+    end.
+
+%% Append Message to the cache for XName inside an Mnesia transaction.
+%% NOTE(review): get_msgs_from_cache/1 opens its own (nested)
+%% transaction here; a plain mnesia:read/2 would avoid the nesting —
+%% confirm this is deliberate.
+cache_msg(XName, Message, Length) ->
+    rabbit_misc:execute_mnesia_transaction(
+      fun () ->
+              Cached = get_msgs_from_cache(XName),
+              store_msg(XName, Cached, Message, Length)
+      end).
+
+%% Read the cached messages for XName (newest first, per store_msg0);
+%% [] when the exchange has no cache entry.
+get_msgs_from_cache(XName) ->
+    rabbit_misc:execute_mnesia_transaction(
+      fun () ->
+              case mnesia:read(?RH_TABLE, XName) of
+                  [] ->
+                      [];
+                  [#cached{key = XName, content=Cached}] ->
+                      Cached
+              end
+      end).
+
+%% Use the configured history length, or ?KEEP_NB when the exchange
+%% was declared without an 'x-recent-history-length' argument.
+store_msg(Key, Cached, Message, undefined) ->
+    store_msg0(Key, Cached, Message, ?KEEP_NB);
+store_msg(Key, Cached, Message, {_Type, Length}) ->
+    store_msg0(Key, Cached, Message, Length).
+
+%% Prepend the new message, trimming the list to Length entries
+%% (newest first). Must run inside an Mnesia transaction.
+store_msg0(Key, Cached, Message, Length) ->
+    mnesia:write(?RH_TABLE,
+                 #cached{key     = Key,
+                         content = [Message|lists:sublist(Cached, Length-1)]},
+                 write).
+
+%% Deliver Msgs to the queues Qs, oldest first (the cache is stored
+%% newest first, hence the reverse).
+deliver_messages(Qs, Msgs) ->
+    lists:map(
+      fun (Msg) ->
+              Delivery = rabbit_basic:delivery(false, false, Msg, undefined),
+              rabbit_amqqueue:deliver(Qs, Delivery)
+      end, lists:reverse(Msgs)).
+
+%% Raise an internal_error protocol error for a missing destination.
+destination_not_found_error(DestName) ->
+    rabbit_misc:protocol_error(
+      internal_error,
+      "could not find queue/exchange '~s'",
+      [DestName]).
+
+%% adapted from rabbit_amqqueue.erl
+%% ok when Type is one of the accepted AMQP integer field types.
+check_int_arg(Type) ->
+    case lists:member(Type, ?INTEGER_ARG_TYPES) of
+        true  -> ok;
+        false -> {error, {unacceptable_type, Type}}
+    end.
diff --git a/deps/rabbitmq_recent_history_exchange/src/rabbitmq_recent_history_exchange.app.src b/deps/rabbitmq_recent_history_exchange/src/rabbitmq_recent_history_exchange.app.src
new file mode 100644 (file)
index 0000000..658a6a2
--- /dev/null
@@ -0,0 +1,6 @@
+%% OTP application resource file for the recent-history exchange
+%% plugin. The modules list is left empty here (presumably filled in
+%% at build time — confirm against the build tooling).
+{application, rabbitmq_recent_history_exchange,
+ [{description, "RabbitMQ Recent History Exchange"},
+  {vsn, "1.2.1"},
+  {modules, []},
+  {registered, []},
+  {applications, [kernel, stdlib, rabbit_common, rabbit, mnesia]}]}.
diff --git a/deps/rabbitmq_sharding/CODE_OF_CONDUCT.md b/deps/rabbitmq_sharding/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_sharding/CONTRIBUTING.md b/deps/rabbitmq_sharding/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_sharding/LICENSE b/deps/rabbitmq_sharding/LICENSE
new file mode 100644 (file)
index 0000000..846de34
--- /dev/null
@@ -0,0 +1,7 @@
+This package, the RabbitMQ Sharding Plugin, is double-licensed under
+the Mozilla Public License 1.1 ("MPL") and the Mozilla Public License
+2.0 ("MPL-2"). For the MPL, please see LICENSE-MPL-RabbitMQ. For the
+MPL-2, please see LICENSE-MPL2.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbitmq_sharding/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_sharding/LICENSE-MPL-RabbitMQ
new file mode 100644 (file)
index 0000000..096b2c4
--- /dev/null
@@ -0,0 +1,467 @@
+                          MOZILLA PUBLIC LICENSE
+                                Version 1.1
+
+                              ---------------
+
+1. Definitions.
+
+     1.0.1. "Commercial Use" means distribution or otherwise making the
+     Covered Code available to a third party.
+
+     1.1. "Contributor" means each entity that creates or contributes to
+     the creation of Modifications.
+
+     1.2. "Contributor Version" means the combination of the Original
+     Code, prior Modifications used by a Contributor, and the Modifications
+     made by that particular Contributor.
+
+     1.3. "Covered Code" means the Original Code or Modifications or the
+     combination of the Original Code and Modifications, in each case
+     including portions thereof.
+
+     1.4. "Electronic Distribution Mechanism" means a mechanism generally
+     accepted in the software development community for the electronic
+     transfer of data.
+
+     1.5. "Executable" means Covered Code in any form other than Source
+     Code.
+
+     1.6. "Initial Developer" means the individual or entity identified
+     as the Initial Developer in the Source Code notice required by Exhibit
+     A.
+
+     1.7. "Larger Work" means a work which combines Covered Code or
+     portions thereof with code not governed by the terms of this License.
+
+     1.8. "License" means this document.
+
+     1.8.1. "Licensable" means having the right to grant, to the maximum
+     extent possible, whether at the time of the initial grant or
+     subsequently acquired, any and all of the rights conveyed herein.
+
+     1.9. "Modifications" means any addition to or deletion from the
+     substance or structure of either the Original Code or any previous
+     Modifications. When Covered Code is released as a series of files, a
+     Modification is:
+          A. Any addition to or deletion from the contents of a file
+          containing Original Code or previous Modifications.
+
+          B. Any new file that contains any part of the Original Code or
+          previous Modifications.
+
+     1.10. "Original Code" means Source Code of computer software code
+     which is described in the Source Code notice required by Exhibit A as
+     Original Code, and which, at the time of its release under this
+     License is not already Covered Code governed by this License.
+
+     1.10.1. "Patent Claims" means any patent claim(s), now owned or
+     hereafter acquired, including without limitation,  method, process,
+     and apparatus claims, in any patent Licensable by grantor.
+
+     1.11. "Source Code" means the preferred form of the Covered Code for
+     making modifications to it, including all modules it contains, plus
+     any associated interface definition files, scripts used to control
+     compilation and installation of an Executable, or source code
+     differential comparisons against either the Original Code or another
+     well known, available Covered Code of the Contributor's choice. The
+     Source Code can be in a compressed or archival form, provided the
+     appropriate decompression or de-archiving software is widely available
+     for no charge.
+
+     1.12. "You" (or "Your")  means an individual or a legal entity
+     exercising rights under, and complying with all of the terms of, this
+     License or a future version of this License issued under Section 6.1.
+     For legal entities, "You" includes any entity which controls, is
+     controlled by, or is under common control with You. For purposes of
+     this definition, "control" means (a) the power, direct or indirect,
+     to cause the direction or management of such entity, whether by
+     contract or otherwise, or (b) ownership of more than fifty percent
+     (50%) of the outstanding shares or beneficial ownership of such
+     entity.
+
+2. Source Code License.
+
+     2.1. The Initial Developer Grant.
+     The Initial Developer hereby grants You a world-wide, royalty-free,
+     non-exclusive license, subject to third party intellectual property
+     claims:
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Initial Developer to use, reproduce,
+          modify, display, perform, sublicense and distribute the Original
+          Code (or portions thereof) with or without Modifications, and/or
+          as part of a Larger Work; and
+
+          (b) under Patents Claims infringed by the making, using or
+          selling of Original Code, to make, have made, use, practice,
+          sell, and offer for sale, and/or otherwise dispose of the
+          Original Code (or portions thereof).
+
+          (c) the licenses granted in this Section 2.1(a) and (b) are
+          effective on the date Initial Developer first distributes
+          Original Code under the terms of this License.
+
+          (d) Notwithstanding Section 2.1(b) above, no patent license is
+          granted: 1) for code that You delete from the Original Code; 2)
+          separate from the Original Code;  or 3) for infringements caused
+          by: i) the modification of the Original Code or ii) the
+          combination of the Original Code with other software or devices.
+
+     2.2. Contributor Grant.
+     Subject to third party intellectual property claims, each Contributor
+     hereby grants You a world-wide, royalty-free, non-exclusive license
+
+          (a)  under intellectual property rights (other than patent or
+          trademark) Licensable by Contributor, to use, reproduce, modify,
+          display, perform, sublicense and distribute the Modifications
+          created by such Contributor (or portions thereof) either on an
+          unmodified basis, with other Modifications, as Covered Code
+          and/or as part of a Larger Work; and
+
+          (b) under Patent Claims infringed by the making, using, or
+          selling of  Modifications made by that Contributor either alone
+          and/or in combination with its Contributor Version (or portions
+          of such combination), to make, use, sell, offer for sale, have
+          made, and/or otherwise dispose of: 1) Modifications made by that
+          Contributor (or portions thereof); and 2) the combination of
+          Modifications made by that Contributor with its Contributor
+          Version (or portions of such combination).
+
+          (c) the licenses granted in Sections 2.2(a) and 2.2(b) are
+          effective on the date Contributor first makes Commercial Use of
+          the Covered Code.
+
+          (d)    Notwithstanding Section 2.2(b) above, no patent license is
+          granted: 1) for any code that Contributor has deleted from the
+          Contributor Version; 2)  separate from the Contributor Version;
+          3)  for infringements caused by: i) third party modifications of
+          Contributor Version or ii)  the combination of Modifications made
+          by that Contributor with other software  (except as part of the
+          Contributor Version) or other devices; or 4) under Patent Claims
+          infringed by Covered Code in the absence of Modifications made by
+          that Contributor.
+
+3. Distribution Obligations.
+
+     3.1. Application of License.
+     The Modifications which You create or to which You contribute are
+     governed by the terms of this License, including without limitation
+     Section 2.2. The Source Code version of Covered Code may be
+     distributed only under the terms of this License or a future version
+     of this License released under Section 6.1, and You must include a
+     copy of this License with every copy of the Source Code You
+     distribute. You may not offer or impose any terms on any Source Code
+     version that alters or restricts the applicable version of this
+     License or the recipients' rights hereunder. However, You may include
+     an additional document offering the additional rights described in
+     Section 3.5.
+
+     3.2. Availability of Source Code.
+     Any Modification which You create or to which You contribute must be
+     made available in Source Code form under the terms of this License
+     either on the same media as an Executable version or via an accepted
+     Electronic Distribution Mechanism to anyone to whom you made an
+     Executable version available; and if made available via Electronic
+     Distribution Mechanism, must remain available for at least twelve (12)
+     months after the date it initially became available, or at least six
+     (6) months after a subsequent version of that particular Modification
+     has been made available to such recipients. You are responsible for
+     ensuring that the Source Code version remains available even if the
+     Electronic Distribution Mechanism is maintained by a third party.
+
+     3.3. Description of Modifications.
+     You must cause all Covered Code to which You contribute to contain a
+     file documenting the changes You made to create that Covered Code and
+     the date of any change. You must include a prominent statement that
+     the Modification is derived, directly or indirectly, from Original
+     Code provided by the Initial Developer and including the name of the
+     Initial Developer in (a) the Source Code, and (b) in any notice in an
+     Executable version or related documentation in which You describe the
+     origin or ownership of the Covered Code.
+
+     3.4. Intellectual Property Matters
+          (a) Third Party Claims.
+          If Contributor has knowledge that a license under a third party's
+          intellectual property rights is required to exercise the rights
+          granted by such Contributor under Sections 2.1 or 2.2,
+          Contributor must include a text file with the Source Code
+          distribution titled "LEGAL" which describes the claim and the
+          party making the claim in sufficient detail that a recipient will
+          know whom to contact. If Contributor obtains such knowledge after
+          the Modification is made available as described in Section 3.2,
+          Contributor shall promptly modify the LEGAL file in all copies
+          Contributor makes available thereafter and shall take other steps
+          (such as notifying appropriate mailing lists or newsgroups)
+          reasonably calculated to inform those who received the Covered
+          Code that new knowledge has been obtained.
+
+          (b) Contributor APIs.
+          If Contributor's Modifications include an application programming
+          interface and Contributor has knowledge of patent licenses which
+          are reasonably necessary to implement that API, Contributor must
+          also include this information in the LEGAL file.
+
+               (c)    Representations.
+          Contributor represents that, except as disclosed pursuant to
+          Section 3.4(a) above, Contributor believes that Contributor's
+          Modifications are Contributor's original creation(s) and/or
+          Contributor has sufficient rights to grant the rights conveyed by
+          this License.
+
+     3.5. Required Notices.
+     You must duplicate the notice in Exhibit A in each file of the Source
+     Code.  If it is not possible to put such notice in a particular Source
+     Code file due to its structure, then You must include such notice in a
+     location (such as a relevant directory) where a user would be likely
+     to look for such a notice.  If You created one or more Modification(s)
+     You may add your name as a Contributor to the notice described in
+     Exhibit A.  You must also duplicate this License in any documentation
+     for the Source Code where You describe recipients' rights or ownership
+     rights relating to Covered Code.  You may choose to offer, and to
+     charge a fee for, warranty, support, indemnity or liability
+     obligations to one or more recipients of Covered Code. However, You
+     may do so only on Your own behalf, and not on behalf of the Initial
+     Developer or any Contributor. You must make it absolutely clear than
+     any such warranty, support, indemnity or liability obligation is
+     offered by You alone, and You hereby agree to indemnify the Initial
+     Developer and every Contributor for any liability incurred by the
+     Initial Developer or such Contributor as a result of warranty,
+     support, indemnity or liability terms You offer.
+
+     3.6. Distribution of Executable Versions.
+     You may distribute Covered Code in Executable form only if the
+     requirements of Section 3.1-3.5 have been met for that Covered Code,
+     and if You include a notice stating that the Source Code version of
+     the Covered Code is available under the terms of this License,
+     including a description of how and where You have fulfilled the
+     obligations of Section 3.2. The notice must be conspicuously included
+     in any notice in an Executable version, related documentation or
+     collateral in which You describe recipients' rights relating to the
+     Covered Code. You may distribute the Executable version of Covered
+     Code or ownership rights under a license of Your choice, which may
+     contain terms different from this License, provided that You are in
+     compliance with the terms of this License and that the license for the
+     Executable version does not attempt to limit or alter the recipient's
+     rights in the Source Code version from the rights set forth in this
+     License. If You distribute the Executable version under a different
+     license You must make it absolutely clear that any terms which differ
+     from this License are offered by You alone, not by the Initial
+     Developer or any Contributor. You hereby agree to indemnify the
+     Initial Developer and every Contributor for any liability incurred by
+     the Initial Developer or such Contributor as a result of any such
+     terms You offer.
+
+     3.7. Larger Works.
+     You may create a Larger Work by combining Covered Code with other code
+     not governed by the terms of this License and distribute the Larger
+     Work as a single product. In such a case, You must make sure the
+     requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+
+     If it is impossible for You to comply with any of the terms of this
+     License with respect to some or all of the Covered Code due to
+     statute, judicial order, or regulation then You must: (a) comply with
+     the terms of this License to the maximum extent possible; and (b)
+     describe the limitations and the code they affect. Such description
+     must be included in the LEGAL file described in Section 3.4 and must
+     be included with all distributions of the Source Code. Except to the
+     extent prohibited by statute or regulation, such description must be
+     sufficiently detailed for a recipient of ordinary skill to be able to
+     understand it.
+
+5. Application of this License.
+
+     This License applies to code to which the Initial Developer has
+     attached the notice in Exhibit A and to related Covered Code.
+
+6. Versions of the License.
+
+     6.1. New Versions.
+     Netscape Communications Corporation ("Netscape") may publish revised
+     and/or new versions of the License from time to time. Each version
+     will be given a distinguishing version number.
+
+     6.2. Effect of New Versions.
+     Once Covered Code has been published under a particular version of the
+     License, You may always continue to use it under the terms of that
+     version. You may also choose to use such Covered Code under the terms
+     of any subsequent version of the License published by Netscape. No one
+     other than Netscape has the right to modify the terms applicable to
+     Covered Code created under this License.
+
+     6.3. Derivative Works.
+     If You create or use a modified version of this License (which you may
+     only do in order to apply it to code which is not already Covered Code
+     governed by this License), You must (a) rename Your license so that
+     the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape",
+     "MPL", "NPL" or any confusingly similar phrase do not appear in your
+     license (except to note that your license differs from this License)
+     and (b) otherwise make it clear that Your version of the license
+     contains terms which differ from the Mozilla Public License and
+     Netscape Public License. (Filling in the name of the Initial
+     Developer, Original Code or Contributor in the notice described in
+     Exhibit A shall not of themselves be deemed to be modifications of
+     this License.)
+
+7. DISCLAIMER OF WARRANTY.
+
+     COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
+     WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+     WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+     DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING.
+     THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE
+     IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT,
+     YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE
+     COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER
+     OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
+     ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+
+     8.1.  This License and the rights granted hereunder will terminate
+     automatically if You fail to comply with terms herein and fail to cure
+     such breach within 30 days of becoming aware of the breach. All
+     sublicenses to the Covered Code which are properly granted shall
+     survive any termination of this License. Provisions which, by their
+     nature, must remain in effect beyond the termination of this License
+     shall survive.
+
+     8.2.  If You initiate litigation by asserting a patent infringement
+     claim (excluding declatory judgment actions) against Initial Developer
+     or a Contributor (the Initial Developer or Contributor against whom
+     You file such action is referred to as "Participant")  alleging that:
+
+     (a)  such Participant's Contributor Version directly or indirectly
+     infringes any patent, then any and all rights granted by such
+     Participant to You under Sections 2.1 and/or 2.2 of this License
+     shall, upon 60 days notice from Participant terminate prospectively,
+     unless if within 60 days after receipt of notice You either: (i)
+     agree in writing to pay Participant a mutually agreeable reasonable
+     royalty for Your past and future use of Modifications made by such
+     Participant, or (ii) withdraw Your litigation claim with respect to
+     the Contributor Version against such Participant.  If within 60 days
+     of notice, a reasonable royalty and payment arrangement are not
+     mutually agreed upon in writing by the parties or the litigation claim
+     is not withdrawn, the rights granted by Participant to You under
+     Sections 2.1 and/or 2.2 automatically terminate at the expiration of
+     the 60 day notice period specified above.
+
+     (b)  any software, hardware, or device, other than such Participant's
+     Contributor Version, directly or indirectly infringes any patent, then
+     any rights granted to You by such Participant under Sections 2.1(b)
+     and 2.2(b) are revoked effective as of the date You first made, used,
+     sold, distributed, or had made, Modifications made by that
+     Participant.
+
+     8.3.  If You assert a patent infringement claim against Participant
+     alleging that such Participant's Contributor Version directly or
+     indirectly infringes any patent where such claim is resolved (such as
+     by license or settlement) prior to the initiation of patent
+     infringement litigation, then the reasonable value of the licenses
+     granted by such Participant under Sections 2.1 or 2.2 shall be taken
+     into account in determining the amount or value of any payment or
+     license.
+
+     8.4.  In the event of termination under Sections 8.1 or 8.2 above,
+     all end user license agreements (excluding distributors and resellers)
+     which have been validly granted by You or any distributor hereunder
+     prior to termination shall survive termination.
+
+9. LIMITATION OF LIABILITY.
+
+     UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
+     (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
+     DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
+     OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
+     ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
+     CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
+     WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
+     COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
+     INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
+     LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
+     RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
+     PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
+     EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO
+     THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.
+
+10. U.S. GOVERNMENT END USERS.
+
+     The Covered Code is a "commercial item," as that term is defined in
+     48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
+     software" and "commercial computer software documentation," as such
+     terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48
+     C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995),
+     all U.S. Government End Users acquire Covered Code with only those
+     rights set forth herein.
+
+11. MISCELLANEOUS.
+
+     This License represents the complete agreement concerning subject
+     matter hereof. If any provision of this License is held to be
+     unenforceable, such provision shall be reformed only to the extent
+     necessary to make it enforceable. This License shall be governed by
+     California law provisions (except to the extent applicable law, if
+     any, provides otherwise), excluding its conflict-of-law provisions.
+     With respect to disputes in which at least one party is a citizen of,
+     or an entity chartered or registered to do business in the United
+     States of America, any litigation relating to this License shall be
+     subject to the jurisdiction of the Federal Courts of the Northern
+     District of California, with venue lying in Santa Clara County,
+     California, with the losing party responsible for costs, including
+     without limitation, court costs and reasonable attorneys' fees and
+     expenses. The application of the United Nations Convention on
+     Contracts for the International Sale of Goods is expressly excluded.
+     Any law or regulation which provides that the language of a contract
+     shall be construed against the drafter shall not apply to this
+     License.
+
+12. RESPONSIBILITY FOR CLAIMS.
+
+     As between Initial Developer and the Contributors, each party is
+     responsible for claims and damages arising, directly or indirectly,
+     out of its utilization of rights under this License and You agree to
+     work with Initial Developer and Contributors to distribute such
+     responsibility on an equitable basis. Nothing herein is intended or
+     shall be deemed to constitute any admission of liability.
+
+13. MULTIPLE-LICENSED CODE.
+
+     Initial Developer may designate portions of the Covered Code as
+     "Multiple-Licensed".  "Multiple-Licensed" means that the Initial
+     Developer permits you to utilize portions of the Covered Code under
+     Your choice of the MPL or the alternative licenses, if any, specified
+     by the Initial Developer in the file described in Exhibit A.
+
+EXHIBIT A -Mozilla Public License.
+
+     ``The contents of this file are subject to the Mozilla Public License
+     Version 1.1 (the "License"); you may not use this file except in
+     compliance with the License. You may obtain a copy of the License at
+     http://www.mozilla.org/MPL/
+
+     Software distributed under the License is distributed on an "AS IS"
+     basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+     License for the specific language governing rights and limitations
+     under the License.
+
+     The Original Code is RabbitMQ.
+
+     The Initial Developer of the Original Code is GoPivotal, Inc.
+     Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+
+     Alternatively, the contents of this file may be used under the terms
+     of the GNU General Public License version 2 (the  "GPL2"), or
+     the Apache License version 2 (the "ASL2") in which case the
+     provisions of GPL2 or the ASL2 are applicable instead of those
+     above.  If you wish to allow use of your version of this file only
+     under the terms of the GPL2 or the ASL2 and not to allow others to use
+     your version of this file under the MPL, indicate your decision by
+     deleting  the provisions above and replace  them with the notice and
+     other provisions required by the GPL2 or the ASL2.  If you do not delete
+     the provisions above, a recipient may use your version of this file
+     under either the MPL, the GPL2 or the ASL2.''
+     
+     [NOTE: The text of this Exhibit A may differ slightly from the text of
+     the notices in the Source Code files of the Original Code. You should
+     use the text of this Exhibit A rather than the text found in the
+     Original Code Source Code for Your Modifications.]
diff --git a/deps/rabbitmq_sharding/LICENSE-MPL2 b/deps/rabbitmq_sharding/LICENSE-MPL2
new file mode 100644 (file)
index 0000000..14e2f77
--- /dev/null
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+    means each individual or legal entity that creates, contributes to
+    the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+    means the combination of the Contributions of others (if any) used
+    by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+    means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+    means Source Code Form to which the initial Contributor has attached
+    the notice in Exhibit A, the Executable Form of such Source Code
+    Form, and Modifications of such Source Code Form, in each case
+    including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+    means
+
+    (a) that the initial Contributor has attached the notice described
+        in Exhibit B to the Covered Software; or
+
+    (b) that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the
+        terms of a Secondary License.
+
+1.6. "Executable Form"
+    means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+    means a work that combines Covered Software with other material, in 
+    a separate file or files, that is not Covered Software.
+
+1.8. "License"
+    means this document.
+
+1.9. "Licensable"
+    means having the right to grant, to the maximum extent possible,
+    whether at the time of the initial grant or subsequently, any and
+    all of the rights conveyed by this License.
+
+1.10. "Modifications"
+    means any of the following:
+
+    (a) any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered
+        Software; or
+
+    (b) any new file in Source Code Form that contains any Covered
+        Software.
+
+1.11. "Patent Claims" of a Contributor
+    means any patent claim(s), including without limitation, method,
+    process, and apparatus claims, in any patent Licensable by such
+    Contributor that would be infringed, but for the grant of the
+    License, by the making, using, selling, offering for sale, having
+    made, import, or transfer of either its Contributions or its
+    Contributor Version.
+
+1.12. "Secondary License"
+    means either the GNU General Public License, Version 2.0, the GNU
+    Lesser General Public License, Version 2.1, the GNU Affero General
+    Public License, Version 3.0, or any later versions of those
+    licenses.
+
+1.13. "Source Code Form"
+    means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+    means an individual or a legal entity exercising rights under this
+    License. For legal entities, "You" includes any entity that
+    controls, is controlled by, or is under common control with You. For
+    purposes of this definition, "control" means (a) the power, direct
+    or indirect, to cause the direction or management of such entity,
+    whether by contract or otherwise, or (b) ownership of more than
+    fifty percent (50%) of the outstanding shares or beneficial
+    ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+    Licensable by such Contributor to use, reproduce, make available,
+    modify, display, perform, distribute, and otherwise exploit its
+    Contributions, either on an unmodified basis, with Modifications, or
+    as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+    for sale, have made, import, and otherwise transfer either its
+    Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+    or
+
+(b) for infringements caused by: (i) Your and any other third party's
+    modifications of Covered Software, or (ii) the combination of its
+    Contributions with other software (except as part of its Contributor
+    Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+    its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+    Form, as described in Section 3.1, and You must inform recipients of
+    the Executable Form how they can obtain a copy of such Source Code
+    Form by reasonable means in a timely manner, at a charge no more
+    than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+    License, or sublicense it under different terms, provided that the
+    license for the Executable Form does not attempt to limit or alter
+    the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+*                                                                      *
+*  6. Disclaimer of Warranty                                           *
+*  -------------------------                                           *
+*                                                                      *
+*  Covered Software is provided under this License on an "as is"       *
+*  basis, without warranty of any kind, either expressed, implied, or  *
+*  statutory, including, without limitation, warranties that the       *
+*  Covered Software is free of defects, merchantable, fit for a        *
+*  particular purpose or non-infringing. The entire risk as to the     *
+*  quality and performance of the Covered Software is with You.        *
+*  Should any Covered Software prove defective in any respect, You     *
+*  (not any Contributor) assume the cost of any necessary servicing,   *
+*  repair, or correction. This disclaimer of warranty constitutes an   *
+*  essential part of this License. No use of any Covered Software is   *
+*  authorized under this License except under this disclaimer.         *
+*                                                                      *
+************************************************************************
+
+************************************************************************
+*                                                                      *
+*  7. Limitation of Liability                                          *
+*  --------------------------                                          *
+*                                                                      *
+*  Under no circumstances and under no legal theory, whether tort      *
+*  (including negligence), contract, or otherwise, shall any           *
+*  Contributor, or anyone who distributes Covered Software as          *
+*  permitted above, be liable to You for any direct, indirect,         *
+*  special, incidental, or consequential damages of any character      *
+*  including, without limitation, damages for lost profits, loss of    *
+*  goodwill, work stoppage, computer failure or malfunction, or any    *
+*  and all other commercial damages or losses, even if such party      *
+*  shall have been informed of the possibility of such damages. This   *
+*  limitation of liability shall not apply to liability for death or   *
+*  personal injury resulting from such party's negligence to the       *
+*  extent applicable law prohibits such limitation. Some               *
+*  jurisdictions do not allow the exclusion or limitation of           *
+*  incidental or consequential damages, so this exclusion and          *
+*  limitation may not apply to You.                                    *
+*                                                                      *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+  This Source Code Form is subject to the terms of the Mozilla Public
+  License, v. 2.0. If a copy of the MPL was not distributed with this
+  file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+  This Source Code Form is "Incompatible With Secondary Licenses", as
+  defined by the Mozilla Public License, v. 2.0.
diff --git a/deps/rabbitmq_sharding/Makefile b/deps/rabbitmq_sharding/Makefile
new file mode 100644 (file)
index 0000000..34a928f
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_sharding
+
+DEPS = rabbit_common rabbit
+TEST_DEPS = rabbitmq_ct_helpers amqp_client
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_sharding/README.extra.md b/deps/rabbitmq_sharding/README.extra.md
new file mode 100644 (file)
index 0000000..9d51aa8
--- /dev/null
@@ -0,0 +1,79 @@
+# Additional information #
+
+Here you can find some extra information about how the plugin works
+and the reasons for it.
+
+## Why do we need this plugin? ##
+
+RabbitMQ queues are bound to the node where they were first
+declared. This means that even if you create a cluster of RabbitMQ
+brokers, at some point all message traffic will go to the node where
+the queue lives. What this plugin does is to give you a centralized
+place where to send your messages, plus __load balancing__ across many
+nodes, by adding queues to the other nodes in the cluster.
+
+The advantage of this setup is that the queues from where your
+consumers will get messages will be local to the node where they are
+connected.  On the other hand, the producers don't need to care about
+what's behind the exchange.
+
+All the plumbing to __automatically maintain__ the shard queues is
+done by the plugin. If you add more nodes to the cluster, then the
+plugin will __automatically create queues in those nodes__.
+
+If you remove nodes from the cluster then RabbitMQ will take care of
+taking them out of the list of bound queues. Message loss can happen
+in the case where a race occurs between a node going away and your
+message arriving at the shard exchange. If you can't afford to lose a
+message then you can use
+[publisher confirms](http://www.rabbitmq.com/confirms.html) to prevent
+message loss.
+
+## Message Ordering ##
+
+Message order is maintained per sharded queue, but not globally. This
+means that once a message enters a queue, then for that queue and the
+set of consumers attached to the queue, ordering will be preserved.
+
+If you need global ordering then stick with
+[mirrored queues](http://www.rabbitmq.com/ha.html).
+
+## What strategy is used for picking the queue name ##
+
+When you issue a `basic.consume`, the plugin will choose the queue
+with the _least amount of consumers_.  The queue will be local to the
+broker your client is connected to. Of course the local sharded queue
+will be part of the set of queues that belong to the chosen shard.
+
+## Intercepted Channel Behaviour ##
+
+This plugin works with the new `channel interceptors`. An interceptor
+basically allows a plugin to modify parts of an AMQP method. For
+example in this plugin case, whenever a user sends a `basic.consume`,
+the plugin will map the queue name sent by the user to one of the
+sharded queues.
+
+Also a plugin can decide that a certain AMQP method can't be performed
+on a queue that's managed by the plugin. In this case declaring a queue
+called `my_shard` doesn't make much sense when there's actually a
+sharded queue by that name. In this case the plugin will return a
+channel error to the user.
+
+These are the AMQP methods intercepted by the plugin, and the
+respective behaviour:
+
+- `'basic.consume', QueueName`: The plugin will pick the sharded queue
+  with the least amount of consumers from the `QueueName` shard.
+- `'basic.get', QueueName`: The plugin will pick the sharded queue
+  with the least amount of consumers from the `QueueName` shard.
+- `'queue.declare', QueueName`: The plugin rewrites `QueueName` to be
+  the first queue in the shard, so `queue.declare_ok` returns the stats
+  for that queue.
+- `'queue.bind', QueueName`: since there isn't an actual `QueueName`
+  queue, this method returns a channel error.
+- `'queue.unbind', QueueName`: since there isn't an actual `QueueName`
+  queue, this method returns a channel error.
+- `'queue.purge', QueueName`: since there isn't an actual `QueueName`
+  queue, this method returns a channel error.
+- `'queue.delete', QueueName`: since there isn't an actual `QueueName`
+  queue, this method returns a channel error.
diff --git a/deps/rabbitmq_sharding/README.md b/deps/rabbitmq_sharding/README.md
new file mode 100644 (file)
index 0000000..5186f7d
--- /dev/null
@@ -0,0 +1,159 @@
+# RabbitMQ Sharding Plugin #
+
+This plugin introduces the concept of sharded queues for
+RabbitMQ. Sharding is done at the exchange level, that is, messages
+will be partitioned across queues by one exchange that we should
+define as sharded. The machinery used behind the scenes implies
+defining an exchange that will partition, or shard messages across
+queues. The partitioning will be done automatically for you, i.e: once
+you define an exchange as _sharded_, then the supporting queues will
+be automatically created and messages will be sharded across them.
+
+The following graphic depicts how the plugin works from the standpoint
+of a publisher and a consumer:
+
+![Sharding Overview](http://hg.rabbitmq.com/rabbitmq-sharding/raw-file/6fea09e847d5/docs/sharded_queues.png)
+
+As you can see in the graphic, the producer publishes a series of
+messages, those messages get partitioned to different queues, and then
+our consumer gets messages from one of those queues. Therefore if you
+have a partition with 3 queues, then you will need to have at least 3
+consumers to get all the messages from those queues.
+
+## Auto-scaling ##
+
+One interesting property of this plugin, is that if you add more nodes
+to your RabbitMQ cluster, then the plugin will automatically create
+more shards in the new node. Say you had a shard with 4 queues in
+`node a` and `node b` just joined the cluster. The plugin will
+automatically create 4 queues in `node b` and join them to the shard
+partition. Already delivered messages _will not_ be rebalanced, but
+newly arriving messages will be partitioned to the new queues.
+
+## Partitioning Messages ##
+
+The exchanges that ship by default with RabbitMQ work in an "all or
+nothing" fashion, i.e: if a routing key matches a set of queues bound
+to the exchange, then RabbitMQ will route the message to all the
+queues in that set. Therefore for this plugin to work, we need to
+route messages to an exchange that would partition messages, so they
+are routed to _at most_ one queue.
+
+The plugin provides a new exchange type `"x-modulus-hash"` that
+applies the traditional hashing technique to partition messages
+across queues.
+
+The `"x-modulus-hash"` exchange will hash the routing key used to
+publish the message and then it will apply a `Hash mod N` to pick the
+queue where to route the message, where N is the number of queues
+bound to the exchange. **This exchange will completely ignore the
+binding key used to bind the queue to the exchange**.
+
+You could also use other exchanges that have similar behaviour like
+the _Consistent Hash Exchange_ or the _Random Exchange_.  The first
+one has the advantage of shipping directly with RabbitMQ.
+
+If you _just need message partitioning_ but not the automatic queue
+creation provided by this plugin, then you can just use the
+[Consistent Hash Exchange](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange).
+
+## Consuming from a sharded queue ##
+
+While the plugin creates a bunch of queues behind the scenes, the idea
+is that those queues act like a big logical queue where you consume
+messages from.
+
+An example should illustrate this better: let's say you declared the
+exchange _images_ to be a sharded exchange. Then RabbitMQ created
+behind the scenes queues _shard: - nodename images 1_, _shard: -
+nodename images 2_, _shard: - nodename images 3_ and _shard: -
+nodename images 4_. Of course you don't want to tie your application
+to the naming conventions of this plugin. What you would want to do is
+to perform a `basic.consume('images')` and let RabbitMQ figure out the
+rest. This plugin does exactly that.
+
+TL;DR: if you have a shard called _images_, then you can directly
+consume from a queue called _images_.
+
+How does it work? The plugin will choose the queue from the shard with
+the _least amount of consumers_, provided the queue contents are local
+to the broker you are connected to.
+
+**NOTE: there's a small race condition between RabbitMQ updating the
+queue's internal stats about consumers and when clients issue
+`basic.consume` commands.** The problem with this is that if your
+client issues many `basic.consume` commands without too much time in
+between, it might happen that the plugin assigns the consumers to
+queues in an uneven way.
+
+## Installing ##
+
+Install the corresponding .ez files from our
+[Community Plugins page](http://www.rabbitmq.com/community-plugins.html).
+
+Then run the following command:
+
+```bash
+rabbitmq-plugins enable rabbitmq_sharding
+```
+
+You'd probably want to also enable the Consistent Hash Exchange
+plugin.
+
+## Usage ##
+
+Once the plugin is installed you can define an exchange as sharded by
+setting up a policy that matches the exchange name. For example if we
+have the exchange called `shard.images`, we could define the following
+policy to shard it:
+
+```bash
+$CTL set_policy images-shard "^shard.images$" '{"shards-per-node": 2, "routing-key": "1234"}'
+```
+
+This will create `2` sharded queues per node in the cluster, and will
+bind those queues using the `"1234"` routing key.
+
+### About the routing-key policy definition ###
+
+In the example above we use the routing key `1234` when defining the
+policy. This means that the underlying exchanges used for sharding
+will bind the sharded queues to the exchange using the `1234` routing
+key specified above. This means that for a direct exchange, _only
+messages that are published with the routing key `1234` will be routed
+to the sharded queues_. If you decide to use a fanout exchange for
+sharding, then the `1234` routing key, while used during binding, will
+be ignored by the exchange. If you use the `"x-modulus-hash"`
+exchange, then the routing key will be ignored as well. So depending
+on the exchange you use, will be the effect the `routing-key` policy
+definition has while routing messages.
+
+The `routing-key` policy definition is optional.
+
+## Building the plugin ##
+
+Get the RabbitMQ Public Umbrella ready as explained in the
+[RabbitMQ Plugin Development Guide](http://www.rabbitmq.com/plugin-development.html).
+
+Move to the umbrella folder and then run the following commands, to
+fetch dependencies:
+
+```bash
+make up
+cd ../rabbitmq-sharding
+make
+```
+
+## Plugin Status ##
+
+At the moment the plugin is __experimental__ in order to receive
+feedback from the community.
+
+## LICENSE ##
+
+See the LICENSE file.
+
+## Extra information ##
+
+Some information about how the plugin affects message ordering and
+some other details can be found in the file README.extra.md
diff --git a/deps/rabbitmq_sharding/docs/sharded_queues.png b/deps/rabbitmq_sharding/docs/sharded_queues.png
new file mode 100644 (file)
index 0000000..6ab26e6
Binary files /dev/null and b/deps/rabbitmq_sharding/docs/sharded_queues.png differ
diff --git a/deps/rabbitmq_sharding/erlang.mk b/deps/rabbitmq_sharding/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards `and`/`or` for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang OAuth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = ZAB protocol implemented in Erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print(1=pkg): print one package's metadata via a single printf; the
+# "Pkg name" line is suppressed when the package key equals its app name
+# (the core_eq check below).
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# `make search q=<str>`: case-insensitive substring match against each
+# package's name and description; without q, list every known package.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name/dep_repo/dep_commit(1=dep): resolve a dependency's directory name,
+# repository URL (git:// github URLs rewritten to https://) and commit/ref,
+# preferring an explicit dep_<name> declaration over the package index.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Ensure APPS_DIR and DEPS_DIR are on ERL_LIBS (appended only when missing).
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# apps: build every application under $(APPS_DIR) at most once per run;
+# $(ERLANG_MK_TMP)/apps.log memoizes apps already built so an app reached
+# through several paths is not rebuilt. No-op when invoked from an app.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# deps: build each fetched dependency once per run (deps.log memo, same
+# scheme as apps.log above); fails with exit 2 on a dep with no Makefile.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch(1=dep): decide how to patch a freshly fetched dependency so it
+# builds with erlang.mk. erlang.mk-based deps get their erlang.mk replaced and
+# their .app.src regenerated; Makefile deps that look rebar-ish (include a
+# ../*.mk, or mention rebar in the Makefile or any *.mk) go through
+# dep_autopatch2; other Makefile deps only get their app file refreshed; deps
+# with no Makefile get a noop Makefile (no src/) or full autopatching.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2(1=dep): freeze any .app.src.script, regenerate the .app.src,
+# then either run the rebar-based converter (when any rebar artifact exists)
+# or generate a plain erlang.mk Makefile.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop(1=dep): give the dependency a do-nothing Makefile.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen(1=dep): minimal generated Makefile delegating to erlang.mk.
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: clone and build a pinned rebar once, cached in
+# $(ERLANG_MK_TMP)/rebar; needed to evaluate rebar configs and plugins below.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar(1=dep): stash the original Makefile as Makefile.orig.mk,
+# run the Erlang converter below, and drop any stale ebin/<dep>.app.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# dep_autopatch_rebar.erl(1=dep): Erlang script (run via $(call erlang,...))
+# that reads the dependency's rebar.config / rebar.config.script and appends an
+# erlang.mk-compatible Makefile: translated erl_opts, DEPS declarations,
+# pre-hooks, C port specs (c_src/Makefile.erlang.mk) and rebar plugin runs.
+# NOTE: the body is Erlang embedded in a Make define — no '#' comments may be
+# added inside it, and $$ / \( escapes are deliberate Make-level escaping.
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl(1=dep): rewrite ebin/<dep>.app so its modules list
+# matches the .erl files actually present under src/.
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+# dep_autopatch_appsrc_script.erl(1=dep): evaluate <dep>.app.src.script and
+# freeze its result into a static <dep>.app.src.
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+# dep_autopatch_appsrc.erl(1=dep): normalize the .app.src — empty the modules
+# list, turn a `git` vsn into the literal "git", ensure a registered entry —
+# falling back to ebin/<dep>.app as input when src/<dep>.app.src is missing
+# (the input file is deleted after conversion in that case).
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+# dep_fetch_<method>(1=dep): one fetch recipe per supported method; each
+# clones/copies the dependency into $(DEPS_DIR)/<name> at the resolved commit.
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# dep_fetch_hex.erl(1=pkg,2=version): download the hex.pm tarball over HTTPS
+# and unpack its inner contents.tar.gz into the dependency directory.
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# dep_fetch(1=dep): pick the fetch method name — from dep_<name>, from the
+# package index, or `legacy`/`fail` when nothing matches.
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+# dep_target(1=dep): emit the rule that fetches $(DEPS_DIR)/<dep>, refuses
+# names that clash with an application in $(APPS_DIR), runs autoreconf and
+# ./configure when present, and applies autopatching unless the dep is in
+# NO_AUTOPATCH or matches the RabbitMQ client/server special cases (which
+# instead download the rabbitmq-codegen/rabbitmq-server trees they need).
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+# Instantiate a fetch/build target for every build-time and runtime dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into each application directory, but only from the
+# top-level project: IS_APP=1 marks the nested invocation and stops recursion.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+# distclean also wipes the whole dependency directory unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include a plugin makefile from the deps directory; the empty rule makes the
+# included file depend on its dependency's directory without a rebuild recipe.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# Entries with a "/" are explicit plugin paths; a bare name means
+# "<name>/plugins.mk" inside that dependency.
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL plugin: compiles Django-style templates (*.dtl) into <name>_dtl
+# beam modules alongside the application's own modules.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH the module name encodes the subdirectory ("/" -> "_");
+# otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet run via erl to compile each template with erlydtl.
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# $? limits compilation to templates newer than the .app file.
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin: generates .erl/.hrl from src/*.proto via
+# protobuffs_compile, then compiles and removes the intermediate .erl files.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+# Modules that must be compiled before the rest (e.g. behaviours, parse
+# transforms) and modules excluded from compilation, given as bare names.
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# *_verbose_0 prints a short tag at V=0; *_verbose_2 traces commands at V=2.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# ebin/test is a marker left by test-build; if present, force a clean build
+# so test-compiled beams are not shipped as the regular application.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file renders ebin/$(PROJECT).app when src/ has no .app.src.
+# Without src/$(PROJECT_MOD).erl the project is a library application
+# (no {mod, ...} entry); otherwise the full application variant is used.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+# erlc +noobj emits .erl/.hrl/.asn1db which are moved into src/ and include/
+# so the generated modules are compiled with the rest of the sources.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+# First pass compiles .mib to .bin in priv/mibs/, second pass generates the
+# corresponding .hrl headers into include/.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+# Generated scanners/parsers land in src/ and join ERL_FILES.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Dependency scanner, run via erl. It parses every .erl form by form,
+# records behaviour/parse_transform/include/import edges in an ETS bag and a
+# DAG, then writes $(PROJECT).d with per-file prerequisites plus a
+# COMPILE_FIRST list in reverse topological order.
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+# Regenerate the dependency file unless NO_MAKEDEP is set and a .d exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# The stamp file's presence distinguishes a first build (no forced touch)
+# from a Makefile edit (touch all sources so they recompile).
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+# -Werror is stripped when building as a dependency so downstream warnings
+# do not break consumers.
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Compile changed sources, then produce ebin/$(PROJECT).app: either rendered
+# from the app_file template, or from src/$(PROJECT).app.src with the modules
+# list and git-based id substituted in.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove compiled artifacts and every generated source (xrl/yrl/asn1/mib).
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+# With SKIP_DEPS the target exists but does nothing.
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+# With SKIP_DEPS the target exists but does nothing.
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases: no src/ at all (tests only); src/ without the ebin/test
+# marker (previous build was a regular build, so clean first); src/ with the
+# marker (previous build was already a test build, no clean needed).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# Generate a rebar.config equivalent of this project's DEPS and ERLC_OPTS,
+# for consumers that build with rebar instead of erlang.mk.
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export through the environment so the recipe can emit it verbatim.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the user guide (PDF + chunked HTML) with a2x when the book exists;
+# otherwise the target is a no-op.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build man pages from doc/src/manual/*.asciidoc and install the gzipped
+# pages under MAN_INSTALL_PATH, one directory per section in MAN_SECTIONS.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove all generated documentation. Derive the man directories from
+# MAN_SECTIONS (as the build/install targets do) instead of hardcoding
+# man3/man7, so overridden sections are cleaned up as well.
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf $(foreach s,$(MAN_SECTIONS),doc/man$(s)/)
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# Append bootstrap usage to the global help output.
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+# $p is set by the bootstrap targets to the project name before rendering.
+
+# Legacy .app.src for a full OTP application (has a {mod, ...} callback).
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+# Legacy .app.src for a library application (no {mod, ...} entry).
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+# When SP is set, the generated Makefile records the same whitespace setting.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile for an application under APPS_DIR; includes the top-level
+# erlang.mk via a relative path.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# Application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+# relx release configuration and its rel/ companion files.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+# $(n) is set by the new/bootstrap targets to the module name before rendering.
+
+# Empty one_for_one supervisor skeleton.
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+# gen_server skeleton with all required callbacks stubbed.
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+# Bare module skeleton.
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+# Cowboy plain HTTP handler skeleton.
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# gen_fsm skeleton with all required callbacks stubbed.
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+# Cowboy loop (long-polling style) handler skeleton.
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Cowboy REST handler skeleton serving a static HTML response.
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+# Cowboy websocket handler skeleton echoing text/binary frames.
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+# Ranch protocol skeleton with an empty receive loop.
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render template variable $(1) into file $(2): newlines become \n for
+# printf, %/quotes are escaped, and tabs are replaced with $(WS).
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace written for each template tab: SP spaces when SP is
+# set, otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# Generate a full OTP application skeleton in the current directory.
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# Generate an OTP library skeleton (no application callback or supervisor).
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Generate relx.config and the rel/ files needed to build a release.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new application skeleton under $(APPS_DIR)/$(in).
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library skeleton under $(APPS_DIR)/$(in).
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Render template t as module n, either here or (with in=APP) by recursing
+# into that application's directory.
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# List every tpl_* variable currently defined, i.e. available templates.
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+# Pick platform-appropriate output extensions, then combine with C_SRC_TYPE
+# (shared library vs executable) to form the final output file name.
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_sharding/etc/rabbit-hare.config b/deps/rabbitmq_sharding/etc/rabbit-hare.config
new file mode 100644 (file)
index 0000000..3ec308d
--- /dev/null
@@ -0,0 +1,3 @@
+%% -*- erlang -*-
+%% Note - we still need this for rabbit_sharding_test_util:plugin_dir/0 to work...
+[].
diff --git a/deps/rabbitmq_sharding/etc/rabbit-test.config b/deps/rabbitmq_sharding/etc/rabbit-test.config
new file mode 100644 (file)
index 0000000..3ec308d
--- /dev/null
@@ -0,0 +1,3 @@
+%% -*- erlang -*-
+%% Note - we still need this for rabbit_sharding_test_util:plugin_dir/0 to work...
+[].
diff --git a/deps/rabbitmq_sharding/etc/rkey.sh b/deps/rabbitmq_sharding/etc/rkey.sh
new file mode 100755 (executable)
index 0000000..bc72ef1
--- /dev/null
@@ -0,0 +1,8 @@
+#!/bin/sh
+CTL=$1
+
+curl -i -u guest:guest -H "content-type:application/json" \
+    -XPUT -d'{"type":"x-consistent-hash","durable":true}' \
+    http://localhost:15672/api/exchanges/%2f/rkey.ex
+
+$CTL set_policy rkey-shard "^rkey\."   '{"shards-per-node": 2, "routing-key": "1234"}'
diff --git a/deps/rabbitmq_sharding/rabbitmq-components.mk b/deps/rabbitmq_sharding/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl
new file mode 100644 (file)
index 0000000..f3f1921
--- /dev/null
@@ -0,0 +1,98 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ Sharding Plugin
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_sharding_exchange_decorator).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "sharding exchange decorator"},
+                    {mfa, {rabbit_registry, register,
+                           [exchange_decorator, <<"sharding">>, ?MODULE]}},
+                    {cleanup, {rabbit_registry, unregister,
+                               [exchange_decorator, <<"sharding">>]}},
+                    {requires, rabbit_registry},
+                    {enables, recovery}]}).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-behaviour(rabbit_exchange_decorator).
+
+-export([description/0, serialise_events/1]).
+-export([create/2, delete/3, policy_changed/2,
+         add_binding/3, remove_bindings/3, route/2, active_for/1]).
+
+-import(rabbit_sharding_util, [shard/1]).
+
+%%----------------------------------------------------------------------------
+
+description() ->
+    [{description, <<"Shard exchange decorator">>}].
+
+serialise_events(_X) -> false.
+
+create(transaction, _X) ->
+    ok;
+create(none, X) ->
+    maybe_start_sharding(X),
+    ok.
+
+add_binding(_Tx, _X, _B) -> ok.
+remove_bindings(_Tx, _X, _Bs) -> ok.
+
+route(_, _) -> [].
+
+active_for(X) ->
+    case shard(X) of
+        true  -> noroute;
+        false -> none
+    end.
+
+%% we have to remove the policy from ?SHARDING_TABLE
+delete(transaction, _X, _Bs) -> ok;
+delete(none, X, _Bs) ->
+    maybe_stop_sharding(X),
+    ok.
+
+%% we have to remove the old policy from ?SHARDING_TABLE
+%% and then add the new one.
+policy_changed(OldX, NewX) ->
+    maybe_update_sharding(OldX, NewX),
+    ok.
+
+%%----------------------------------------------------------------------------
+
+maybe_update_sharding(OldX, NewX) ->
+    case shard(NewX) of
+        true  ->
+            rabbit_sharding_shard:maybe_update_shards(OldX, NewX);
+        false ->
+            rabbit_sharding_shard:stop_sharding(OldX)
+    end.
+
+maybe_start_sharding(X)->
+    case shard(X) of
+        true  ->
+            rabbit_sharding_shard:ensure_sharded_queues(X);
+        false ->
+            ok
+    end.
+
+maybe_stop_sharding(X) ->
+    case shard(X) of
+        true  ->
+            rabbit_sharding_shard:stop_sharding(X);
+        false ->
+            ok
+    end.
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl
new file mode 100644 (file)
index 0000000..ebfa1d7
--- /dev/null
@@ -0,0 +1,65 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ Sharding Plugin
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_sharding_exchange_type_modulus_hash).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-behaviour(rabbit_exchange_type).
+
+-export([description/0, serialise_events/0, route/2]).
+-export([validate/1, validate_binding/2,
+         create/2, delete/3, policy_changed/2,
+         add_binding/3, remove_bindings/3, assert_args_equivalence/2]).
+
+-rabbit_boot_step(
+   {rabbit_sharding_exchange_type_modulus_hash_registry,
+    [{description, "exchange type x-modulus-hash: registry"},
+     {mfa,         {rabbit_registry, register,
+                    [exchange, <<"x-modulus-hash">>, ?MODULE]}},
+     {cleanup, {rabbit_registry, unregister,
+                [exchange, <<"x-modulus-hash">>]}},
+     {requires,    rabbit_registry},
+     {enables,     kernel_ready}]}).
+
+-define(PHASH2_RANGE, 134217728). %% 2^27
+
+description() ->
+    [{description, <<"Modulus Hashing Exchange">>}].
+
+serialise_events() -> false.
+
+route(#exchange{name = Name},
+      #delivery{message = #basic_message{routing_keys = Routes}}) ->
+    Qs = rabbit_router:match_routing_key(Name, ['_']),
+    case length(Qs) of
+        0 -> [];
+        N -> [lists:nth(hash_mod(Routes, N), Qs)]
+    end.
+
+validate(_X) -> ok.
+validate_binding(_X, _B) -> ok.
+create(_Tx, _X) -> ok.
+delete(_Tx, _X, _Bs) -> ok.
+policy_changed(_X1, _X2) -> ok.
+add_binding(_Tx, _X, _B) -> ok.
+remove_bindings(_Tx, _X, _Bs) -> ok.
+assert_args_equivalence(X, Args) ->
+    rabbit_exchange:assert_args_equivalence(X, Args).
+
+hash_mod(Routes, N) ->
+    M = erlang:phash2(Routes, ?PHASH2_RANGE) rem N,
+    M + 1. %% erlang lists are 1..N indexed.
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl
new file mode 100644 (file)
index 0000000..2001b68
--- /dev/null
@@ -0,0 +1,179 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ Sharding Plugin
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_sharding_interceptor).
+
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+
+-behaviour(rabbit_channel_interceptor).
+
+-export([description/0, intercept/3, applies_to/0, init/1]).
+
+%% exported for tests
+-export([consumer_count/1]).
+
+-import(rabbit_sharding_util, [a2b/1, shards_per_node/1]).
+-import(rabbit_misc, [r/3, format/2, protocol_error/3]).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "sharding interceptor"},
+                    {mfa, {rabbit_registry, register,
+                           [channel_interceptor,
+                            <<"sharding interceptor">>, ?MODULE]}},
+                    {cleanup, {rabbit_registry, unregister,
+                               [channel_interceptor,
+                                <<"sharding interceptor">>]}},
+                    {requires, rabbit_registry},
+                    {enables, recovery}]}).
+
+init(Ch) ->
+    rabbit_channel:get_vhost(Ch).
+
+description() ->
+    [{description, <<"Sharding interceptor for channel methods">>}].
+
+intercept(#'basic.consume'{queue = QName} = Method, Content, VHost) ->
+    case queue_name(VHost, QName) of
+        {ok, QName2} ->
+            {Method#'basic.consume'{queue = QName2}, Content};
+        {error, QName} ->
+            precondition_failed("Error finding sharded queue for: ~p", [QName])
+    end;
+
+intercept(#'basic.get'{queue = QName} = Method, Content, VHost) ->
+    case queue_name(VHost, QName) of
+        {ok, QName2} ->
+            {Method#'basic.get'{queue = QName2}, Content};
+        {error, QName} ->
+            precondition_failed("Error finding sharded queue for: ~p", [QName])
+    end;
+
+intercept(#'queue.delete'{queue = QName} = Method, Content, VHost) ->
+    case is_sharded(VHost, QName) of
+        true ->
+            precondition_failed("Can't delete sharded queue: ~p", [QName]);
+        _    ->
+            {Method, Content}
+    end;
+
+intercept(#'queue.declare'{queue = QName} = Method, Content, VHost) ->
+    case is_sharded(VHost, QName) of
+        true ->
+            %% Since as an interceptor we can't modify what the channel
+            %% will return, we then modify the queue name so the channel
+            %% can at least return a queue.declare_ok for that particular
+            %% queue. Picking the first queue over the others is totally
+            %% arbitrary.
+            QName2 = rabbit_sharding_util:make_queue_name(
+                                      QName, a2b(node()), 0),
+            {Method#'queue.declare'{queue = QName2}, Content};
+        _    ->
+            {Method, Content}
+    end;
+
+intercept(#'queue.bind'{queue = QName} = Method, Content, VHost) ->
+    case is_sharded(VHost, QName) of
+        true ->
+            precondition_failed("Can't bind sharded queue: ~p", [QName]);
+        _    ->
+            {Method, Content}
+    end;
+
+intercept(#'queue.unbind'{queue = QName} = Method, Content, VHost) ->
+    case is_sharded(VHost, QName) of
+        true ->
+            precondition_failed("Can't unbind sharded queue: ~p", [QName]);
+        _    ->
+            {Method, Content}
+    end;
+
+intercept(#'queue.purge'{queue = QName} = Method, Content, VHost) ->
+    case is_sharded(VHost, QName) of
+        true ->
+            precondition_failed("Can't purge sharded queue: ~p", [QName]);
+        _    ->
+            {Method, Content}
+    end;
+
+intercept(Method, Content, _VHost) ->
+    {Method, Content}.
+
+applies_to() ->
+    ['basic.consume', 'basic.get', 'queue.delete', 'queue.declare',
+     'queue.bind', 'queue.unbind', 'queue.purge'].
+
+%%----------------------------------------------------------------------------
+
+%% If the queue is not part of a shard, return unmodified name
+queue_name(VHost, QBin) ->
+    case lookup_exchange(VHost, QBin) of
+        {ok, X}  ->
+            case rabbit_sharding_util:shard(X) of
+                true ->
+                    least_consumers(VHost, QBin, shards_per_node(X));
+                _    ->
+                    {ok, QBin}
+            end;
+        _Error ->
+            {ok, QBin}
+    end.
+
+is_sharded(VHost, QBin) ->
+    case lookup_exchange(VHost, QBin) of
+        {ok, X} ->
+            rabbit_sharding_util:shard(X);
+        _Error ->
+            false
+    end.
+
+lookup_exchange(VHost, QBin) ->
+    rabbit_exchange:lookup(r(VHost, exchange, QBin)).
+
+least_consumers(VHost, QBin, N) ->
+    F = fun(QNum) ->
+                QBin2 = rabbit_sharding_util:make_queue_name(
+                          QBin, a2b(node()), QNum),
+                case consumer_count(r(VHost, queue, QBin2)) of
+                    {error, E}       -> {error, E};
+                    [{consumers, C}] -> {C, QBin2}
+                end
+
+        end,
+    case queues_with_count(F, N) of
+        []     ->
+            {error, QBin};
+        Queues ->
+            [{_, QBin3} | _ ] = lists:sort(Queues),
+            {ok, QBin3}
+    end.
+
+queues_with_count(F, N) ->
+    lists:foldl(fun (C, Acc) ->
+                        case F(C) of
+                            {error, _} -> Acc;
+                            Ret        -> [Ret|Acc]
+                        end
+                end, [], lists:seq(0, N-1)).
+
+consumer_count(QName) ->
+    rabbit_amqqueue:with(
+      QName,
+      fun(Q) ->
+              rabbit_amqqueue:info(Q, [consumers])
+      end).
+
+precondition_failed(Format, QName) ->
+    protocol_error(precondition_failed, Format, QName).
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl
new file mode 100644 (file)
index 0000000..e1b139f
--- /dev/null
@@ -0,0 +1,70 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ Sharding Plugin
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_sharding_policy_validator).
+
+-behaviour(rabbit_policy_validator).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([register/0, validate_policy/1]).
+
+-rabbit_boot_step({?MODULE,
+                   [{description, "sharding parameters"},
+                    {mfa, {?MODULE, register, []}},
+                    {requires, rabbit_registry},
+                    {enables, recovery}]}).
+
+register() ->
+    [rabbit_registry:register(Class, Name, ?MODULE) ||
+        {Class, Name} <- [{policy_validator,  <<"shards-per-node">>},
+                          {policy_validator,  <<"routing-key">>}]],
+    ok.
+
+validate_policy(KeyList) ->
+    SPN = proplists:get_value(<<"shards-per-node">>, KeyList, none),
+    RKey = proplists:get_value(<<"routing-key">>, KeyList, none),
+    case {SPN, RKey} of
+        {none, none} ->
+            ok;
+        {none, _} ->
+            {error, "shards-per-node must be specified", []};
+        {SPN, none} ->
+            validate_shards_per_node(SPN);
+        {SPN, RKey} ->
+            case validate_shards_per_node(SPN) of
+                ok   -> validate_routing_key(RKey);
+                Else -> Else
+            end
+    end.
+
+%%----------------------------------------------------------------------------
+
+validate_shards_per_node(Term) when is_number(Term) ->
+    case Term >= 0 of
+        true  ->
+            ok;
+        false ->
+            {error, "shards-per-node should be greater than 0, actually was ~p",
+             [Term]}
+    end;
+validate_shards_per_node(Term) ->
+    {error, "shards-per-node should be a number, actually was ~p", [Term]}.
+
+validate_routing_key(Term) when is_binary(Term) ->
+    ok;
+validate_routing_key(Term) ->
+    {error, "routing-key should be binary, actually was ~p", [Term]}.
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl
new file mode 100644 (file)
index 0000000..da513aa
--- /dev/null
@@ -0,0 +1,139 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ Sharding Plugin
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_sharding_shard).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([maybe_shard_exchanges/0,
+         ensure_sharded_queues/1,
+         maybe_update_shards/2,
+         stop_sharding/1]).
+
+-import(rabbit_misc, [r/3]).
+-import(rabbit_sharding_util, [a2b/1, exchange_bin/1, make_queue_name/3,
+                               routing_key/1, shards_per_node/1]).
+
+-rabbit_boot_step({rabbit_sharding_maybe_shard,
+                   [{description, "rabbit sharding maybe shard"},
+                    {mfa,         {?MODULE, maybe_shard_exchanges, []}},
+                    {requires,    recovery}]}).
+
+-define(MAX_CONNECTION_CLOSE_TIMEOUT, 10000).
+
+%% We make sure the sharded queues are created when
+%% RabbitMQ starts.
+maybe_shard_exchanges() ->
+    [maybe_shard_exchanges(V) || V <- rabbit_vhost:list()],
+    ok.
+
+maybe_shard_exchanges(VHost) ->
+    [ensure_sharded_queues(X) ||
+        X <- rabbit_sharding_util:sharded_exchanges(VHost)].
+
+%% queue needs to be declared on the respective node.
+ensure_sharded_queues(X) ->
+    add_queues(X),
+    bind_queues(X).
+
+maybe_update_shards(OldX, NewX) ->
+    maybe_unbind_queues(routing_key(OldX), routing_key(NewX), OldX),
+    add_queues(NewX),
+    bind_queues(NewX).
+
+stop_sharding(OldX) ->
+    unbind_queues(shards_per_node(OldX), OldX).
+
+%% routing key didn't change. Do nothing.
+maybe_unbind_queues(RK, RK, _OldX) ->
+    ok;
+maybe_unbind_queues(_RK, _NewRK, OldX) ->
+    unbind_queues(shards_per_node(OldX), OldX).
+
+unbind_queues(undefined, _X) ->
+    ok;
+unbind_queues(OldSPN, #exchange{name = XName} = X) ->
+    OldRKey = routing_key(X),
+    foreach_node(fun(Node) ->
+                         [unbind_queue(XName, OldRKey, N, Node)
+                          || N <- lists:seq(0, OldSPN-1)]
+                 end).
+
+add_queues(#exchange{name = XName, durable = Durable} = X) ->
+    SPN = shards_per_node(X),
+    foreach_node(fun(Node) ->
+                         [declare_queue(XName, Durable, N, Node)
+                          || N <- lists:seq(0, SPN-1)]
+                 end).
+
+bind_queues(#exchange{name = XName} = X) ->
+    RKey = routing_key(X),
+    SPN = shards_per_node(X),
+    foreach_node(fun(Node) ->
+                         [bind_queue(XName, RKey, N, Node) ||
+                             N <- lists:seq(0, SPN-1)]
+                 end).
+
+%%----------------------------------------------------------------------------
+
+declare_queue(XName, Durable, N, Node) ->
+    QBin = make_queue_name(exchange_bin(XName), a2b(Node), N),
+    QueueName = rabbit_misc:r(v(XName), queue, QBin),
+    try rabbit_amqqueue:declare(QueueName, Durable, false, [], none, Node) of
+        {_Reply, _Q} ->
+            ok
+    catch
+        _Error:Reason ->
+            rabbit_log:error("sharding failed to declare queue for exchange ~p"
+                             " - soft error:~n~p~n",
+                             [exchange_bin(XName), Reason]),
+            error
+    end.
+
+bind_queue(XName, RoutingKey, N, Node) ->
+    binding_action(fun rabbit_binding:add/2,
+                   XName, RoutingKey, N, Node,
+                   "sharding failed to bind queue ~p to exchange ~p"
+                   " - soft error:~n~p~n").
+
+unbind_queue(XName, RoutingKey, N, Node) ->
+    binding_action(fun rabbit_binding:remove/2,
+                   XName, RoutingKey, N, Node,
+                   "sharding failed to unbind queue ~p to exchange ~p"
+                   " - soft error:~n~p~n").
+
+binding_action(F, XName, RoutingKey, N, Node, ErrMsg) ->
+    QBin = make_queue_name(exchange_bin(XName), a2b(Node), N),
+    QueueName = rabbit_misc:r(v(XName), queue, QBin),
+    case F(#binding{source      = XName,
+                    destination = QueueName,
+                    key         = RoutingKey,
+                    args        = []},
+           fun (_X, _Q) -> ok end) of
+        ok              -> ok;
+        {error, Reason} ->
+            rabbit_log:error(ErrMsg, [QBin, exchange_bin(XName), Reason]),
+            error
+    end.
+
+v(#resource{virtual_host = VHost}) ->
+    VHost.
+
+foreach_node(F) ->
+    [F(Node) || Node <- running_nodes()].
+
+running_nodes() ->
+    proplists:get_value(running_nodes, rabbit_mnesia:status(), []).
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_util.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_util.erl
new file mode 100644 (file)
index 0000000..4aa6a09
--- /dev/null
@@ -0,0 +1,57 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ Sharding Plugin
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_sharding_util).
+
+-export([shard/1, sharded_exchanges/1]).
+-export([get_policy/2, shards_per_node/1, routing_key/1]).
+-export([exchange_bin/1, make_queue_name/3, a2b/1]).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-import(rabbit_misc, [pget/3]).
+
+shard(X) ->
+    case get_policy(<<"shards-per-node">>, X) of
+        undefined -> false;
+        _SPN      -> true
+    end.
+
+sharded_exchanges(VHost) ->
+    [X || X <- rabbit_exchange:list(VHost), shard(X)].
+
+shards_per_node(X) ->
+    get_policy(<<"shards-per-node">>, X).
+
+routing_key(X) ->
+    case get_policy(<<"routing-key">>, X) of
+        undefined ->
+            <<>>;
+        Value ->
+            Value
+    end.
+
+get_policy(Key, X) ->
+    rabbit_policy:get(Key, X).
+
+exchange_bin(#resource{name = XBin}) -> XBin.
+
+make_queue_name(QBin, NodeBin, QNum) ->
+    %% we do this to prevent unprintable characters in queue names
+    QNumBin = list_to_binary(lists:flatten(io_lib:format("~p", [QNum]))),
+    <<"sharding: ", QBin/binary, " - ", NodeBin/binary, " - ", QNumBin/binary>>.
+
+a2b(A) -> list_to_binary(atom_to_list(A)).
diff --git a/deps/rabbitmq_sharding/src/rabbitmq_sharding.app.src b/deps/rabbitmq_sharding/src/rabbitmq_sharding.app.src
new file mode 100644 (file)
index 0000000..8fb742a
--- /dev/null
@@ -0,0 +1,6 @@
+{application, rabbitmq_sharding,
+ [{description, "RabbitMQ Sharding Plugin"},
+  {vsn, "0.1.0"},
+  {modules, []},
+  {registered, []},
+  {applications, [kernel, stdlib, rabbit_common, rabbit]}]}.
diff --git a/deps/rabbitmq_shovel/CODE_OF_CONDUCT.md b/deps/rabbitmq_shovel/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_shovel/CONTRIBUTING.md b/deps/rabbitmq_shovel/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_shovel/Makefile b/deps/rabbitmq_shovel/Makefile
new file mode 100644 (file)
index 0000000..1922956
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_shovel
+
+DEPS = rabbit_common rabbit amqp_client
+TEST_DEPS = rabbitmq_ct_helpers
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_shovel/README.md b/deps/rabbitmq_shovel/README.md
new file mode 100644 (file)
index 0000000..f2eb61a
--- /dev/null
@@ -0,0 +1,22 @@
+## RabbitMQ Shovel
+
+RabbitMQ shovel is a WAN-friendly tool for moving messages from
+a queue to an exchange, typically between different nodes.
+
+
+## Supported RabbitMQ Versions
+
+This plugin ships with RabbitMQ, there is no need to
+install it separately.
+
+
+## Documentation        
+
+See [RabbitMQ shovel plugin](http://www.rabbitmq.com/shovel.html) on rabbitmq.com.
+
+
+## License and Copyright
+
+Released under [the same license as RabbitMQ](https://www.rabbitmq.com/mpl.html).
+
+2007-2016 (c) Pivotal Software Inc.
diff --git a/deps/rabbitmq_shovel/erlang.mk b/deps/rabbitmq_shovel/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards in and beetween for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+# --- erlang.mk package index (continued) ---
+# One stanza per known package. Each defines the
+# pkg_<name>_{name,description,homepage,fetch,repo,commit} variables consumed
+# by the 'search' target and the dep_name/dep_repo/dep_commit helpers below.
+# 'commit = master' pins nothing: it tracks the tip of the default branch.
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+# NOTE(review): description wording fixed; upstream index read
+# "zab propotocol implement by erlang" (typo + grammar).
+pkg_zab_engine_description = Zab protocol implementation in Erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+# Remaining package index stanzas (same pkg_<name>_* layout as above).
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print: emit one human-readable record for package $(1). The "Pkg name"
+# line is omitted when the package name equals the app name. The blank line
+# kept inside the define (before endef) is intentional: it separates records.
+define pkg_print
+	$(verbose) printf "%s\n" \
+		$(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+		"App name:    $(pkg_$(1)_name)" \
+		"Description: $(pkg_$(1)_description)" \
+		"Home page:   $(pkg_$(1)_homepage)" \
+		"Fetch with:  $(pkg_$(1)_fetch)" \
+		"Repository:  $(pkg_$(1)_repo)" \
+		"Commit:      $(pkg_$(1)_commit)" \
+		""
+
+endef
+
+# 'make search q=<substring>' prints index entries whose name or description
+# contains the substring (case-insensitive via core_lc); a bare 'make search'
+# dumps the whole index.
+search:
+ifdef q
+	$(foreach p,$(PACKAGES), \
+		$(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+			$(call pkg_print,$(p))))
+else
+	$(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Space-separated list of dependency names to skip entirely.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Point rebar-based builds at our deps directory so everything shares one tree.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Resolve a dependency's on-disk name, repository URL and revision.
+# An explicit dep_<name> definition ("fetch-method repo commit") takes
+# precedence over the package index above; git:// GitHub URLs are rewritten
+# to https:// so fetching works behind restrictive firewalls.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+	$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Append apps/ and deps/ to ERL_LIBS unless one of them is already present.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+	ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+	ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# apps: build every application under $(APPS_DIR). A sub-make started with
+# IS_APP=1 gets an empty target so recursion bottoms out here.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+	$(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+	$(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		mkdir -p $$dep/ebin || exit $$?; \
+	done
+# apps.log records apps already built during this top-level invocation so
+# recursive sub-makes do not build the same app twice.
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+			:; \
+		else \
+			echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+			$(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+		fi \
+	done
+endif
+
+# deps: fetch (via the dep_target rules below) and build all dependencies.
+# SKIP_DEPS short-circuits the whole target.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+	$(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+	$(verbose) mkdir -p $(ERLANG_MK_TMP)
+# deps.log plays the same role as apps.log above: build each dependency at
+# most once per top-level invocation, even across recursive sub-makes.
+	$(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+		if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+			:; \
+		else \
+			echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+			if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+				$(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+			else \
+				echo "Error: No Makefile to build dependency $$dep."; \
+				exit 2; \
+			fi \
+		fi \
+	done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch: decide how to adapt a freshly fetched dependency $(1):
+#   - ships erlang.mk                    -> fix .app.src, swap in our erlang.mk;
+#   - Makefile includes ../*.mk or mentions rebar (in itself or any *.mk)
+#                                        -> full autopatch (dep_autopatch2);
+#   - any other Makefile                 -> only regenerate the .app file;
+#   - no Makefile and no src/            -> stub no-op Makefile;
+#   - no Makefile but src/ present       -> full autopatch.
+define dep_autopatch
+	if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+		$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+		$(call dep_autopatch_erlang_mk,$(1)); \
+	elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+		if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+			$(call dep_autopatch2,$(1)); \
+		elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+			$(call dep_autopatch2,$(1)); \
+		elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+			$(call dep_autopatch2,$(1)); \
+		else \
+			$(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+		fi \
+	else \
+		if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+			$(call dep_autopatch_noop,$(1)); \
+		else \
+			$(call dep_autopatch2,$(1)); \
+		fi \
+	fi
+endef
+
+# dep_autopatch2: fully autopatch dependency $(1) — evaluate an optional
+# src/$(1).app.src.script, normalize the .app.src, then either translate the
+# rebar configuration (when any rebar artifact exists) or generate a minimal
+# erlang.mk Makefile.
+# NOTE(review): the obsolescent 'test ... -o ...' form was replaced by
+# '[ ] || [ ]' chaining (POSIX marks -a/-o obsolescent and ambiguous), and
+# '$1' normalized to '$(1)' to match the rest of the file.
+define dep_autopatch2
+	if [ -f $(DEPS_DIR)/$(1)/src/$(1).app.src.script ]; then \
+		$(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+	fi; \
+	$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+	if [ -f $(DEPS_DIR)/$(1)/rebar ] || [ -f $(DEPS_DIR)/$(1)/rebar.config ] || [ -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+		$(call dep_autopatch_fetch_rebar); \
+		$(call dep_autopatch_rebar,$(1)); \
+	else \
+		$(call dep_autopatch_gen,$(1)); \
+	fi
+endef
+
+# dep_autopatch_noop: write a stub Makefile so 'make -C <dep>' succeeds for
+# dependencies that ship nothing buildable.
+define dep_autopatch_noop
+	printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# (Set NO_AUTOPATCH_ERLANG_MK to keep the dependency's bundled copy; the
+# replacement is a one-line include of our own erlang.mk, via relative path.)
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+	echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+		> $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+	:
+endef
+endif
+
+# dep_autopatch_gen: minimal erlang.mk-based Makefile for dependencies that
+# have Erlang sources but no usable build system of their own.
+define dep_autopatch_gen
+	printf "%s\n" \
+		"ERLC_OPTS = +debug_info" \
+		"include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: fetch and build the pinned rebar2 used to drive
+# legacy rebar builds. The clone is cached in $(ERLANG_MK_TMP)/rebar, so the
+# build runs at most once.
+# NOTE(review): steps are now chained with '&&' instead of ';' — previously a
+# failed clone or cd let 'git checkout'/'make' run in whatever directory the
+# shell happened to be in.
+define dep_autopatch_fetch_rebar
+	mkdir -p $(ERLANG_MK_TMP); \
+	if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+		git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar && \
+		cd $(ERLANG_MK_TMP)/rebar && \
+		git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173 && \
+		$(MAKE) && \
+		cd -; \
+	fi
+endef
+
+# dep_autopatch_rebar: stash the dependency's own Makefile as Makefile.orig.mk
+# (patched pre-hooks below still invoke it with 'make -f Makefile.orig.mk'),
+# run the rebar-config translator, and delete any prebuilt ebin/<dep>.app so
+# it gets regenerated from the patched .app.src.
+define dep_autopatch_rebar
+	if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+		mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+	fi; \
+	$(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+	rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# dep_autopatch_rebar.erl: Erlang snippet (run via $(call erlang,...)) that
+# translates dependency $1's rebar configuration into an erlang.mk Makefile.
+# It evaluates rebar.config and (if present) rebar.config.script, appends
+# ERLC_OPTS / DEPS / COMPILE_FIRST and pre-hook rules to the dependency's
+# Makefile, converts port_specs/port_env into c_src/Makefile.erlang.mk, and
+# compiles and runs any rebar plugins the dependency declares.
+# NOTE(review): left byte-identical to upstream erlang.mk — the '$$' and '\'
+# escaping here (make expands the define before erl sees it) is extremely
+# fragile, so no comments were added inside the define body.
+define dep_autopatch_rebar.erl
+	application:load(rebar),
+	application:set_env(rebar, log_level, debug),
+	Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+		{ok, Conf0} -> Conf0;
+		_ -> []
+	end,
+	{Conf, OsEnv} = fun() ->
+		case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+			false -> {Conf1, []};
+			true ->
+				Bindings0 = erl_eval:new_bindings(),
+				Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+				Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+				Before = os:getenv(),
+				{ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+				{Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+		end
+	end(),
+	Write = fun (Text) ->
+		file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+	end,
+	Escape = fun (Text) ->
+		re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+	end,
+	Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+		"rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+	Write("C_SRC_DIR = /path/do/not/exist\n"),
+	Write("C_SRC_TYPE = rebar\n"),
+	Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+	Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+	fun() ->
+		Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+		case lists:keyfind(erl_opts, 1, Conf) of
+			false -> ok;
+			{_, ErlOpts} ->
+				lists:foreach(fun
+					({d, D}) ->
+						Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+					({i, I}) ->
+						Write(["ERLC_OPTS += -I ", I, "\n"]);
+					({platform_define, Regex, D}) ->
+						case rebar_utils:is_arch(Regex) of
+							true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+							false -> ok
+						end;
+					({parse_transform, PT}) ->
+						Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+					(_) -> ok
+				end, ErlOpts)
+		end,
+		Write("\n")
+	end(),
+	fun() ->
+		File = case lists:keyfind(deps, 1, Conf) of
+			false -> [];
+			{_, Deps} ->
+				[begin case case Dep of
+							{N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+							{N, S} when is_tuple(S) -> {N, S};
+							{N, _, S} -> {N, S};
+							{N, _, S, _} -> {N, S};
+							_ -> false
+						end of
+					false -> ok;
+					{Name, Source} ->
+						{Method, Repo, Commit} = case Source of
+							{hex, V} -> {hex, V, undefined};
+							{git, R} -> {git, R, master};
+							{M, R, {branch, C}} -> {M, R, C};
+							{M, R, {ref, C}} -> {M, R, C};
+							{M, R, {tag, C}} -> {M, R, C};
+							{M, R, C} -> {M, R, C}
+						end,
+						Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+				end end || Dep <- Deps]
+		end
+	end(),
+	fun() ->
+		case lists:keyfind(erl_first_files, 1, Conf) of
+			false -> ok;
+			{_, Files} ->
+				Names = [[" ", case lists:reverse(F) of
+					"lre." ++ Elif -> lists:reverse(Elif);
+					Elif -> lists:reverse(Elif)
+				end] || "src/" ++ F <- Files],
+				Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+		end
+	end(),
+	Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+	Write("\npreprocess::\n"),
+	Write("\npre-deps::\n"),
+	Write("\npre-app::\n"),
+	PatchHook = fun(Cmd) ->
+		case Cmd of
+			"make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+			"gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+			"make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+			"gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+			_ -> Escape(Cmd)
+		end
+	end,
+	fun() ->
+		case lists:keyfind(pre_hooks, 1, Conf) of
+			false -> ok;
+			{_, Hooks} ->
+				[case H of
+					{'get-deps', Cmd} ->
+						Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+					{compile, Cmd} ->
+						Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+					{Regex, compile, Cmd} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+							false -> ok
+						end;
+					_ -> ok
+				end || H <- Hooks]
+		end
+	end(),
+	ShellToMk = fun(V) ->
+		re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+			"-Werror\\\\b", "", [{return, list}, global])
+	end,
+	PortSpecs = fun() ->
+		case lists:keyfind(port_specs, 1, Conf) of
+			false ->
+				case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+					false -> [];
+					true ->
+						[{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+							proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+				end;
+			{_, Specs} ->
+				lists:flatten([case S of
+					{Output, Input} -> {ShellToMk(Output), Input, []};
+					{Regex, Output, Input} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {ShellToMk(Output), Input, []};
+							false -> []
+						end;
+					{Regex, Output, Input, [{env, Env}]} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {ShellToMk(Output), Input, Env};
+							false -> []
+						end
+				end || S <- Specs])
+		end
+	end(),
+	PortSpecWrite = fun (Text) ->
+		file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+	end,
+	case PortSpecs of
+		[] -> ok;
+		_ ->
+			Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+			PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+				[code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+			PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+				[code:lib_dir(erl_interface, lib)])),
+			[PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+			FilterEnv = fun(Env) ->
+				lists:flatten([case E of
+					{_, _} -> E;
+					{Regex, K, V} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {K, V};
+							false -> []
+						end
+				end || E <- Env])
+			end,
+			MergeEnv = fun(Env) ->
+				lists:foldl(fun ({K, V}, Acc) ->
+					case lists:keyfind(K, 1, Acc) of
+						false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+						{_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+					end
+				end, [], Env)
+			end,
+			PortEnv = case lists:keyfind(port_env, 1, Conf) of
+				false -> [];
+				{_, PortEnv0} -> FilterEnv(PortEnv0)
+			end,
+			PortSpec = fun ({Output, Input0, Env}) ->
+				filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+				Input = [[" ", I] || I <- Input0],
+				PortSpecWrite([
+					[["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+					case $(PLATFORM) of
+						darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+						_ -> ""
+					end,
+					"\n\nall:: ", Output, "\n\n",
+					"%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					[[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+					Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+						"$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+					"\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+					case {filename:extension(Output), $(PLATFORM)} of
+					    {[], _} -> "\n";
+					    {_, darwin} -> "\n";
+					    _ -> " -shared\n"
+					end])
+			end,
+			[PortSpec(S) || S <- PortSpecs]
+	end,
+	Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+	RunPlugin = fun(Plugin, Step) ->
+		case erlang:function_exported(Plugin, Step, 2) of
+			false -> ok;
+			true ->
+				c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+				Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+					dict:store(base_dir, "", dict:new())}, undefined),
+				io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+		end
+	end,
+	fun() ->
+		case lists:keyfind(plugins, 1, Conf) of
+			false -> ok;
+			{_, Plugins} ->
+				[begin
+					case lists:keyfind(deps, 1, Conf) of
+						false -> ok;
+						{_, Deps} ->
+							case lists:keyfind(P, 1, Deps) of
+								false -> ok;
+								_ ->
+									Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+									io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+									io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+									code:add_patha(Path ++ "/ebin")
+							end
+					end
+				end || P <- Plugins],
+				[case code:load_file(P) of
+					{module, P} -> ok;
+					_ ->
+						case lists:keyfind(plugin_dir, 1, Conf) of
+							false -> ok;
+							{_, PluginsDir} ->
+								ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+								{ok, P, Bin} = compile:file(ErlFile, [binary]),
+								{module, P} = code:load_binary(P, ErlFile, Bin)
+						end
+				end || P <- Plugins],
+				[RunPlugin(P, preprocess) || P <- Plugins],
+				[RunPlugin(P, pre_compile) || P <- Plugins],
+				[RunPlugin(P, compile) || P <- Plugins]
+		end
+	end(),
+	halt()
+endef
+
+# dep_autopatch_app.erl: rewrite the 'modules' entry of an existing
+# ebin/$1.app from the .erl files actually present under src/ (recursively).
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluate src/$1.app.src.script and write
+# the resulting term back as a plain src/$1.app.src.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalize src/$1.app.src (falling back to
+# ebin/$1.app as input when no .app.src exists): clear the modules list
+# (erlang.mk regenerates it), replace a '{vsn, git}' marker with "git",
+# ensure a 'registered' entry, and delete the ebin input when it was used.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# dep_fetch_<method>: one definition per fetch method. Each leaves the
+# dependency checked out in $(DEPS_DIR)/<name> at the resolved revision.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Assumes $(DEPS_DIR)/$1 is a registered submodule of the current repository.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# 'cp' method: the "repo" is a local directory copied recursively.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# dep_fetch_hex.erl: download package $(1) at version $(2) from the hex.pm
+# tarball mirror and unpack its inner contents.tar.gz into $(DEPS_DIR)/$1.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback when dep_fetch (below) cannot resolve a fetch method.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Old format: 'dep_<name> = <repo> [<commit>]' (git implied, commit
+# defaulting to master).
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# dep_fetch: resolve the fetch-method name for dependency $(1): an explicit
+# dep_<name> entry wins (an unrecognized method falls back to 'legacy' when
+# building inside a dependency, otherwise 'fail'); failing that, the package
+# index supplies the method; otherwise 'fail'.
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 93%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/include/rabbit_shovel.hrl
rename to deps/rabbitmq_shovel/include/rabbit_shovel.hrl
index 5168c8f4b8675748de3315fae5ae2741d24d2631..e4e4d9e773f6753036a0ef4f9e30b9837ec1fdaf 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -record(endpoint,
diff --git a/deps/rabbitmq_shovel/rabbitmq-components.mk b/deps/rabbitmq_shovel/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel.erl
index c945321ad4822c0f44559337e1df6b58435cc1e0..a078db6b263bbc5d0157ccefb04cf37b234f1c91 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel).
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_config.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_config.erl
index a20b73bf1469d2595548d03019ca371d736cfef8..34e4315f662b2cc9237d1de025bc5c86d40b4b63 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_config).
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_dyn_worker_sup.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup.erl
index 0e5991df4c336ec69a47ffdeaf5f3a801f6d695e..4dec9ea3316345f7690629a9d2a35f75dbbe060c 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_dyn_worker_sup).
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl
index 38bbd50a3c19fc092e36cd72056fab97aab1ecd2..0f6ee877b17a587b2c6b06a5774000365e178a38 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_dyn_worker_sup_sup).
@@ -44,7 +44,7 @@ start_child(Name, Def) ->
     case mirrored_supervisor:start_child(
            ?SUPERVISOR,
            {Name, {rabbit_shovel_dyn_worker_sup, start_link, [Name, Def]},
-            transient, ?MAX_WAIT, worker, [rabbit_shovel_dyn_worker_sup]}) of
+            transient, ?WORKER_WAIT, worker, [rabbit_shovel_dyn_worker_sup]}) of
         {ok,                      _Pid}  -> ok;
         {error, {already_started, _Pid}} -> ok
     end.
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_parameters.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_parameters.erl
index 49bbcac748e1db5d160428d50d9ecc58da66035b..2cb4b6034ae987f8e0e3d8196a3ca02083895578 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_parameters).
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_status.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_status.erl
index 37d738ec020da27623981f2b2a8fddd0554b55c7..41b6cefde3afc544649f0f1a9abb48adc9957fe8 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_status).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_sup.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_sup.erl
index b0bce9db22cb0640847b20d7bcf81c307a9b97a7..6463c9d14565680f9190a726f8dcda6d3cb19750 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_sup).
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_util.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_util.erl
index a3b0f9cb14d9ec0e40760b0a28e83bdee9d22418..30a64b746b3e893e0cc96529b60ef733cfe6409a 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_util).
similarity index 91%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_worker.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_worker.erl
index e5a8f638597eb2ad82d52b52877d3a12b6ad535d..f2660fc57f7b9834c282e630aa0734bad833c88c 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_worker).
@@ -21,6 +21,9 @@
 -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
          code_change/3]).
 
+%% for testing purposes
+-export([get_connection_name/1]).
+
 -include_lib("amqp_client/include/amqp_client.hrl").
 -include("rabbit_shovel.hrl").
 
@@ -54,12 +57,12 @@ handle_call(_Msg, _From, State) ->
     {noreply, State}.
 
 handle_cast(init, State = #state{config = Config}) ->
-    random:seed(now()),
     #shovel{sources = Sources, destinations = Destinations} = Config,
+    #state{name = Name} = State,
     {InboundConn, InboundChan, InboundURI} =
-        make_conn_and_chan(Sources#endpoint.uris),
+        make_conn_and_chan(Sources#endpoint.uris, Name),
     {OutboundConn, OutboundChan, OutboundURI} =
-        make_conn_and_chan(Destinations#endpoint.uris),
+        make_conn_and_chan(Destinations#endpoint.uris, Name),
 
     %% Don't trap exits until we have established connections so that
     %% if we try to shut down while waiting for a connection to be
@@ -228,14 +231,30 @@ publish(Tag, Method, Msg,
                         decr_remaining(1, State)
       end).
 
-make_conn_and_chan(URIs) ->
-    URI = lists:nth(random:uniform(length(URIs)), URIs),
+make_conn_and_chan(URIs, ShovelName) ->
+    URI = lists:nth(rand_compat:uniform(length(URIs)), URIs),
     {ok, AmqpParam} = amqp_uri:parse(URI),
-    {ok, Conn} = amqp_connection:start(AmqpParam),
+    ConnName = get_connection_name(ShovelName),
+    {ok, Conn} = amqp_connection:start(AmqpParam, ConnName),
     link(Conn),
     {ok, Chan} = amqp_connection:open_channel(Conn),
     {Conn, Chan, list_to_binary(amqp_uri:remove_credentials(URI))}.
 
+%% for static shovels, name is an atom from the configuration file
+get_connection_name(ShovelName) when is_atom(ShovelName) ->
+    Prefix = <<"Shovel ">>,
+    ShovelNameAsBinary = atom_to_binary(ShovelName, utf8),
+    <<Prefix/binary, ShovelNameAsBinary/binary>>;
+
+%% for dynamic shovels, name is a tuple with a binary
+get_connection_name({_, Name}) when is_binary(Name) ->
+    Prefix = <<"Shovel ">>,
+    <<Prefix/binary, Name/binary>>;
+
+%% fallback
+get_connection_name(_) ->
+    <<"Shovel">>.
+
 remaining(_Ch, #shovel{delete_after = never}) ->
     unlimited;
 remaining(Ch, #shovel{delete_after = 'queue-length', queue = Queue}) ->
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbit_shovel_worker_sup.erl
rename to deps/rabbitmq_shovel/src/rabbit_shovel_worker_sup.erl
index 1705d5f0f0e2d91a28ea892fb8de8a3dd4696560..0c972ca6b917b73812a00f25bf29014fc935332f 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_worker_sup).
similarity index 81%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel/src/rabbitmq_shovel.app.src
rename to deps/rabbitmq_shovel/src/rabbitmq_shovel.app.src
index 895aa757b43d601c9168d65e3ebb5ef31f654bcd..40c538d7730b960b54ba5cbd8e0b03ca20271a09 100644 (file)
@@ -1,6 +1,6 @@
 {application, rabbitmq_shovel,
  [{description, "Data Shovel for RabbitMQ"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {env, [{defaults, [{prefetch_count,     1000},
@@ -10,4 +10,4 @@
                      {reconnect_delay,    5}]
          }]},
   {mod, {rabbit_shovel, []}},
-  {applications, [kernel, stdlib, rabbit, amqp_client]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit, amqp_client]}]}.
diff --git a/deps/rabbitmq_shovel_management/CODE_OF_CONDUCT.md b/deps/rabbitmq_shovel_management/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_shovel_management/CONTRIBUTING.md b/deps/rabbitmq_shovel_management/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_shovel_management/Makefile b/deps/rabbitmq_shovel_management/Makefile
new file mode 100644 (file)
index 0000000..3c0aef0
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_shovel_management
+
+DEPS = rabbit_common rabbit rabbitmq_management rabbitmq_shovel webmachine
+TEST_DEPS = rabbitmq_ct_helpers
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_shovel_management/README.md b/deps/rabbitmq_shovel_management/README.md
new file mode 100644 (file)
index 0000000..7d68155
--- /dev/null
@@ -0,0 +1,34 @@
+# RabbitMQ Shovel Management Plugin
+
+Adds information on shovel status to the management plugin. Build it
+like any other plugin.
+
+If you have a heterogenous cluster (where the nodes have different
+plugins installed), this should be installed on the same nodes as the
+management plugin.
+
+
+## Installing
+
+This plugin ships with RabbitMQ. Enable it with
+
+```
+[sudo] rabbitmq-plugins rabbitmq_shovel_management
+```
+
+
+## Usage
+
+When the plugin is enabled, you'll find a shovel management
+link under the Admin tab.
+
+The HTTP API is very small:
+
+ * `GET /api/shovels`
+
+
+## License and Copyright
+
+Released under [the same license as RabbitMQ](https://www.rabbitmq.com/mpl.html).
+
+2007-2016 (c) Pivotal Software Inc.
diff --git a/deps/rabbitmq_shovel_management/erlang.mk b/deps/rabbitmq_shovel_management/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - an Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards 'and'/'or' for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = Zab protocol implementation in Erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print: print a human-readable summary of one package-index entry.
+# $(1) is the package key used in the pkg_$(1)_* variables defined above.
+# The "Pkg name" line is emitted only when the key differs from the app
+# name (core_eq compares the two strings; equal -> empty -> line omitted).
+# The trailing "" plus the blank line before endef yield one empty line
+# between consecutive entries when called from a $(foreach).
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# search: list packages from the built-in index.
+# With q=<string> on the command line, show only entries whose name or
+# description contains the query (both sides lowercased via core_lc, so the
+# match is case-insensitive); without q, dump every package.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies listed in IGNORE_DEPS are excluded from ALL_DEPS_DIRS below
+# and therefore never fetched or built; exported for recursive makes.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Point rebar-based dependencies at the same deps directory we use.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name/dep_repo/dep_commit resolve a dependency key $(1) to its app
+# name, repository URL and commit: a user-supplied dep_<name> variable
+# takes precedence, falling back to the built-in package index
+# (pkg_<name>_* variables). git:// GitHub URLs are rewritten to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# First-level directories under APPS_DIR, and the resolved directory of
+# every (non-ignored) build-time and runtime dependency.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Add apps and deps to the Erlang code path unless they are already on it.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# apps: build every application under APPS_DIR exactly once.
+# apps.log records directories already built so the recursive $(MAKE)
+# invocations (IS_APP=1) neither rebuild nor loop on the same app; the
+# log is reset only by the top-level (non-IS_APP, non-IS_DEP) make.
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# deps: build all fetched dependencies, skipping any already recorded in
+# deps.log (the log is shared with recursive IS_DEP=1 makes and reset only
+# at top level). A dependency without any flavor of Makefile is a hard
+# error: autopatching is expected to have generated one by this point.
+# SKIP_DEPS turns the target into a no-op.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch: decide how to make dependency $(1) buildable:
+# - an erlang.mk-based dep gets its .app.src patched and its bundled
+#   erlang.mk replaced (dep_autopatch_erlang_mk);
+# - a dep whose Makefile includes sibling .mk files or mentions rebar
+#   (in the Makefile itself or any non-erlang.mk *.mk file) is fully
+#   rewritten via dep_autopatch2;
+# - any other Makefile-based dep only gets its .app.src patched;
+# - a dep with no Makefile gets a generated one via dep_autopatch2, or a
+#   noop Makefile when it has no src/ directory at all.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2: patch dependency $(1)'s .app.src (evaluating an
+# .app.src.script first when present), then either convert a rebar-based
+# dep (any of rebar / rebar.config / rebar.config.script present) using
+# the bundled rebar, or generate a minimal erlang.mk Makefile for it.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop: give dependency $(1) a do-nothing Makefile so the
+# deps target's "No Makefile" error is not triggered for source-less deps.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# The replacement is a one-line include of this very erlang.mk, so all
+# deps build with the same (current) version; set NO_AUTOPATCH_ERLANG_MK
+# to keep each dep's own bundled copy instead.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen: generate a minimal erlang.mk Makefile for dependency
+# $(1) (debug_info only, delegating everything to the top-level erlang.mk).
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# dep_autopatch_fetch_rebar: clone and build a pinned rebar commit into
+# $(ERLANG_MK_TMP)/rebar, once; later calls reuse the cached checkout.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar: convert rebar-based dependency $(1) to erlang.mk.
+# The original Makefile (if any) is preserved as Makefile.orig.mk, the
+# Erlang conversion script generates a fresh one from rebar.config, and
+# any prebuilt .app file is removed so it gets regenerated on build.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# dep_autopatch_app.erl: Erlang snippet (run via the erlang.mk $(call erlang,...)
+# helper) that refreshes the {modules, ...} entry of an already-built
+# $(DEPS_DIR)/$1/ebin/$1.app file so it lists every .erl module found
+# recursively under the dependency's src/ directory. No-op when the .app
+# file does not exist. Always terminates the VM with halt().
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluates a dependency's src/$1.app.src.script
+# with file:script/2 (empty bindings) and writes the resulting term back over
+# src/$1.app.src, i.e. it "freezes" a dynamic .app.src.script into a plain
+# .app.src file.
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalizes a dependency's application resource file
+# for an erlang.mk build: reads src/$1.app.src (falling back to ebin/$1.app when
+# no .app.src exists), empties the modules list (erlang.mk regenerates it),
+# replaces a rebar-style {vsn, git} marker with the literal "git" string, and
+# adds a {registered, []} entry when missing. When the input was ebin/$1.app,
+# the result is written to src/$1.app.src and the original file is deleted.
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+# Fetch method: git. Clone quietly without checking out a working tree (-n),
+# then check out the commit/tag/branch resolved by dep_commit.
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Fetch method: git-submodule. The dependency lives in-tree as a git submodule
+# under $(DEPS_DIR); just initialize/update it.
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Fetch method: hg. Clone without updating the working copy (-U), then update
+# to the pinned revision.
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Fetch method: svn. Plain quiet checkout of the repository URL.
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Fetch method: cp. "Repository" is a local path; copy it recursively.
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# dep_fetch_hex.erl: downloads the $(1)-$(2).tar package tarball from the
+# hex.pm S3 mirror over HTTPS (httpc; requires ssl/inets started), unpacks the
+# outer tar in memory, and extracts the inner contents.tar.gz into the
+# dependency directory. Pattern-matches on the 200 status so any HTTP failure
+# crashes the script rather than silently producing an empty dep.
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# The version is the second word of the dep_$(1) description.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fetch method of last resort: report the unknown dependency on stderr and
+# fail the recipe with a distinctive exit code (78).
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Legacy format: dep_<name> = <git url> [<ref>]; defaults the ref to master.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch method name for dependency $(1): the first word of
+# dep_$(1) when a matching dep_fetch_<method> macro exists; otherwise
+# "legacy" when building as a sub-dependency (IS_DEP) or "fail". With no
+# dep_$(1) at all, fall back to the built-in package index ($(pkg_$(1)_fetch))
+# or "fail".
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+# dep_target: instantiated via $(eval $(call dep_target,<dep>)) below; emits
+# the rule that materializes $(DEPS_DIR)/<dep>. Steps:
+#   1. refuse to proceed (exit 17) if an application of the same name already
+#      exists in $(APPS_DIR);
+#   2. fetch with the method resolved by dep_fetch;
+#   3. run autoreconf when configure.ac/configure.in exist without a generated
+#      configure script, then run ./configure (failures tolerated: leading '-');
+#   4. unless listed in NO_AUTOPATCH, autopatch the dep — with RabbitMQ-specific
+#      special cases for amqp_client (RABBITMQ_CLIENT_PATCH) and rabbit
+#      (RABBITMQ_SERVER_PATCH) that clone rabbitmq-codegen/rabbitmq-server
+#      instead of autopatching.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+# Instantiate a fetch/prepare rule (dep_target above) for every build-time and
+# run-time dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every application in $(ALL_APPS_DIRS), but only
+# from the top-level project (IS_APP guards against infinite recursion).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+# distclean removes the whole deps directory unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin: -include plugin file $(1) from $(DEPS_DIR) and make it
+# depend on its owning dependency $(2) so the dep is fetched first.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# Each DEP_PLUGINS entry is either "<dep>/<path/to/plugin.mk>" or just
+# "<dep>" (which implies <dep>/plugins.mk).
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL template compilation plugin.
+
+# Configuration.
+
+# DTL_FULL_PATH: when non-empty, module names encode the template's path under
+# DTL_PATH ('/' replaced by '_') instead of just its basename.
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# Expected .beam outputs mirror the module-naming rule above.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# erlydtl_compile.erl: compiles each template in $(1) with erlydtl:compile/3
+# into ebin/, deriving the module name per the DTL_FULL_PATH rule and
+# appending $(DTL_SUFFIX). Only ok / {ok, _} results are accepted; an error
+# return fails the case clause and aborts.
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# Compile only the templates newer than the target ($?).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin (erlang_protobuffs).
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# compile_proto: generate .erl/.hrl from the .proto files in $(1), compile the
+# generated sources into ebin/, then discard the intermediate .erl files.
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+# compile_proto.erl: for each .proto file, emit generated sources next to it
+# (ebin/ for .erl, include/ for .hrl) via protobuffs_compile.
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+# Hook proto compilation into the app build, only for out-of-date files ($?).
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+# Modules (without .erl) that must be compiled before the rest, e.g. behaviours.
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+# Modules excluded from the erlc_verbose listing below.
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# Convention: <tag>_verbose_0 prints a short label (V=0), <tag>_verbose_2
+# traces commands with `set -x` (V=2), and V=1 (empty) echoes the raw recipe.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# The `app` target builds deps, the dependency file, then the project itself.
+# When ebin/test exists the previous build was a test build, so clean first.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file: template for the generated ebin/$(PROJECT).app. $(1) is the id
+# (only included for deps), $(2) the modules list. The first variant (no
+# src/$(PROJECT_MOD).erl, i.e. a library application) omits registered names
+# and the {mod, ...} callback entry.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+# Each .asn1 under asn1/ produces a src/*.erl (plus .hrl/.asn1db in include/).
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# compile_asn1: run erlc +noobj on $(1), then move generated sources/headers
+# into their conventional locations.
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+# MIBs compile to priv/mibs/*.bin, then to include/*.hrl.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+# .xrl/.yrl scanners and parsers generate src/*.erl, added to ERL_FILES.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl: generates the $(PROJECT).d dependency makefile (path in $(1)).
+# It parses every module in ERL_FILES form-by-form and records inter-module
+# relations from -behaviour/-behavior, -compile({parse_transform, ...}),
+# -include / -include_lib and -import attributes: file-level edges go into an
+# ets bag (emitted as "<file>:: <deps>; @touch $@" rules), while module-level
+# edges feed an acyclic digraph whose reverse topological order yields the
+# COMPILE_FIRST list appended at the end of the file.
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+# Regenerate $(PROJECT).d when sources, headers or any Makefile change,
+# unless NO_MAKEDEP is set and the file already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_shovel_management/rabbitmq-components.mk b/deps/rabbitmq_shovel_management/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel-management/src/rabbit_shovel_mgmt.erl
rename to deps/rabbitmq_shovel_management/src/rabbit_shovel_mgmt.erl
index 3f5c846360a276562ae2bec126c8d45153c714a8..27f09379d8604e0486944940b3fe0e7420068b08 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_shovel_mgmt).
similarity index 53%
rename from rabbitmq-server/plugins-src/rabbitmq-shovel-management/src/rabbitmq_shovel_management.app.src
rename to deps/rabbitmq_shovel_management/src/rabbitmq_shovel_management.app.src
index 61e4ca0458b9cc07769055217e1bb3210a5ff5ba..fc6891132ba8471e00eeff4cb14ca90ad8264112 100644 (file)
@@ -1,6 +1,6 @@
 {application, rabbitmq_shovel_management,
  [{description, "Shovel Status"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
-  {applications, [kernel, stdlib, rabbit, rabbitmq_management]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit, rabbitmq_management]}]}.
diff --git a/deps/rabbitmq_stomp/CODE_OF_CONDUCT.md b/deps/rabbitmq_stomp/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_stomp/CONTRIBUTING.md b/deps/rabbitmq_stomp/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_stomp/Makefile b/deps/rabbitmq_stomp/Makefile
new file mode 100644 (file)
index 0000000..4d421e8
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_stomp
+
+DEPS = ranch rabbit_common rabbit amqp_client
+TEST_DEPS = rabbitmq_ct_helpers
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
similarity index 70%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/README.md
rename to deps/rabbitmq_stomp/README.md
index 782d56331adf662775d0f270c32a73b890ae8532..92de2e5e626c528966fea5085d1e6d7e9383ebf9 100644 (file)
@@ -12,3 +12,7 @@ it, use <href="http://www.rabbitmq.com/man/rabbitmq-plugins.1.man.html">rabbitmq
 ## Documentation
 
 [RabbitMQ STOMP plugin documentation](http://www.rabbitmq.com/stomp.html).
+
+## Continuous Integration
+
+[![Build Status](https://travis-ci.org/rabbitmq/rabbitmq-stomp.svg?branch=master)](https://travis-ci.org/rabbitmq/rabbitmq-stomp)
diff --git a/deps/rabbitmq_stomp/erlang.mk b/deps/rabbitmq_stomp/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards 'and'/'or' for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool    : stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab protocol implemented in erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print: print a human-readable summary of one package index entry.
+# $(1) is the package key; the pkg_$(1)_* variables are the index fields
+# registered above. The "Pkg name:" line is emitted only when the key
+# differs from the registered app name (the core_eq comparison).
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# `make search q=<term>` prints the index entries whose package name or
+# description contains <term> (case-insensitive via core_lc); when `q` is
+# not given, every entry in PACKAGES is printed.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# Resolve a dependency key $(1) to its app name, repository URL and commit.
+# An explicit dep_<name> definition takes precedence; otherwise the
+# pkg_<name>_* index entries above are used. dep_repo reads word 2 and
+# dep_commit word 3 of dep_<name> (word 1 is presumably the fetch method —
+# not read here). git:// GitHub URLs are rewritten to https://.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# Directory lists the apps/deps targets below iterate over. APPS_DIR itself
+# is filtered out of the `find` result; ignored deps are excluded by name.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Ensure APPS_DIR and DEPS_DIR are present on ERL_LIBS, appending to any
+# caller-provided value and leaving ERL_LIBS untouched when both paths are
+# already listed (ERL_LIBS is Erlang's standard code-path variable).
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# Build every application in $(ALL_APPS_DIRS). When make is already running
+# inside an app (IS_APP=1 is set by the recursive $(MAKE) below) the target
+# is a no-op, so the recursion terminates. apps.log records which apps have
+# been built during this top-level invocation so each is made at most once;
+# the log is reset only at the very top level (neither IS_APP nor IS_DEP set).
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# Decide how to make a freshly fetched dependency buildable:
+# - has erlang.mk: regenerate its .app.src and replace its erlang.mk copy;
+# - Makefile including ../*.mk or mentioning rebar (directly or in any *.mk
+#   except erlang.mk): full autopatch via dep_autopatch2;
+# - other Makefile: only refresh the app file via dep_autopatch_app.erl;
+# - no Makefile: autopatch if it has src/, otherwise install a no-op Makefile.
+define dep_autopatch
+	if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+		$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+		$(call dep_autopatch_erlang_mk,$(1)); \
+	elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+		if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+			$(call dep_autopatch2,$(1)); \
+		elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+			$(call dep_autopatch2,$(1)); \
+		elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+			$(call dep_autopatch2,$(1)); \
+		else \
+			$(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+		fi \
+	else \
+		if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+			$(call dep_autopatch_noop,$(1)); \
+		else \
+			$(call dep_autopatch2,$(1)); \
+		fi \
+	fi
+endef
+
+# Full autopatch: evaluate any .app.src.script, regenerate src/<dep>.app.src,
+# then either drive the build through a locally built rebar (when rebar files
+# are present) or generate a minimal erlang.mk Makefile.
+define dep_autopatch2
+	if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+		$(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+	fi; \
+	$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+	if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+		$(call dep_autopatch_fetch_rebar); \
+		$(call dep_autopatch_rebar,$(1)); \
+	else \
+		$(call dep_autopatch_gen,$(1)); \
+	fi
+endef
+
+# Replace the Makefile of a src-less dependency with a single do-nothing target.
+define dep_autopatch_noop
+	printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+	echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+		> $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+	:
+endef
+endif
+
+# Generate a minimal Makefile that just includes the top-level erlang.mk.
+define dep_autopatch_gen
+	printf "%s\n" \
+		"ERLC_OPTS = +debug_info" \
+		"include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build a pinned rebar (2.x, fixed commit) once under
+# $(ERLANG_MK_TMP)/rebar so rebar-based deps are autopatched reproducibly.
+define dep_autopatch_fetch_rebar
+	mkdir -p $(ERLANG_MK_TMP); \
+	if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+		git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+		cd $(ERLANG_MK_TMP)/rebar; \
+		git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+		$(MAKE); \
+		cd -; \
+	fi
+endef
+
+# Keep the original Makefile as Makefile.orig.mk (pre-hooks may still call it),
+# generate an erlang.mk Makefile from rebar.config, and drop any stale .app.
+define dep_autopatch_rebar
+	if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+		mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+	fi; \
+	$(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+	rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# Erlang snippet (run via $(call erlang,...)) that translates a dependency's
+# rebar.config / rebar.config.script into an erlang.mk Makefile: it converts
+# erl_opts, deps, erl_first_files, pre-hooks, port (C source) specs and rebar
+# plugins, appending everything to $(DEPS_DIR)/$1/Makefile. Note: $$ and \$
+# escapes exist to survive make expansion; the body is line-sensitive, so no
+# comments are added inside the define.
+define dep_autopatch_rebar.erl
+	application:load(rebar),
+	application:set_env(rebar, log_level, debug),
+	Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+		{ok, Conf0} -> Conf0;
+		_ -> []
+	end,
+	{Conf, OsEnv} = fun() ->
+		case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+			false -> {Conf1, []};
+			true ->
+				Bindings0 = erl_eval:new_bindings(),
+				Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+				Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+				Before = os:getenv(),
+				{ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+				{Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+		end
+	end(),
+	Write = fun (Text) ->
+		file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+	end,
+	Escape = fun (Text) ->
+		re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+	end,
+	Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+		"rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+	Write("C_SRC_DIR = /path/do/not/exist\n"),
+	Write("C_SRC_TYPE = rebar\n"),
+	Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+	Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+	fun() ->
+		Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+		case lists:keyfind(erl_opts, 1, Conf) of
+			false -> ok;
+			{_, ErlOpts} ->
+				lists:foreach(fun
+					({d, D}) ->
+						Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+					({i, I}) ->
+						Write(["ERLC_OPTS += -I ", I, "\n"]);
+					({platform_define, Regex, D}) ->
+						case rebar_utils:is_arch(Regex) of
+							true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+							false -> ok
+						end;
+					({parse_transform, PT}) ->
+						Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+					(_) -> ok
+				end, ErlOpts)
+		end,
+		Write("\n")
+	end(),
+	fun() ->
+		File = case lists:keyfind(deps, 1, Conf) of
+			false -> [];
+			{_, Deps} ->
+				[begin case case Dep of
+							{N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+							{N, S} when is_tuple(S) -> {N, S};
+							{N, _, S} -> {N, S};
+							{N, _, S, _} -> {N, S};
+							_ -> false
+						end of
+					false -> ok;
+					{Name, Source} ->
+						{Method, Repo, Commit} = case Source of
+							{hex, V} -> {hex, V, undefined};
+							{git, R} -> {git, R, master};
+							{M, R, {branch, C}} -> {M, R, C};
+							{M, R, {ref, C}} -> {M, R, C};
+							{M, R, {tag, C}} -> {M, R, C};
+							{M, R, C} -> {M, R, C}
+						end,
+						Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+				end end || Dep <- Deps]
+		end
+	end(),
+	fun() ->
+		case lists:keyfind(erl_first_files, 1, Conf) of
+			false -> ok;
+			{_, Files} ->
+				Names = [[" ", case lists:reverse(F) of
+					"lre." ++ Elif -> lists:reverse(Elif);
+					Elif -> lists:reverse(Elif)
+				end] || "src/" ++ F <- Files],
+				Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+		end
+	end(),
+	Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+	Write("\npreprocess::\n"),
+	Write("\npre-deps::\n"),
+	Write("\npre-app::\n"),
+	PatchHook = fun(Cmd) ->
+		case Cmd of
+			"make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+			"gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+			"make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+			"gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+			_ -> Escape(Cmd)
+		end
+	end,
+	fun() ->
+		case lists:keyfind(pre_hooks, 1, Conf) of
+			false -> ok;
+			{_, Hooks} ->
+				[case H of
+					{'get-deps', Cmd} ->
+						Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+					{compile, Cmd} ->
+						Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+					{Regex, compile, Cmd} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+							false -> ok
+						end;
+					_ -> ok
+				end || H <- Hooks]
+		end
+	end(),
+	ShellToMk = fun(V) ->
+		re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+			"-Werror\\\\b", "", [{return, list}, global])
+	end,
+	PortSpecs = fun() ->
+		case lists:keyfind(port_specs, 1, Conf) of
+			false ->
+				case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+					false -> [];
+					true ->
+						[{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+							proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+				end;
+			{_, Specs} ->
+				lists:flatten([case S of
+					{Output, Input} -> {ShellToMk(Output), Input, []};
+					{Regex, Output, Input} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {ShellToMk(Output), Input, []};
+							false -> []
+						end;
+					{Regex, Output, Input, [{env, Env}]} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {ShellToMk(Output), Input, Env};
+							false -> []
+						end
+				end || S <- Specs])
+		end
+	end(),
+	PortSpecWrite = fun (Text) ->
+		file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+	end,
+	case PortSpecs of
+		[] -> ok;
+		_ ->
+			Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+			PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+				[code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+			PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+				[code:lib_dir(erl_interface, lib)])),
+			[PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+			FilterEnv = fun(Env) ->
+				lists:flatten([case E of
+					{_, _} -> E;
+					{Regex, K, V} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {K, V};
+							false -> []
+						end
+				end || E <- Env])
+			end,
+			MergeEnv = fun(Env) ->
+				lists:foldl(fun ({K, V}, Acc) ->
+					case lists:keyfind(K, 1, Acc) of
+						false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+						{_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+					end
+				end, [], Env)
+			end,
+			PortEnv = case lists:keyfind(port_env, 1, Conf) of
+				false -> [];
+				{_, PortEnv0} -> FilterEnv(PortEnv0)
+			end,
+			PortSpec = fun ({Output, Input0, Env}) ->
+				filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+				Input = [[" ", I] || I <- Input0],
+				PortSpecWrite([
+					[["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+					case $(PLATFORM) of
+						darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+						_ -> ""
+					end,
+					"\n\nall:: ", Output, "\n\n",
+					"%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					[[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+					Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+						"$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+					"\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+					case {filename:extension(Output), $(PLATFORM)} of
+					    {[], _} -> "\n";
+					    {_, darwin} -> "\n";
+					    _ -> " -shared\n"
+					end])
+			end,
+			[PortSpec(S) || S <- PortSpecs]
+	end,
+	Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+	RunPlugin = fun(Plugin, Step) ->
+		case erlang:function_exported(Plugin, Step, 2) of
+			false -> ok;
+			true ->
+				c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+				Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+					dict:store(base_dir, "", dict:new())}, undefined),
+				io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+		end
+	end,
+	fun() ->
+		case lists:keyfind(plugins, 1, Conf) of
+			false -> ok;
+			{_, Plugins} ->
+				[begin
+					case lists:keyfind(deps, 1, Conf) of
+						false -> ok;
+						{_, Deps} ->
+							case lists:keyfind(P, 1, Deps) of
+								false -> ok;
+								_ ->
+									Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+									io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+									io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+									code:add_patha(Path ++ "/ebin")
+							end
+					end
+				end || P <- Plugins],
+				[case code:load_file(P) of
+					{module, P} -> ok;
+					_ ->
+						case lists:keyfind(plugin_dir, 1, Conf) of
+							false -> ok;
+							{_, PluginsDir} ->
+								ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+								{ok, P, Bin} = compile:file(ErlFile, [binary]),
+								{module, P} = code:load_binary(P, ErlFile, Bin)
+						end
+				end || P <- Plugins],
+				[RunPlugin(P, preprocess) || P <- Plugins],
+				[RunPlugin(P, pre_compile) || P <- Plugins],
+				[RunPlugin(P, compile) || P <- Plugins]
+		end
+	end(),
+	halt()
+endef
+
+# Rewrite ebin/<dep>.app so its modules list matches the .erl files found
+# under src/ (no-op when the .app file does not exist).
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# Evaluate src/<dep>.app.src.script and write the result back as a plain
+# src/<dep>.app.src file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# Normalize the application resource file (falling back to ebin/<dep>.app when
+# no .app.src exists): empty the modules list, turn {vsn, git} into "git",
+# guarantee a registered entry, and remove the old file when it was moved.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch methods. Each one materializes the dependency under $(DEPS_DIR) and is
+# selected by name through dep_fetch below.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Download $(1)-$(2).tar from hex.pm's S3 mirror over HTTPS and unpack the
+# inner contents.tar.gz into the dependency directory.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Terminal fetch method used when no valid method could be resolved.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve which dep_fetch_* method applies: the first word of dep_<name> when
+# it names a known method, the package index entry, the legacy format (only
+# while building as a dependency), or "fail".
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# Rule template (instantiated via $(eval) below, once per dependency) that
+# fetches a dependency, refuses names that clash with an apps/ application
+# (exit 17), runs autoreconf/configure when autoconf files are present,
+# applies RabbitMQ-specific patch steps for amqp_client/rabbit when the
+# RABBITMQ_*_PATCH variables are set, and autopatches everything else
+# unless the dependency is listed in NO_AUTOPATCH.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch rule per build-time and run-time dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into apps/ (skipped when already inside an app).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean removes the entire deps directory unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include a plugin makefile shipped by a dependency; the include is optional
+# (-include) and the file is available once the dependency (arg 2) is fetched.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# A bare dependency name loads <dep>/plugins.mk; a path loads that exact file.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL template compilation plugin.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH the beam name encodes the template's subdirectory
+# (slashes become underscores); otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Compile each template with erlydtl into ebin/, naming the module
+# <template>$(DTL_SUFFIX); the unmatched-case crash surfaces return_errors.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Only recompile templates newer than the .app target ($? is the changed set).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol buffers compilation plugin (protobuffs).
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate .erl/.hrl from .proto files, compile the generated sources to
+# ebin/ with debug_info, then remove the intermediate .erl files.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Erlang driver: protobuffs_compile writes generated sources next to each
+# .proto file's parent directory (include/ and ebin/).
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Recompile only the .proto files that changed ($?).
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Compiler configuration; all of these may be overridden by the project.
+# COMPILE_FIRST / ERLC_EXCLUDE / COMPILE_MIB_FIRST take module base names
+# (no directory, no extension); the *_PATHS forms expand to file paths.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# Verbosity helpers: the *_0 flavour (default, V unset/0) prints a terse
+# progress line; *_2 (V=2) traces the shell commands with `set -x`.
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# Build the application. When ebin/test exists the tree was last built
+# with test options, so force a clean rebuild first.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file(id,modules): template for the generated ebin/$(PROJECT).app.
+# Without a src/$(PROJECT_MOD).erl the library form is used (no
+# registered processes, no mod entry); otherwise the application form
+# adds {registered, ...} and {mod, {$(PROJECT_MOD), []}}.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+# app-build: thin wrapper; the real work happens in the
+# ebin/$(PROJECT).app rules below.
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+# Generated .erl files land in src/ and are compiled like normal sources.
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# compile_asn1(files): erlc +noobj emits .erl/.hrl/.asn1db into asn1/,
+# which are then moved to their conventional locations.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile MIBs to priv/mibs/*.bin, then emit matching .hrl includes.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+# Leex/Yecc: .xrl/.yrl inputs generate .erl files in src/, which are then
+# compiled as regular modules.
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# makedep.erl(target): Erlang program that scans ERL_FILES for behaviour,
+# parse_transform, include(_lib) and import attributes, then writes a
+# makefile fragment ($(1) = $(PROJECT).d) with per-file dependency rules
+# and a topologically-sorted COMPILE_FIRST list.
+# NOTE(review): comments must stay outside the define — any line added
+# inside it would become part of the evaluated escript.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Regenerate the dependency makefile unless NO_MAKEDEP is set and a
+# previous $(PROJECT).d already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+	$(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# A sentinel file records the last makefile mtime; when it already exists
+# (i.e. the makefiles are newer), all sources are touched to force
+# recompilation.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+		touch -c $(PROJECT).d; \
+	fi
+	@touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+# Pull in the generated per-module dependency rules (ignored if absent),
+# and make sure ebin/ exists before the .app rules run.
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+	$(verbose) mkdir -p ebin/
+
+# compile_erl(files): -Werror is stripped for dependencies (IS_DEP) so
+# warnings in third-party code do not break the build.
+define compile_erl
+	$(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+		-pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build ebin/$(PROJECT).app: compile out-of-date sources, then generate
+# the .app file — from the app_file template when no src/*.app.src
+# exists, otherwise from src/*.app.src with the modules list and the
+# git-describe id substituted in.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+	$(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+	$(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+	$(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+	$(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+		> ebin/$(PROJECT).app
+else
+	$(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+		echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+		exit 1; \
+	fi
+	$(appsrc_verbose) cat src/$(PROJECT).app.src \
+		| sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+		| sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+		> ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove everything the app build generates, including sources derived
+# from .asn1/.mib/.xrl/.yrl inputs.
+clean-app:
+	$(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+		$(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+		$(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# doc-deps: fetch and build documentation-only dependencies (DOC_DEPS)
+# unless SKIP_DEPS is set.
+# Fix: the .PHONY line named "docs-deps", which is not a target defined
+# anywhere — the actual target below is "doc-deps" (compare the parallel
+# rel-deps/test-deps sections), so it was never marked phony.
+.PHONY: doc-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# rel-deps: fetch and build release-only dependencies (REL_DEPS) unless
+# SKIP_DEPS is set.
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Test build support: compile TEST_DIR sources with TEST_ERLC_OPTS and
+# track the test/normal build state via the ebin/test marker file.
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+	$(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+		$(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Without src/ there is nothing to clean or app-build; otherwise a clean
+# rebuild is forced only when the previous build was NOT a test build
+# (no ebin/test marker).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+	$(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# rebar.config: generate a rebar-compatible config from DEPS/ERLC_OPTS so
+# the project can also be consumed by rebar-based builds.
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Convert one erlc option: drop -Werror, strip the leading '+'.
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+	$(if $(findstring +,$1),\
+		$(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Template for the generated rebar.config: hex deps keep their version
+# string, everything else becomes a {git, Repo, Commit} tuple.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+	$(if $(filter hex,$(call dep_fetch,$d)),\
+		{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+		{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+	$(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# Build the PDF and chunked-HTML user guide with a2x when the AsciiDoc
+# book exists; otherwise the target is a no-op.
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+	a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+	a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+# Build man pages from doc/src/manual/*.asciidoc and gzip them into
+# doc/manN/; no-op when no manual sources exist.
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+	for f in doc/src/manual/*.asciidoc ; do \
+		a2x -v -f manpage $$f ; \
+	done
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p doc/man$$s/ ; \
+		mv doc/src/manual/*.$$s doc/man$$s/ ; \
+		gzip doc/man$$s/*.$$s ; \
+	done
+
+install-docs:: install-asciidoc
+
+# Install the gzipped man pages under MAN_INSTALL_PATH.
+# Fix: install(1) takes the *group* via -g and the *owner* via -o; the
+# arguments were swapped (-g `id -u` -o `id -g`), installing pages with
+# the uid as group and the gid as owner.
+install-asciidoc: asciidoc-manual
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+		install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+	done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove all generated documentation artefacts.
+distclean-asciidoc:
+	$(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+# Extend the global help output with the bootstrap-related targets.
+help::
+	$(verbose) printf "%s\n" "" \
+		"Bootstrap targets:" \
+		"  bootstrap          Generate a skeleton of an OTP application" \
+		"  bootstrap-lib      Generate a skeleton of an OTP library" \
+		"  bootstrap-rel      Generate the files needed to build a release" \
+		"  new-app in=NAME    Create a new local OTP application NAME" \
+		"  new-lib in=NAME    Create a new local OTP library NAME" \
+		"  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+		"  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+		"  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# render_template(name,dest): expand template variable $(name) through
+# printf into $(dest), escaping newlines, '%' and single quotes; tab
+# characters are rewritten to $(WS).
+define render_template
+	$(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS: whitespace written into generated files — SP spaces per tab when SP
+# is set (capped at 20 by the wordlist), otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+# bootstrap: create a new OTP application skeleton in the current
+# directory; fails if src/ already exists. LEGACY also writes .app.src.
+bootstrap:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(eval n := $(PROJECT)_sup)
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+	$(call render_template,bs_app,src/$(PROJECT)_app.erl)
+	$(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# bootstrap-lib: like bootstrap but for a library — no application
+# module or supervisor is generated.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# bootstrap-rel: create relx.config plus rel/sys.config and rel/vm.args
+# for building a release; refuses to overwrite existing files.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+	$(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+	$(error Error: rel/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_relx_config,relx.config)
+	$(verbose) mkdir rel/
+	$(call render_template,bs_sys_config,rel/sys.config)
+	$(call render_template,bs_vm_args,rel/vm.args)
+
+# new-app: scaffold application `in` under $(APPS_DIR) (umbrella layout).
+new-app:
+ifndef in
+	$(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(eval n := $(in)_sup)
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+	$(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+	$(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# new-lib: scaffold library application `in` under $(APPS_DIR); no
+# application module or supervisor is generated.
+new-lib:
+ifndef in
+	$(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# new: render template t as src/$(n).erl, or delegate to app `in` when
+# given (in= is cleared in the sub-make to avoid recursing again).
+new:
+ifeq ($(wildcard src/)$(in),)
+	$(error Error: src/ directory does not exist)
+endif
+ifndef t
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+	$(error Unknown template)
+endif
+ifndef n
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+	$(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# list-templates: print the tpl_* template names defined in this file.
+list-templates:
+	$(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# C source (NIF/port driver) build support.
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+# Output extension depends on platform and on whether a shared object or
+# an executable is being built (C_SRC_TYPE).
+ifeq ($(PLATFORM),msys2)
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+	CC = /mingw64/bin/gcc
+	export CC
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+	LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+	CC ?= gcc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+	CFLAGS += -fPIC
+	CXXFLAGS += -fPIC
+endif
+
+# ERTS/erl_interface paths come from env.mk, generated below.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+# Terse progress lines for C/C++ compile and link steps (V unset/0).
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Delegate to c_src/Makefile when the project ships one; otherwise
+# compile every C/C++ source found in C_SRC_DIR with the rules below.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+	$(MAKE) -C $(C_SRC_DIR)
+
+clean::
+	$(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link all objects; -shared is added only for the shared C_SRC_TYPE.
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+	$(verbose) mkdir -p priv/
+	$(link_verbose) $(CC) $(OBJECTS) \
+		$(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+		-o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+	$(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+	$(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+# Generate env.mk with the ERTS and erl_interface paths queried from the
+# running Erlang VM, then include it to populate the CFLAGS/LDLIBS above.
+$(C_SRC_ENV):
+	$(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+		io_lib:format( \
+			\"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+			\"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+			\"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+			[code:root_dir(), erlang:system_info(version), \
+			code:lib_dir(erl_interface, include), \
+			code:lib_dir(erl_interface, lib)])), \
+		halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+	$(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 87%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/include/rabbit_stomp.hrl
rename to deps/rabbitmq_stomp/include/rabbit_stomp.hrl
index d1497f4dd0d98f1b7f35912ef581442dfdf9a35b..455d31019e83b8d158983767977e7d5a9200032f 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -record(stomp_configuration, {default_login,
                               default_passcode,
+                              force_default_creds = false,
                               implicit_connect,
                               ssl_cert_login}).
 
similarity index 91%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/include/rabbit_stomp_frame.hrl
rename to deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl
index 77d5810c164db55ff404f76b44bf16db05954874..256bb91ac80cf9c1b63dd349f696806600dbe84f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -record(stomp_frame, {command, headers, body_iolist}).
similarity index 87%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/include/rabbit_stomp_headers.hrl
rename to deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl
index 3aff8b50dfec24685958f43d9c916c573ff29fe4..4591d768d8310cd546539fa3880bebf3b7b3d48c 100644 (file)
 -define(HEADER_ACK, "ack").
 -define(HEADER_AMQP_MESSAGE_ID, "amqp-message-id").
 -define(HEADER_APP_ID, "app-id").
+-define(HEADER_AUTO_DELETE, "auto-delete").
 -define(HEADER_CONTENT_ENCODING, "content-encoding").
 -define(HEADER_CONTENT_LENGTH, "content-length").
 -define(HEADER_CONTENT_TYPE, "content-type").
 -define(HEADER_CORRELATION_ID, "correlation-id").
 -define(HEADER_DESTINATION, "destination").
+-define(HEADER_DURABLE, "durable").
 -define(HEADER_EXPIRATION, "expiration").
+-define(HEADER_EXCLUSIVE, "exclusive").
 -define(HEADER_HEART_BEAT, "heart-beat").
 -define(HEADER_HOST, "host").
 -define(HEADER_ID, "id").
@@ -51,6 +54,7 @@
 -define(HEADER_X_MAX_LENGTH_BYTES, "x-max-length-bytes").
 -define(HEADER_X_MAX_PRIORITY, "x-max-priority").
 -define(HEADER_X_MESSAGE_TTL, "x-message-ttl").
+-define(HEADER_X_QUEUE_NAME, "x-queue-name").
 
 -define(MESSAGE_ID_SEPARATOR, "@@").
 
                            ?HEADER_X_MAX_PRIORITY,
                            ?HEADER_X_MESSAGE_TTL
                           ]).
+
+-define(HEADER_PARAMS, [
+                        ?HEADER_AUTO_DELETE,
+                        ?HEADER_DURABLE,
+                        ?HEADER_EXCLUSIVE,
+                        ?HEADER_PERSISTENT
+                       ]).
diff --git a/deps/rabbitmq_stomp/rabbitmq-components.mk b/deps/rabbitmq_stomp/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 97%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp.erl
rename to deps/rabbitmq_stomp/src/rabbit_stomp.erl
index bd867aab1a2e7b791ddea8661bdb8a09eac8d889..d352a06d54974b2b257c0f51bceec1cc05c73e28 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_stomp).
similarity index 68%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_client_sup.erl
rename to deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl
index 4f293edb1c735b48d90d43f8e4adebd7b25f34d9..e972a57285c3852557c3a3844fed3e50515ed214 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_stomp_client_sup).
 -behaviour(supervisor2).
+-behaviour(ranch_protocol).
 
--define(MAX_WAIT, 16#ffffffff).
--export([start_link/1, init/1]).
+-include_lib("rabbit_common/include/rabbit.hrl").
 
-start_link(Configuration) ->
+-export([start_link/4, init/1]).
+
+start_link(Ref, Sock, _Transport, Configuration) ->
     {ok, SupPid} = supervisor2:start_link(?MODULE, []),
     {ok, HelperPid} =
         supervisor2:start_child(SupPid,
@@ -28,14 +30,7 @@ start_link(Configuration) ->
                                  {rabbit_connection_helper_sup, start_link, []},
                                  intrinsic, infinity, supervisor,
                                  [rabbit_connection_helper_sup]}),
-    %% The processor is intrinsic. When it exits, the supervisor goes too.
-    {ok, ProcessorPid} =
-        supervisor2:start_child(SupPid,
-                                {rabbit_stomp_processor,
-                                 {rabbit_stomp_processor, start_link,
-                                  [Configuration]},
-                                 intrinsic, ?MAX_WAIT, worker,
-                                 [rabbit_stomp_processor]}),
+
     %% We want the reader to be transient since when it exits normally
     %% the processor may have some work still to do (and the reader
     %% tells the processor to exit). However, if the reader terminates
@@ -44,8 +39,8 @@ start_link(Configuration) ->
                         SupPid,
                         {rabbit_stomp_reader,
                          {rabbit_stomp_reader,
-                          start_link, [HelperPid, ProcessorPid, Configuration]},
-                         transient, ?MAX_WAIT, worker,
+                          start_link, [HelperPid, Ref, Sock, Configuration]},
+                         intrinsic, ?WORKER_WAIT, worker,
                          [rabbit_stomp_reader]}),
 
     {ok, SupPid, ReaderPid}.
similarity index 99%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_frame.erl
rename to deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl
index e6cedcfac07741f3a665181a9393f5c35b6aba1d..a24a164d7d636b724bbbbcfd46adf3316251808f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% stomp_frame implements the STOMP framing protocol "version 1.0", as
similarity index 78%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_processor.erl
rename to deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl
index 80edb6676382b1269bd9c805707482b29271c07d..8fb68fa0179a8bd946cecd9a386476a35bd278a0 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_stomp_processor).
--behaviour(gen_server2).
 
--export([start_link/1, init_arg/2, process_frame/2, flush_and_die/1]).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2,
-         code_change/3, terminate/2]).
+-export([initial_state/2, process_frame/2, flush_and_die/1]).
+-export([flush_pending_receipts/3,
+         handle_exit/3,
+         cancel_consumer/2,
+         send_delivery/5]).
+
+-export([adapter_name/1]).
 
 -include_lib("amqp_client/include/amqp_client.hrl").
 -include_lib("amqp_client/include/rabbit_routing_prefixes.hrl").
@@ -27,7 +30,7 @@
 -include("rabbit_stomp.hrl").
 -include("rabbit_stomp_headers.hrl").
 
--record(state, {session_id, channel, connection, subscriptions,
+-record(proc_state, {session_id, channel, connection, subscriptions,
                 version, start_heartbeat_fun, pending_receipts,
                 config, route_state, reply_queues, frame_transformer,
                 adapter_info, send_fun, ssl_login_name, peer_addr,
 
 -define(FLUSH_TIMEOUT, 60000).
 
+adapter_name(State) ->
+  #proc_state{adapter_info = #amqp_adapter_info{name = Name}} = State,
+  Name.
+
 %%----------------------------------------------------------------------------
-%% Public API
-%%----------------------------------------------------------------------------
-start_link(Args) ->
-    gen_server2:start_link(?MODULE, Args, []).
 
-init_arg(ProcessorPid, InitArgs) ->
-    gen_server2:cast(ProcessorPid, {init, InitArgs}).
+-spec initial_state(
+  #stomp_configuration{},
+  {SendFun, AdapterInfo, SSLLoginName, PeerAddr})
+    -> #proc_state{}
+  when SendFun :: fun((atom(), binary()) -> term()),
+       AdapterInfo :: #amqp_adapter_info{},
+       SSLLoginName :: atom() | binary(),
+       PeerAddr :: inet:ip_address().
+
+-type process_frame_result() ::
+    {ok, #proc_state{}} |
+    {stop, term(), #proc_state{}}.
+
+-spec process_frame(#stomp_frame{}, #proc_state{}) ->
+    process_frame_result().
+
+-spec flush_and_die(#proc_state{}) -> ok.
 
-process_frame(Pid, Frame = #stomp_frame{command = "SEND"}) ->
-    credit_flow:send(Pid),
-    gen_server2:cast(Pid, {"SEND", Frame, self()});
-process_frame(Pid, Frame = #stomp_frame{command = Command}) ->
-    gen_server2:cast(Pid, {Command, Frame, noflow}).
+-spec command({Command, Frame}, State) -> process_frame_result()
+    when Command :: string(),
+         Frame   :: #stomp_frame{},
+         State   :: #proc_state{}.
+
+-type process_fun() :: fun((#proc_state{}) ->
+        {ok, #stomp_frame{}, #proc_state{}}  |
+        {error, string(), string(), #proc_state{}} |
+        {stop, term(), #proc_state{}}).
+-spec process_request(process_fun(), fun((#proc_state{}) -> #proc_state{}), #proc_state{}) ->
+    process_frame_result().
+
+-spec flush_pending_receipts(DeliveryTag, IsMulti, State) -> State
+    when State :: #proc_state{},
+         DeliveryTag :: term(),
+         IsMulti :: boolean().
+
+-spec handle_exit(From, Reason, State) -> unknown_exit | {stop, Reason, State}
+    when State  :: #proc_state{},
+         From   :: pid(),
+         Reason :: term().
+
+-spec cancel_consumer(binary(), #proc_state{}) -> process_frame_result().
+
+-spec send_delivery(#'basic.deliver'{}, term(), term(), term(),
+                    #proc_state{}) -> #proc_state{}.
+
+%%----------------------------------------------------------------------------
 
-flush_and_die(Pid) ->
-    gen_server2:cast(Pid, flush_and_die).
 
 %%----------------------------------------------------------------------------
-%% Basic gen_server2 callbacks
+%% Public API
 %%----------------------------------------------------------------------------
 
-init(Configuration) ->
-    process_flag(trap_exit, true),
-    {ok,
-     #state {
+process_frame(Frame = #stomp_frame{command = Command}, State) ->
+    command({Command, Frame}, State).
+
+flush_and_die(State) ->
+    close_connection(State).
+
+initial_state(Configuration,
+    {SendFun, AdapterInfo0 = #amqp_adapter_info{additional_info = Extra},
+     SSLLoginName, PeerAddr}) ->
+  %% STOMP connections use exactly one channel. The frame max is not
+  %% applicable and there is no way to know what client is used.
+  AdapterInfo = AdapterInfo0#amqp_adapter_info{additional_info=[
+       {channels, 1},
+       {channel_max, 1},
+       {frame_max, 0},
+       %% TODO: can we use a header to make it possible for clients
+       %%       to override this value?
+       {client_properties, [{<<"product">>, longstr, <<"STOMP client">>}]}
+       |Extra]},
+  #proc_state {
+       send_fun            = SendFun,
+       adapter_info        = AdapterInfo,
+       ssl_login_name      = SSLLoginName,
+       peer_addr           = PeerAddr,
        session_id          = none,
        channel             = none,
        connection          = none,
@@ -74,123 +133,78 @@ init(Configuration) ->
        route_state         = rabbit_routing_util:init_state(),
        reply_queues        = dict:new(),
        frame_transformer   = undefined,
-       trailing_lf         = rabbit_misc:get_env(rabbitmq_stomp, trailing_lf, true)},
-     hibernate,
-     {backoff, 1000, 1000, 10000}
-    }.
-
-terminate(_Reason, State) ->
-    close_connection(State).
-
-handle_cast({init, [SendFun, AdapterInfo, StartHeartbeatFun, SSLLoginName,
-                    PeerAddr]},
-            State) ->
-    {noreply, State #state { send_fun            = SendFun,
-                             adapter_info        = AdapterInfo,
-                             start_heartbeat_fun = StartHeartbeatFun,
-                             ssl_login_name      = SSLLoginName,
-                             peer_addr           = PeerAddr}};
+       trailing_lf         = rabbit_misc:get_env(rabbitmq_stomp, trailing_lf, true)}.
 
-handle_cast(flush_and_die, State) ->
-    {stop, normal, close_connection(State)};
 
-handle_cast({"STOMP", Frame, noflow}, State) ->
+command({"STOMP", Frame}, State) ->
     process_connect(no_implicit, Frame, State);
 
-handle_cast({"CONNECT", Frame, noflow}, State) ->
+command({"CONNECT", Frame}, State) ->
     process_connect(no_implicit, Frame, State);
 
-handle_cast(Request, State = #state{channel = none,
-                                     config = #stomp_configuration{
-                                      implicit_connect = true}}) ->
-    {noreply, State1 = #state{channel = Ch}, _} =
+command(Request, State = #proc_state{channel = none,
+                             config = #stomp_configuration{
+                             implicit_connect = true}}) ->
+    {ok, State1 = #proc_state{channel = Ch}, _} =
         process_connect(implicit, #stomp_frame{headers = []}, State),
     case Ch of
         none -> {stop, normal, State1};
-        _    -> handle_cast(Request, State1)
+        _    -> command(Request, State1)
     end;
 
-handle_cast(_Request, State = #state{channel = none,
-                                     config = #stomp_configuration{
-                                      implicit_connect = false}}) ->
-    {noreply,
-     send_error("Illegal command",
-                "You must log in using CONNECT first",
-                State),
-     hibernate};
-
-handle_cast({Command, Frame, FlowPid},
-            State = #state{frame_transformer = FT}) ->
-    case FlowPid of
-        noflow -> ok;
-        _      -> credit_flow:ack(FlowPid)
-    end,
+command(_Request, State = #proc_state{channel = none,
+                              config = #stomp_configuration{
+                              implicit_connect = false}}) ->
+    {ok, send_error("Illegal command",
+                    "You must log in using CONNECT first",
+                    State), none};
+
+command({Command, Frame}, State = #proc_state{frame_transformer = FT}) ->
     Frame1 = FT(Frame),
     process_request(
       fun(StateN) ->
-              case validate_frame(Command, Frame1, StateN) of
-                  R = {error, _, _, _} -> R;
-                  _                    -> handle_frame(Command, Frame1, StateN)
-              end
+          case validate_frame(Command, Frame1, StateN) of
+              R = {error, _, _, _} -> R;
+              _                    -> handle_frame(Command, Frame1, StateN)
+          end
       end,
       fun(StateM) -> ensure_receipt(Frame1, StateM) end,
-      State);
-
-handle_cast(client_timeout,
-            State = #state{adapter_info = #amqp_adapter_info{name = S}}) ->
-    rabbit_log:warning("STOMP detected missed client heartbeat(s) "
-                       "on connection ~s, closing it~n", [S]),
-    {stop, {shutdown, client_heartbeat_timeout}, close_connection(State)}.
-
-handle_info(#'basic.consume_ok'{}, State) ->
-    {noreply, State, hibernate};
-handle_info(#'basic.cancel_ok'{}, State) ->
-    {noreply, State, hibernate};
-handle_info(#'basic.ack'{delivery_tag = Tag, multiple = IsMulti}, State) ->
-    {noreply, flush_pending_receipts(Tag, IsMulti, State), hibernate};
-handle_info({Delivery = #'basic.deliver'{},
-             #amqp_msg{props = Props, payload = Payload},
-             DeliveryCtx}, State) ->
-    State1 = send_delivery(Delivery, Props, Payload, State),
-    amqp_channel:notify_received(DeliveryCtx),
-    {noreply, State1, hibernate};
-handle_info(#'basic.cancel'{consumer_tag = Ctag}, State) ->
-    process_request(
-      fun(StateN) -> server_cancel_consumer(Ctag, StateN) end, State);
-handle_info({'EXIT', Conn,
-             {shutdown, {server_initiated_close, Code, Explanation}}},
-            State = #state{connection = Conn}) ->
+      State).
+
+cancel_consumer(Ctag, State) ->
+  process_request(
+    fun(StateN) -> server_cancel_consumer(Ctag, StateN) end,
+    State).
+
+handle_exit(Conn, {shutdown, {server_initiated_close, Code, Explanation}},
+            State = #proc_state{connection = Conn}) ->
     amqp_death(Code, Explanation, State);
-handle_info({'EXIT', Conn,
-             {shutdown, {connection_closing,
-                         {server_initiated_close, Code, Explanation}}}},
-            State = #state{connection = Conn}) ->
+handle_exit(Conn, {shutdown, {connection_closing,
+                    {server_initiated_close, Code, Explanation}}},
+            State = #proc_state{connection = Conn}) ->
     amqp_death(Code, Explanation, State);
-handle_info({'EXIT', Conn, Reason}, State = #state{connection = Conn}) ->
+handle_exit(Conn, Reason, State = #proc_state{connection = Conn}) ->
     send_error("AMQP connection died", "Reason: ~p", [Reason], State),
     {stop, {conn_died, Reason}, State};
 
-handle_info({'EXIT', Ch, Reason}, State = #state{channel = Ch}) ->
+handle_exit(Ch, {shutdown, {server_initiated_close, Code, Explanation}},
+            State = #proc_state{channel = Ch}) ->
+    amqp_death(Code, Explanation, State);
+
+handle_exit(Ch, Reason, State = #proc_state{channel = Ch}) ->
     send_error("AMQP channel died", "Reason: ~p", [Reason], State),
     {stop, {channel_died, Reason}, State};
-handle_info({'EXIT', Ch,
-             {shutdown, {server_initiated_close, Code, Explanation}}},
-            State = #state{channel = Ch}) ->
+handle_exit(Ch, {shutdown, {server_initiated_close, Code, Explanation}},
+            State = #proc_state{channel = Ch}) ->
     amqp_death(Code, Explanation, State);
+handle_exit(_, _, _) -> unknown_exit.
 
 
-handle_info({inet_reply, _, ok}, State) ->
-    {noreply, State, hibernate};
-handle_info({bump_credit, Msg}, State) ->
-    credit_flow:handle_bump_msg(Msg),
-    {noreply, State, hibernate};
-handle_info({inet_reply, _, Status}, State) ->
-    {stop, Status, State}.
-
 process_request(ProcessFun, State) ->
     process_request(ProcessFun, fun (StateM) -> StateM end, State).
 
-process_request(ProcessFun, SuccessFun, State) ->
+
+process_request(ProcessFun, SuccessFun, State=#proc_state{connection=Conn}) ->
     Res = case catch ProcessFun(State) of
               {'EXIT',
                {{shutdown,
@@ -208,9 +222,9 @@ process_request(ProcessFun, SuccessFun, State) ->
                 none -> ok;
                 _    -> send_frame(Frame, NewState)
             end,
-            {noreply, SuccessFun(NewState), hibernate};
+            {ok, SuccessFun(NewState), Conn};
         {error, Message, Detail, NewState} ->
-            {noreply, send_error(Message, Detail, NewState), hibernate};
+            {ok, send_error(Message, Detail, NewState), Conn};
         {stop, normal, NewState} ->
             {stop, normal, SuccessFun(NewState)};
         {stop, R, NewState} ->
@@ -218,7 +232,7 @@ process_request(ProcessFun, SuccessFun, State) ->
     end.
 
 process_connect(Implicit, Frame,
-                State = #state{channel        = none,
+                State = #proc_state{channel        = none,
                                config         = Config,
                                ssl_login_name = SSLLoginName,
                                adapter_info   = AdapterInfo}) ->
@@ -238,7 +252,7 @@ process_connect(Implicit, Frame,
                               login_header(Frame1, ?HEADER_HEART_BEAT, "0,0"),
                               AdapterInfo#amqp_adapter_info{
                                 protocol = {ProtoName, Version}}, Version,
-                              StateN#state{frame_transformer = FT}),
+                              StateN#proc_state{frame_transformer = FT}),
                       case {Res, Implicit} of
                           {{ok, _, StateN1}, implicit} -> ok(StateN1);
                           _                            -> Res
@@ -252,6 +266,10 @@ process_connect(Implicit, Frame,
       end,
       State).
 
+creds(_, _, #stomp_configuration{default_login       = DefLogin,
+                                 default_passcode    = DefPasscode,
+                                 force_default_creds = true}) ->
+    {DefLogin, DefPasscode};
 creds(Frame, SSLLoginName,
       #stomp_configuration{default_login    = DefLogin,
                            default_passcode = DefPasscode}) ->
@@ -281,13 +299,18 @@ frame_transformer(_) -> fun(Frame) -> Frame end.
 %% Frame Validation
 %%----------------------------------------------------------------------------
 
+report_missing_id_header(State) ->
+    error("Missing Header",
+          "Header 'id' is required for durable subscriptions", State).
+
 validate_frame(Command, Frame, State)
   when Command =:= "SUBSCRIBE" orelse Command =:= "UNSUBSCRIBE" ->
     Hdr = fun(Name) -> rabbit_stomp_frame:header(Frame, Name) end,
-    case {Hdr(?HEADER_PERSISTENT), Hdr(?HEADER_ID)} of
-        {{ok, "true"}, not_found} ->
-            error("Missing Header",
-                  "Header 'id' is required for durable subscriptions", State);
+    case {Hdr(?HEADER_DURABLE), Hdr(?HEADER_PERSISTENT), Hdr(?HEADER_ID)} of
+        {{ok, "true"}, _, not_found} ->
+            report_missing_id_header(State);
+        {_, {ok, "true"}, not_found} ->
+            report_missing_id_header(State);
         _ ->
             ok(State)
     end;
@@ -340,7 +363,7 @@ handle_frame(Command, _Frame, State) ->
 %%----------------------------------------------------------------------------
 
 ack_action(Command, Frame,
-           State = #state{subscriptions = Subs,
+           State = #proc_state{subscriptions = Subs,
                           channel       = Channel,
                           version       = Version}, MethodFun) ->
     AckHeader = rabbit_stomp_util:ack_header_name(Version),
@@ -382,7 +405,7 @@ ack_action(Command, Frame,
 %%----------------------------------------------------------------------------
 %% Internal helpers for processing frames callbacks
 %%----------------------------------------------------------------------------
-server_cancel_consumer(ConsumerTag, State = #state{subscriptions = Subs}) ->
+server_cancel_consumer(ConsumerTag, State = #proc_state{subscriptions = Subs}) ->
     case dict:find(ConsumerTag, Subs) of
         error ->
             error("Server cancelled unknown subscription",
@@ -416,7 +439,7 @@ cancel_subscription({error, _}, _Frame, State) ->
           State);
 
 cancel_subscription({ok, ConsumerTag, Description}, Frame,
-                    State = #state{subscriptions = Subs,
+                    State = #proc_state{subscriptions = Subs,
                                    channel       = Channel}) ->
     case dict:find(ConsumerTag, Subs) of
         error ->
@@ -441,18 +464,17 @@ cancel_subscription({ok, ConsumerTag, Description}, Frame,
     end.
 
 tidy_canceled_subscription(ConsumerTag, #subscription{dest_hdr = DestHdr},
-                           Frame, State = #state{subscriptions = Subs}) ->
+                           Frame, State = #proc_state{subscriptions = Subs}) ->
     Subs1 = dict:erase(ConsumerTag, Subs),
     {ok, Dest} = rabbit_routing_util:parse_endpoint(DestHdr),
-    maybe_delete_durable_sub(Dest, Frame, State#state{subscriptions = Subs1}).
+    maybe_delete_durable_sub(Dest, Frame, State#proc_state{subscriptions = Subs1}).
 
 maybe_delete_durable_sub({topic, Name}, Frame,
-                         State = #state{channel = Channel}) ->
-    case rabbit_stomp_frame:boolean_header(Frame,
-                                           ?HEADER_PERSISTENT, false) of
+                         State = #proc_state{channel = Channel}) ->
+    case rabbit_stomp_util:has_durable_header(Frame) of
         true ->
             {ok, Id} = rabbit_stomp_frame:header(Frame, ?HEADER_ID),
-            QName = rabbit_stomp_util:subscription_queue_name(Name, Id),
+            QName = rabbit_stomp_util:subscription_queue_name(Name, Id, Frame),
             amqp_channel:call(Channel,
                               #'queue.delete'{queue  = list_to_binary(QName),
                                               nowait = false}),
@@ -520,7 +542,7 @@ without_headers([], Command, Frame, State, Fun) ->
 do_login(undefined, _, _, _, _, _, State) ->
     error("Bad CONNECT", "Missing login or passcode header(s)", State);
 do_login(Username, Passwd, VirtualHost, Heartbeat, AdapterInfo, Version,
-         State = #state{peer_addr = Addr}) ->
+         State = #proc_state{peer_addr = Addr}) ->
     case start_connection(
            #amqp_params_direct{username     = Username,
                                password     = Passwd,
@@ -532,24 +554,33 @@ do_login(Username, Passwd, VirtualHost, Heartbeat, AdapterInfo, Version,
             link(Channel),
             amqp_channel:enable_delivery_flow_control(Channel),
             SessionId = rabbit_guid:string(rabbit_guid:gen_secure(), "session"),
-            {{SendTimeout, ReceiveTimeout}, State1} =
-                ensure_heartbeats(Heartbeat, State),
-            ok("CONNECTED",
-               [{?HEADER_SESSION, SessionId},
-                {?HEADER_HEART_BEAT,
-                 io_lib:format("~B,~B", [SendTimeout, ReceiveTimeout])},
-                {?HEADER_SERVER, server_header()},
-                {?HEADER_VERSION, Version}],
+            {SendTimeout, ReceiveTimeout} = ensure_heartbeats(Heartbeat),
+
+          Headers = [{?HEADER_SESSION, SessionId},
+                     {?HEADER_HEART_BEAT,
+                      io_lib:format("~B,~B", [SendTimeout, ReceiveTimeout])},
+                     {?HEADER_VERSION, Version}],
+          ok("CONNECTED",
+              case rabbit_misc:get_env(rabbitmq_stomp, hide_server_info, false) of
+                true  -> Headers;
+                false -> [{?HEADER_SERVER, server_header()} | Headers]
+              end,
                "",
-               State1#state{session_id = SessionId,
-                            channel    = Channel,
-                            connection = Connection,
-                            version    = Version});
+               State#proc_state{session_id = SessionId,
+                                channel    = Channel,
+                                connection = Connection,
+                                version    = Version});
         {error, {auth_failure, _}} ->
             rabbit_log:warning("STOMP login failed for user ~p~n",
                                [binary_to_list(Username)]),
             error("Bad CONNECT", "Access refused for user '" ++
                   binary_to_list(Username) ++ "'~n", [], State);
+        {error, not_allowed} ->
+            rabbit_log:warning("STOMP login failed - not_allowed "
+                               "(vhost access not allowed)~n"),
+            error("Bad CONNECT", "Virtual host '" ++
+                                 binary_to_list(VirtualHost) ++
+                                 "' access denied", State);
         {error, access_refused} ->
             rabbit_log:warning("STOMP login failed - access_refused "
                                "(vhost access not allowed)~n"),
@@ -579,7 +610,7 @@ server_header() ->
     rabbit_misc:format("~s/~s", [Product, Version]).
 
 do_subscribe(Destination, DestHdr, Frame,
-             State = #state{subscriptions = Subs,
+             State = #proc_state{subscriptions = Subs,
                             route_state   = RouteState,
                             channel       = Channel}) ->
     Prefetch =
@@ -630,7 +661,7 @@ do_subscribe(Destination, DestHdr, Frame,
                             end,
                             exit(Err)
                     end,
-                    ok(State#state{subscriptions =
+                    ok(State#proc_state{subscriptions =
                                        dict:store(
                                          ConsumerTag,
                                          #subscription{dest_hdr    = DestHdr,
@@ -644,7 +675,7 @@ do_subscribe(Destination, DestHdr, Frame,
             Err
     end.
 
-maybe_clean_up_queue(Queue, #state{connection = Connection}) ->
+maybe_clean_up_queue(Queue, #proc_state{connection = Connection}) ->
     {ok, Channel} = amqp_connection:open_channel(Connection),
     catch amqp_channel:call(Channel, #'queue.delete'{queue = Queue}),
     catch amqp_channel:close(Channel),
@@ -652,13 +683,13 @@ maybe_clean_up_queue(Queue, #state{connection = Connection}) ->
 
 do_send(Destination, _DestHdr,
         Frame = #stomp_frame{body_iolist = BodyFragments},
-        State = #state{channel = Channel, route_state = RouteState}) ->
+        State = #proc_state{channel = Channel, route_state = RouteState}) ->
     case ensure_endpoint(dest, Destination, Frame, Channel, RouteState) of
 
         {ok, _Q, RouteState1} ->
 
             {Frame1, State1} =
-                ensure_reply_to(Frame, State#state{route_state = RouteState1}),
+                ensure_reply_to(Frame, State#proc_state{route_state = RouteState1}),
 
             Props = rabbit_stomp_util:message_properties(Frame1),
 
@@ -707,11 +738,12 @@ negotiate_version(Frame) ->
 
 
 send_delivery(Delivery = #'basic.deliver'{consumer_tag = ConsumerTag},
-              Properties, Body,
-              State = #state{session_id    = SessionId,
-                             subscriptions = Subs,
-                             version       = Version}) ->
-    case dict:find(ConsumerTag, Subs) of
+              Properties, Body, DeliveryCtx,
+              State = #proc_state{
+                          session_id  = SessionId,
+                          subscriptions = Subs,
+                          version       = Version}) ->
+    NewState = case dict:find(ConsumerTag, Subs) of
         {ok, #subscription{ack_mode = AckMode}} ->
             send_frame(
               "MESSAGE",
@@ -724,18 +756,20 @@ send_delivery(Delivery = #'basic.deliver'{consumer_tag = ConsumerTag},
                        "There is no current subscription with tag '~s'.",
                        [ConsumerTag],
                        State)
-    end.
+    end,
+    amqp_channel:notify_received(DeliveryCtx),
+    NewState.
 
 
 send_method(Method, Channel, State) ->
     amqp_channel:call(Channel, Method),
     State.
 
-send_method(Method, State = #state{channel = Channel}) ->
+send_method(Method, State = #proc_state{channel = Channel}) ->
     send_method(Method, Channel, State).
 
 send_method(Method, Properties, BodyFragments,
-            State = #state{channel = Channel}) ->
+            State = #proc_state{channel = Channel}) ->
     send_method(Method, Channel, Properties, BodyFragments, State).
 
 send_method(Method = #'basic.publish'{}, Channel, Properties, BodyFragments,
@@ -746,13 +780,13 @@ send_method(Method = #'basic.publish'{}, Channel, Properties, BodyFragments,
                 payload = list_to_binary(BodyFragments)}),
     State.
 
-close_connection(State = #state{connection = none}) ->
+close_connection(State = #proc_state{connection = none}) ->
     State;
 %% Closing the connection will close the channel and subchannels
-close_connection(State = #state{connection = Connection}) ->
+close_connection(State = #proc_state{connection = Connection}) ->
     %% ignore noproc or other exceptions to avoid debris
     catch amqp_connection:close(Connection),
-    State#state{channel = none, connection = none, subscriptions = none}.
+    State#proc_state{channel = none, connection = none, subscriptions = none}.
 
 %%----------------------------------------------------------------------------
 %% Reply-To
@@ -777,7 +811,7 @@ ensure_reply_to(Frame = #stomp_frame{headers = Headers}, State) ->
             end
     end.
 
-ensure_reply_queue(TempQueueId, State = #state{channel       = Channel,
+ensure_reply_queue(TempQueueId, State = #proc_state{channel       = Channel,
                                                reply_queues  = RQS,
                                                subscriptions = Subs}) ->
     case dict:find(TempQueueId, RQS) of
@@ -807,7 +841,7 @@ ensure_reply_queue(TempQueueId, State = #state{channel       = Channel,
                                              multi_ack = false},
                                Subs),
 
-            {Destination, State#state{
+            {Destination, State#proc_state{
                             reply_queues  = dict:store(TempQueueId, Queue, RQS),
                             subscriptions = Subs1}}
     end.
@@ -828,7 +862,7 @@ do_receipt("SEND", _, State) ->
 do_receipt(_Frame, ReceiptId, State) ->
     send_frame("RECEIPT", [{"receipt-id", ReceiptId}], "", State).
 
-maybe_record_receipt(Frame, State = #state{channel          = Channel,
+maybe_record_receipt(Frame, State = #proc_state{channel          = Channel,
                                            pending_receipts = PR}) ->
     case rabbit_stomp_frame:header(Frame, ?HEADER_RECEIPT) of
         {ok, Id} ->
@@ -843,18 +877,18 @@ maybe_record_receipt(Frame, State = #state{channel          = Channel,
                           PR
                   end,
             SeqNo = amqp_channel:next_publish_seqno(Channel),
-            State#state{pending_receipts = gb_trees:insert(SeqNo, Id, PR1)};
+            State#proc_state{pending_receipts = gb_trees:insert(SeqNo, Id, PR1)};
         not_found ->
             State
     end.
 
 flush_pending_receipts(DeliveryTag, IsMulti,
-                       State = #state{pending_receipts = PR}) ->
+                       State = #proc_state{pending_receipts = PR}) ->
     {Receipts, PR1} = accumulate_receipts(DeliveryTag, IsMulti, PR),
     State1 = lists:foldl(fun(ReceiptId, StateN) ->
                                  do_receipt(none, ReceiptId, StateN)
                          end, State, Receipts),
-    State1#state{pending_receipts = PR1}.
+    State1#proc_state{pending_receipts = PR1}.
 
 accumulate_receipts(DeliveryTag, false, PR) ->
     case gb_trees:lookup(DeliveryTag, PR) of
@@ -958,22 +992,16 @@ perform_transaction_action({Method, Props, BodyFragments}, State) ->
 %% Heartbeat Management
 %%--------------------------------------------------------------------
 
-ensure_heartbeats(Heartbeats,
-                  State = #state{start_heartbeat_fun = SHF,
-                                 send_fun            = RawSendFun}) ->
+ensure_heartbeats(Heartbeats) ->
+
     [CX, CY] = [list_to_integer(X) ||
                    X <- re:split(Heartbeats, ",", [{return, list}])],
 
-    SendFun = fun() -> RawSendFun(sync, <<$\n>>) end,
-    Pid = self(),
-    ReceiveFun = fun() -> gen_server2:cast(Pid, client_timeout) end,
-
     {SendTimeout, ReceiveTimeout} =
         {millis_to_seconds(CY), millis_to_seconds(CX)},
 
-    SHF(SendTimeout, SendFun, ReceiveTimeout, ReceiveFun),
-
-    {{SendTimeout * 1000 , ReceiveTimeout * 1000}, State}.
+    rabbit_stomp_reader:start_heartbeats(self(), {SendTimeout, ReceiveTimeout}),
+    {SendTimeout * 1000 , ReceiveTimeout * 1000}.
 
 millis_to_seconds(M) when M =< 0   -> 0;
 millis_to_seconds(M) when M < 1000 -> 1;
@@ -988,35 +1016,31 @@ ensure_endpoint(_Direction, {queue, []}, _Frame, _Channel, _State) ->
 
 ensure_endpoint(source, EndPoint, {_, _, Headers, _} = Frame, Channel, State) ->
     Params =
-        case rabbit_stomp_frame:boolean_header(
-               Frame, ?HEADER_PERSISTENT, false) of
-            true ->
-                [{subscription_queue_name_gen,
-                  fun () ->
-                          {ok, Id} = rabbit_stomp_frame:header(Frame, ?HEADER_ID),
-                          {_, Name} = rabbit_routing_util:parse_routing(EndPoint),
-                          list_to_binary(
-                            rabbit_stomp_util:subscription_queue_name(Name,
-                                                                      Id))
-                  end},
-                 {durable, true}];
-            false ->
-                [{subscription_queue_name_gen,
-                  fun () ->
-                          Id = rabbit_guid:gen_secure(),
-                          {_, Name} = rabbit_routing_util:parse_routing(EndPoint),
-                          list_to_binary(
-                            rabbit_stomp_util:subscription_queue_name(Name,
-                                                                      Id))
-                  end},
-                 {durable, false}]
-        end,
+        [{subscription_queue_name_gen,
+          fun () ->
+              Id = build_subscription_id(Frame),
+              {_, Name} = rabbit_routing_util:parse_routing(EndPoint),
+              list_to_binary(rabbit_stomp_util:subscription_queue_name(Name, Id, Frame))
+          end
+         }] ++ rabbit_stomp_util:build_params(EndPoint, Headers),
     Arguments = rabbit_stomp_util:build_arguments(Headers),
-    rabbit_routing_util:ensure_endpoint(source, Channel, EndPoint, [Arguments | Params], State);
+    rabbit_routing_util:ensure_endpoint(source, Channel, EndPoint,
+                                        [Arguments | Params], State);
 
-ensure_endpoint(Direction, Endpoint, {_, _, Headers, _}, Channel, State) ->
+ensure_endpoint(Direction, EndPoint, {_, _, Headers, _}, Channel, State) ->
+    Params = rabbit_stomp_util:build_params(EndPoint, Headers),
     Arguments = rabbit_stomp_util:build_arguments(Headers),
-    rabbit_routing_util:ensure_endpoint(Direction, Channel, Endpoint, [Arguments], State).
+    rabbit_routing_util:ensure_endpoint(Direction, Channel, EndPoint,
+                                        [Arguments | Params], State).
+
+build_subscription_id(Frame) ->
+    case rabbit_stomp_util:has_durable_header(Frame) of
+        true ->
+            {ok, Id} = rabbit_stomp_frame:header(Frame, ?HEADER_ID),
+            Id;
+        false ->
+            rabbit_guid:gen_secure()
+    end.
 
 %%----------------------------------------------------------------------------
 %% Success/error handling
@@ -1066,7 +1090,7 @@ send_frame(Command, Headers, BodyFragments, State) ->
                             body_iolist = BodyFragments},
                State).
 
-send_frame(Frame, State = #state{send_fun = SendFun,
+send_frame(Frame, State = #proc_state{send_fun = SendFun,
                                  trailing_lf = TrailingLF}) ->
     SendFun(async, rabbit_stomp_frame:serialize(Frame, TrailingLF)),
     State.
@@ -1088,11 +1112,3 @@ send_error(Message, Detail, State) ->
 send_error(Message, Format, Args, State) ->
     send_error(Message, rabbit_misc:format(Format, Args), State).
 
-%%----------------------------------------------------------------------------
-%% Skeleton gen_server2 callbacks
-%%----------------------------------------------------------------------------
-handle_call(_Msg, _From, State) ->
-    {noreply, State}.
-
-code_change(_OldVsn, State, _Extra) ->
-    {ok, State}.
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl
new file mode 100644 (file)
index 0000000..d56a1fe
--- /dev/null
@@ -0,0 +1,378 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_stomp_reader).
+-behaviour(gen_server2).
+
+-export([start_link/4]).
+-export([conserve_resources/3]).
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+         code_change/3, terminate/2]).
+-export([start_heartbeats/2]).
+
+-include("rabbit_stomp.hrl").
+-include("rabbit_stomp_frame.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+-record(reader_state, {socket, conn_name, parse_state, processor_state, state,
+                       conserve_resources, recv_outstanding, stats_timer,
+                       parent, connection, heartbeat_sup, heartbeat}).
+
+%%----------------------------------------------------------------------------
+
+start_link(SupHelperPid, Ref, Sock, Configuration) ->
+    Pid = proc_lib:spawn_link(?MODULE, init,
+                              [[SupHelperPid, Ref, Sock, Configuration]]),
+
+    %% In the event that somebody floods us with connections, the
+    %% reader processes can spew log events at error_logger faster
+    %% than it can keep up, causing its mailbox to grow unbounded
+    %% until we eat all the memory available and crash. So here is a
+    %% meaningless synchronous call to the underlying gen_event
+    %% mechanism. When it returns the mailbox is drained, and we
+    %% return to our caller to accept more connections.
+    gen_event:which_handlers(error_logger),
+
+    {ok, Pid}.
+
+log(Level, Fmt, Args) -> rabbit_log:log(connection, Level, Fmt, Args).
+
+init([SupHelperPid, Ref, Sock, Configuration]) ->
+    process_flag(trap_exit, true),
+    rabbit_net:accept_ack(Ref, Sock),
+
+    case rabbit_net:connection_string(Sock, inbound) of
+        {ok, ConnStr} ->
+            ProcInitArgs = processor_args(Configuration, Sock),
+            ProcState = rabbit_stomp_processor:initial_state(Configuration,
+                                                             ProcInitArgs),
+
+            log(info, "accepting STOMP connection ~p (~s)~n",
+                [self(), ConnStr]),
+
+            ParseState = rabbit_stomp_frame:initial_state(),
+            register_resource_alarm(),
+            gen_server2:enter_loop(?MODULE, [],
+              rabbit_event:init_stats_timer(
+                run_socket(control_throttle(
+                  #reader_state{socket             = Sock,
+                                conn_name          = ConnStr,
+                                parse_state        = ParseState,
+                                processor_state    = ProcState,
+                                heartbeat_sup      = SupHelperPid,
+                                heartbeat          = {none, none},
+                                state              = running,
+                                conserve_resources = false,
+                                recv_outstanding   = false})), #reader_state.stats_timer),
+              {backoff, 1000, 1000, 10000});
+        {network_error, Reason} ->
+            rabbit_net:fast_close(Sock),
+            terminate({shutdown, Reason}, undefined);
+        {error, enotconn} ->
+            rabbit_net:fast_close(Sock),
+            terminate(shutdown, undefined);
+        {error, Reason} ->
+            rabbit_net:fast_close(Sock),
+            terminate({network_error, Reason}, undefined)
+    end.
+
+
+handle_call(Msg, From, State) ->
+    {stop, {stomp_unexpected_call, Msg, From}, State}.
+
+handle_cast(client_timeout, State) ->
+    {stop, {shutdown, client_heartbeat_timeout}, State};
+handle_cast(Msg, State) ->
+    {stop, {stomp_unexpected_cast, Msg}, State}.
+
+
+handle_info({inet_async, _Sock, _Ref, {ok, Data}}, State) ->
+    case process_received_bytes(Data, State#reader_state{recv_outstanding = false}) of
+      {ok, NewState} ->
+          {noreply, ensure_stats_timer(run_socket(control_throttle(NewState))), hibernate};
+      {stop, Reason, NewState} ->
+          {stop, Reason, NewState}
+    end;
+handle_info({inet_async, _Sock, _Ref, {error, closed}}, State) ->
+    {stop, normal, State};
+handle_info({inet_async, _Sock, _Ref, {error, Reason}}, State) ->
+    {stop, {inet_error, Reason}, State};
+handle_info({inet_reply, _Sock, {error, closed}}, State) ->
+    {stop, normal, State};
+handle_info({inet_reply, _, ok}, State) ->
+    {noreply, State, hibernate};
+handle_info({inet_reply, _, Status}, State) ->
+    {stop, Status, State};
+handle_info(emit_stats, State) ->
+    {noreply, emit_stats(State), hibernate};
+handle_info({conserve_resources, Conserve}, State) ->
+    NewState = State#reader_state{conserve_resources = Conserve},
+    {noreply, run_socket(control_throttle(NewState)), hibernate};
+handle_info({bump_credit, Msg}, State) ->
+    credit_flow:handle_bump_msg(Msg),
+    {noreply, run_socket(control_throttle(State)), hibernate};
+
+%%----------------------------------------------------------------------------
+
+handle_info(client_timeout, State) ->
+    {stop, {shutdown, client_heartbeat_timeout}, State};
+
+%%----------------------------------------------------------------------------
+
+handle_info(#'basic.consume_ok'{}, State) ->
+    {noreply, State, hibernate};
+handle_info(#'basic.cancel_ok'{}, State) ->
+    {noreply, State, hibernate};
+handle_info(#'basic.ack'{delivery_tag = Tag, multiple = IsMulti}, State) ->
+    ProcState = processor_state(State),
+    NewProcState = rabbit_stomp_processor:flush_pending_receipts(Tag,
+                                                                 IsMulti,
+                                                                 ProcState),
+    {noreply, processor_state(NewProcState, State), hibernate};
+handle_info({Delivery = #'basic.deliver'{},
+             #amqp_msg{props = Props, payload = Payload},
+             DeliveryCtx},
+             State) ->
+    ProcState = processor_state(State),
+    NewProcState = rabbit_stomp_processor:send_delivery(Delivery,
+                                                        Props,
+                                                        Payload,
+                                                        DeliveryCtx,
+                                                        ProcState),
+    {noreply, processor_state(NewProcState, State), hibernate};
+handle_info(#'basic.cancel'{consumer_tag = Ctag}, State) ->
+    ProcState = processor_state(State),
+    case rabbit_stomp_processor:cancel_consumer(Ctag, ProcState) of
+      {ok, NewProcState, _} ->
+        {noreply, processor_state(NewProcState, State), hibernate};
+      {stop, Reason, NewProcState} ->
+        {stop, Reason, processor_state(NewProcState, State)}
+    end;
+
+handle_info({start_heartbeats, {0, 0}}, State) ->
+    {noreply, State};
+
+handle_info({start_heartbeats, {SendTimeout, ReceiveTimeout}},
+            State = #reader_state{heartbeat_sup = SupPid, socket = Sock}) ->
+
+    SendFun = fun() -> catch rabbit_net:send(Sock, <<$\n>>) end,
+    Pid = self(),
+    ReceiveFun = fun() -> gen_server2:cast(Pid, client_timeout) end,
+    Heartbeat = rabbit_heartbeat:start(SupPid, Sock, SendTimeout,
+                                       SendFun, ReceiveTimeout, ReceiveFun),
+    {noreply, State#reader_state{heartbeat = Heartbeat}};
+
+
+%%----------------------------------------------------------------------------
+handle_info({'EXIT', From, Reason}, State) ->
+  ProcState = processor_state(State),
+  case rabbit_stomp_processor:handle_exit(From, Reason, ProcState) of
+    {stop, NewReason, NewProcState} ->
+        {stop, NewReason, processor_state(NewProcState, State)};
+    unknown_exit ->
+        {stop, {connection_died, Reason}, State}
+  end.
+%%----------------------------------------------------------------------------
+
+process_received_bytes([], State) ->
+    {ok, State};
+process_received_bytes(Bytes,
+                       State = #reader_state{
+                         processor_state = ProcState,
+                         parse_state     = ParseState}) ->
+    case rabbit_stomp_frame:parse(Bytes, ParseState) of
+        {more, ParseState1} ->
+            {ok, State#reader_state{parse_state = ParseState1}};
+        {ok, Frame, Rest} ->
+            case rabbit_stomp_processor:process_frame(Frame, ProcState) of
+                {ok, NewProcState, Conn} ->
+                    PS = rabbit_stomp_frame:initial_state(),
+                    NextState = maybe_block(State, Frame),
+                    process_received_bytes(Rest, NextState#reader_state{
+                        processor_state = NewProcState,
+                        parse_state     = PS,
+                        connection      = Conn});
+                {stop, Reason, NewProcState} ->
+                    {stop, Reason,
+                     processor_state(NewProcState, State)}
+            end;
+        {error, Reason} ->
+            %% The parser couldn't parse data. We log the reason right
+            %% now and stop with the reason 'normal' instead of the
+            %% actual parsing error, because the supervisor would log
+            %% a crash report (which is not that useful) and handle
+            %% recovery, but it's too slow.
+            log_reason({network_error, Reason}, State),
+            {stop, normal, State}
+    end.
+
+conserve_resources(Pid, _Source, {_, Conserve, _}) ->
+    Pid ! {conserve_resources, Conserve},
+    ok.
+
+register_resource_alarm() ->
+    rabbit_alarm:register(self(), {?MODULE, conserve_resources, []}).
+
+
+control_throttle(State = #reader_state{state              = CS,
+                                       conserve_resources = Mem,
+                                       heartbeat = Heartbeat}) ->
+    case {CS, Mem orelse credit_flow:blocked()} of
+        {running,   true} -> State#reader_state{state = blocking};
+        {blocking, false} -> rabbit_heartbeat:resume_monitor(Heartbeat),
+                             State#reader_state{state = running};
+        {blocked,  false} -> rabbit_heartbeat:resume_monitor(Heartbeat),
+                             State#reader_state{state = running};
+        {_,            _} -> State
+    end.
+
+maybe_block(State = #reader_state{state = blocking, heartbeat = Heartbeat},
+            #stomp_frame{command = "SEND"}) ->
+    rabbit_heartbeat:pause_monitor(Heartbeat),
+    State#reader_state{state = blocked};
+maybe_block(State, _) ->
+    State.
+
+run_socket(State = #reader_state{state = blocked}) ->
+    State;
+run_socket(State = #reader_state{recv_outstanding = true}) ->
+    State;
+run_socket(State = #reader_state{socket = Sock}) ->
+    rabbit_net:async_recv(Sock, 0, infinity),
+    State#reader_state{recv_outstanding = true}.
+
+
+terminate(Reason, State = #reader_state{ processor_state = ProcState }) ->
+  maybe_emit_stats(State),
+  log_reason(Reason, State),
+  rabbit_stomp_processor:flush_and_die(ProcState),
+  ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+
+log_reason({network_error, {ssl_upgrade_error, closed}, ConnStr}, _State) ->
+    log(error, "STOMP detected TLS upgrade error on ~s: connection closed~n",
+        [ConnStr]);
+
+log_reason({network_error,
+           {ssl_upgrade_error,
+            {tls_alert, "handshake failure"}}, ConnStr}, _State) ->
+    log(error, "STOMP detected TLS upgrade error on ~s: handshake failure~n",
+        [ConnStr]);
+
+log_reason({network_error,
+           {ssl_upgrade_error,
+            {tls_alert, "unknown ca"}}, ConnStr}, _State) ->
+    log(error, "STOMP detected TLS certificate verification error on ~s: alert 'unknown CA'~n",
+        [ConnStr]);
+
+log_reason({network_error,
+           {ssl_upgrade_error,
+            {tls_alert, Alert}}, ConnStr}, _State) ->
+    log(error, "STOMP detected TLS upgrade error on ~s: alert ~s~n",
+        [ConnStr, Alert]);
+
+log_reason({network_error, {ssl_upgrade_error, Reason}, ConnStr}, _State) ->
+    log(error, "STOMP detected TLS upgrade error on ~s: ~p~n",
+        [ConnStr, Reason]);
+
+log_reason({network_error, Reason, ConnStr}, _State) ->
+    log(error, "STOMP detected network error on ~s: ~p~n",
+        [ConnStr, Reason]);
+
+log_reason({network_error, Reason}, _State) ->
+    log(error, "STOMP detected network error: ~p~n", [Reason]);
+
+log_reason({shutdown, client_heartbeat_timeout},
+           #reader_state{ processor_state = ProcState }) ->
+    AdapterName = rabbit_stomp_processor:adapter_name(ProcState),
+    rabbit_log:warning("STOMP detected missed client heartbeat(s) "
+                       "on connection ~s, closing it~n", [AdapterName]);
+
+log_reason(normal, #reader_state{ conn_name  = ConnName}) ->
+    log(info, "closing STOMP connection ~p (~s)~n", [self(), ConnName]);
+
+log_reason(Reason, #reader_state{ processor_state = ProcState }) ->
+    AdapterName = rabbit_stomp_processor:adapter_name(ProcState),
+    rabbit_log:warning("STOMP connection ~s terminated"
+                       " with reason ~p, closing it~n", [AdapterName, Reason]).
+
+
+%%----------------------------------------------------------------------------
+
+processor_args(Configuration, Sock) ->
+    SendFun = fun (sync, IoData) ->
+                      %% no messages emitted
+                      catch rabbit_net:send(Sock, IoData);
+                  (async, IoData) ->
+                      %% {inet_reply, _, _} will appear soon
+                      %% We ignore certain errors here, as we will be
+                      %% receiving an asynchronous notification of the
+                      %% same (or a related) fault shortly anyway. See
+                      %% bug 21365.
+                      catch rabbit_net:port_command(Sock, IoData)
+              end,
+    {ok, {PeerAddr, _PeerPort}} = rabbit_net:sockname(Sock),
+    {SendFun, adapter_info(Sock), 
+     ssl_login_name(Sock, Configuration), PeerAddr}.
+
+adapter_info(Sock) ->
+    amqp_connection:socket_adapter_info(Sock, {'STOMP', 0}).
+
+ssl_login_name(_Sock, #stomp_configuration{ssl_cert_login = false}) ->
+    none;
+ssl_login_name(Sock, #stomp_configuration{ssl_cert_login = true}) ->
+    case rabbit_net:peercert(Sock) of
+        {ok, C}              -> case rabbit_ssl:peer_cert_auth_name(C) of
+                                    unsafe    -> none;
+                                    not_found -> none;
+                                    Name      -> Name
+                                end;
+        {error, no_peercert} -> none;
+        nossl                -> none
+    end.
+
+%%----------------------------------------------------------------------------
+
+start_heartbeats(_,   {0,0}    ) -> ok;
+start_heartbeats(Pid, Heartbeat) -> Pid ! {start_heartbeats, Heartbeat}.
+
+maybe_emit_stats(State) ->
+    rabbit_event:if_enabled(State, #reader_state.stats_timer,
+                            fun() -> emit_stats(State) end).
+
+emit_stats(State=#reader_state{socket = Sock, state = ConnState, connection = Conn}) ->
+    SockInfos = case rabbit_net:getstat(Sock,
+            [recv_oct, recv_cnt, send_oct, send_cnt, send_pend]) of
+        {ok,    SI} -> SI;
+        {error,  _} -> []
+    end,
+    Infos = [{pid, Conn}, {state, ConnState} | SockInfos],
+    rabbit_event:notify(connection_stats, Infos),
+    State1 = rabbit_event:reset_stats_timer(State, #reader_state.stats_timer),
+    ensure_stats_timer(State1).
+
+ensure_stats_timer(State = #reader_state{}) ->
+    rabbit_event:ensure_stats_timer(State, #reader_state.stats_timer, emit_stats).
+
+%%----------------------------------------------------------------------------
+
+
+processor_state(#reader_state{ processor_state = ProcState }) -> ProcState.
+processor_state(ProcState, #reader_state{} = State) ->
+    State#reader_state{ processor_state = ProcState}.
similarity index 51%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_sup.erl
rename to deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl
index d376e5e81e234d50ee538a46659cf569ee93dfe5..817fc2a7f70efb9dc890d2f66c760faddaf171f3 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_stomp_sup).
 
 -export([start_link/2, init/1]).
 
--export([start_client/2, start_ssl_client/3]).
-
 start_link(Listeners, Configuration) ->
     supervisor:start_link({local, ?MODULE}, ?MODULE,
                           [Listeners, Configuration]).
 
 init([{Listeners, SslListeners0}, Configuration]) ->
+    NumTcpAcceptors = application:get_env(rabbitmq_stomp, num_tcp_acceptors, 10),
     {ok, SocketOpts} = application:get_env(rabbitmq_stomp, tcp_listen_options),
-    {SslOpts, SslListeners}
+    {SslOpts, NumSslAcceptors, SslListeners}
         = case SslListeners0 of
-              [] -> {none, []};
+              [] -> {none, 0, []};
               _  -> {rabbit_networking:ensure_ssl(),
+                     application:get_env(rabbitmq_stomp, num_ssl_acceptors, 1),
                      case rabbit_networking:poodle_check('STOMP') of
                          ok     -> SslListeners0;
                          danger -> []
                      end}
           end,
     {ok, {{one_for_all, 10, 10},
-          [{rabbit_stomp_client_sup_sup,
-            {rabbit_client_sup, start_link,
-             [{local, rabbit_stomp_client_sup_sup},
-              {rabbit_stomp_client_sup, start_link,[]}]},
-            transient, infinity, supervisor, [rabbit_client_sup]} |
            listener_specs(fun tcp_listener_spec/1,
-                          [SocketOpts, Configuration], Listeners) ++
+                          [SocketOpts, Configuration, NumTcpAcceptors], Listeners) ++
            listener_specs(fun ssl_listener_spec/1,
-                          [SocketOpts, SslOpts, Configuration], SslListeners)]}}.
+                          [SocketOpts, SslOpts, Configuration, NumSslAcceptors], SslListeners)}}.
 
 listener_specs(Fun, Args, Listeners) ->
     [Fun([Address | Args]) ||
         Listener <- Listeners,
         Address  <- rabbit_networking:tcp_listener_addresses(Listener)].
 
-tcp_listener_spec([Address, SocketOpts, Configuration]) ->
+tcp_listener_spec([Address, SocketOpts, Configuration, NumAcceptors]) ->
     rabbit_networking:tcp_listener_spec(
       rabbit_stomp_listener_sup, Address, SocketOpts,
-      stomp, "STOMP TCP Listener",
-      {?MODULE, start_client, [Configuration]}).
+      ranch_tcp, rabbit_stomp_client_sup, Configuration,
+      stomp, NumAcceptors, "STOMP TCP Listener").
 
-ssl_listener_spec([Address, SocketOpts, SslOpts, Configuration]) ->
+ssl_listener_spec([Address, SocketOpts, SslOpts, Configuration, NumAcceptors]) ->
     rabbit_networking:tcp_listener_spec(
-      rabbit_stomp_listener_sup, Address, SocketOpts,
-      'stomp/ssl', "STOMP SSL Listener",
-      {?MODULE, start_ssl_client, [Configuration, SslOpts]}).
-
-start_client(Configuration, Sock, SockTransform) ->
-    {ok, _Child, Reader} = supervisor:start_child(rabbit_stomp_client_sup_sup,
-                                                  [Configuration]),
-    ok = rabbit_net:controlling_process(Sock, Reader),
-    Reader ! {go, Sock, SockTransform},
-
-    %% see comment in rabbit_networking:start_client/2
-    gen_event:which_handlers(error_logger),
-
-    Reader.
-
-start_client(Configuration, Sock) ->
-    start_client(Configuration, Sock, fun (S) -> {ok, S} end).
-
-start_ssl_client(Configuration, SslOpts, Sock) ->
-    Transform = rabbit_networking:ssl_transform_fun(SslOpts),
-    start_client(Configuration, Sock, Transform).
+      rabbit_stomp_listener_sup, Address, SocketOpts ++ SslOpts,
+      ranch_ssl, rabbit_stomp_client_sup, Configuration,
+      'stomp/ssl', NumAcceptors, "STOMP SSL Listener").
similarity index 85%
rename from rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_util.erl
rename to deps/rabbitmq_stomp/src/rabbit_stomp_util.erl
index 9034a2bd87692f0ca0a4a45712554e35494bd49c..e9bf6e40b7d1e890d36cad0fb040eb332f220eef 100644 (file)
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_stomp_util).
 
--export([parse_message_id/1, subscription_queue_name/2]).
+-export([parse_message_id/1, subscription_queue_name/3]).
 -export([longstr_field/2]).
 -export([ack_mode/1, consumer_tag_reply_to/1, consumer_tag/1, message_headers/1,
          headers_post_process/1, headers/5, message_properties/1, tag_to_id/1,
-         msg_header_name/1, ack_header_name/1, build_arguments/1]).
+         msg_header_name/1, ack_header_name/1, build_arguments/1, build_params/2,
+         has_durable_header/1]).
 -export([negotiate_version/2]).
 -export([trim_headers/1]).
 
@@ -274,7 +275,6 @@ build_arguments(Headers) ->
                     Headers),
     {arguments, Arguments}.
 
-%% build the actual value thru pattern matching
 build_argument(?HEADER_X_DEAD_LETTER_EXCHANGE, Val) ->
     {list_to_binary(?HEADER_X_DEAD_LETTER_EXCHANGE), longstr,
      list_to_binary(string:strip(Val))};
@@ -297,6 +297,58 @@ build_argument(?HEADER_X_MESSAGE_TTL, Val) ->
     {list_to_binary(?HEADER_X_MESSAGE_TTL), long,
      list_to_integer(string:strip(Val))}.
 
+build_params(EndPoint, Headers) ->
+    Params = lists:foldl(fun({K, V}, Acc) ->
+                             case lists:member(K, ?HEADER_PARAMS) of
+                               true  -> [build_param(K, V) | Acc];
+                               false -> Acc
+                             end
+                         end,
+                         [],
+                         Headers),
+    rabbit_misc:plmerge(Params, default_params(EndPoint)).
+
+build_param(?HEADER_PERSISTENT, Val) ->
+    {durable, string_to_boolean(Val)};
+
+build_param(?HEADER_DURABLE, Val) ->
+    {durable, string_to_boolean(Val)};
+
+build_param(?HEADER_AUTO_DELETE, Val) ->
+    {auto_delete, string_to_boolean(Val)};
+
+build_param(?HEADER_EXCLUSIVE, Val) ->
+    {exclusive, string_to_boolean(Val)}.
+
+default_params({queue, _}) ->
+    [{durable, true}];
+
+default_params({exchange, _}) ->
+    [{exclusive, false}, {auto_delete, true}];
+
+default_params({topic, _}) ->
+    [{exclusive, false}, {auto_delete, true}];
+
+default_params(_) ->
+    [{durable, false}].
+
+string_to_boolean("True") ->
+    true;
+string_to_boolean("true") ->
+    true;
+string_to_boolean("False") ->
+    false;
+string_to_boolean("false") ->
+    false;
+string_to_boolean(_) ->
+    undefined.
+
+has_durable_header(Frame) ->
+    rabbit_stomp_frame:boolean_header(
+      Frame, ?HEADER_DURABLE, false) or
+    rabbit_stomp_frame:boolean_header(
+      Frame, ?HEADER_PERSISTENT, false).
+
 %%--------------------------------------------------------------------
 %% Destination Formatting
 %%--------------------------------------------------------------------
@@ -314,14 +366,19 @@ format_destination(Exchange, RoutingKey) ->
 %% Destination Parsing
 %%--------------------------------------------------------------------
 
-subscription_queue_name(Destination, SubscriptionId) ->
-    %% We need a queue name that a) can be derived from the
-    %% Destination and SubscriptionId, and b) meets the constraints on
-    %% AMQP queue names. It doesn't need to be secure; we use md5 here
-    %% simply as a convenient means to bound the length.
-    rabbit_guid:string(
-      erlang:md5(term_to_binary({Destination, SubscriptionId})),
-      "stomp-subscription").
+subscription_queue_name(Destination, SubscriptionId, Frame) ->
+    case rabbit_stomp_frame:header(Frame, ?HEADER_X_QUEUE_NAME, undefined) of
+        undefined ->
+            %% We need a queue name that a) can be derived from the
+            %% Destination and SubscriptionId, and b) meets the constraints on
+            %% AMQP queue names. It doesn't need to be secure; we use md5 here
+            %% simply as a convenient means to bound the length.
+            rabbit_guid:string(
+              erlang:md5(term_to_binary({Destination, SubscriptionId})),
+              "stomp-subscription");
+        Name ->
+            Name
+    end.
 
 %% ---- Helpers ----
 
diff --git a/deps/rabbitmq_stomp/src/rabbitmq_stomp.app.src b/deps/rabbitmq_stomp/src/rabbitmq_stomp.app.src
new file mode 100644 (file)
index 0000000..444cec0
--- /dev/null
@@ -0,0 +1,23 @@
+{application, rabbitmq_stomp,
+ [{description, "RabbitMQ STOMP plugin"},
+  {vsn, "3.6.6"},
+  {modules, []},
+  {registered, []},
+  {mod, {rabbit_stomp, []}},
+  {env, [{default_user,
+          [{login, <<"guest">>},
+           {passcode, <<"guest">>}]},
+         {default_vhost, <<"/">>},
+         {ssl_cert_login, false},
+         {implicit_connect, false},
+         {tcp_listeners, [61613]},
+         {num_tcp_acceptors, 10},
+         {ssl_listeners, []},
+         {num_ssl_acceptors, 1},
+         {tcp_listen_options, [{backlog,   128},
+                               {nodelay,   true}]},
+        %% see rabbitmq/rabbitmq-stomp#39
+        {trailing_lf, true},
+        %% see rabbitmq/rabbitmq-stomp#57
+        {hide_server_info, false}]},
+  {applications, [kernel, stdlib, rabbit_common, rabbit, amqp_client]}]}.
diff --git a/deps/rabbitmq_top/CODE_OF_CONDUCT.md b/deps/rabbitmq_top/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_top/CONTRIBUTING.md b/deps/rabbitmq_top/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_top/Makefile b/deps/rabbitmq_top/Makefile
new file mode 100644 (file)
index 0000000..c15576e
--- /dev/null
@@ -0,0 +1,14 @@
+PROJECT = rabbitmq_top
+
+DEPS = rabbit_common rabbit amqp_client rabbitmq_management
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_top/README.md b/deps/rabbitmq_top/README.md
new file mode 100644 (file)
index 0000000..1d5b546
--- /dev/null
@@ -0,0 +1,42 @@
+# Features
+
+Adds top-like information on the Erlang VM to the management plugin.
+
+Screenshots: http://imgur.com/a/BjVOP
+
+Should work with older versions of RabbitMQ, but when compiled against
+RabbitMQ 3.3.0 or later you can see descriptions of the processes
+matching RabbitMQ server concepts (queue, channel etc).
+
+Sort by process ID, memory use or reductions/sec (an approximate
+measure of CPU use).
+
+Click on the process description (e.g. "my queue") to see that
+object's management view.
+
+Click on the process ID (e.g. "&lt;0.3423.0&gt;") to see some more
+Erlang-ish process details, including the current stacktrace.
+
+# Downloading
+
+You can download a pre-built binary of this plugin from
+http://www.rabbitmq.com/community-plugins.html.
+
+# Building
+
+You can build and install it like any other plugin (see
+[the plugin development guide](http://www.rabbitmq.com/plugin-development.html)).
+
+# API
+
+You can drive the HTTP API yourself. It installs into the management plugin's API; you should understand that first. Once you do, the additional paths look like:
+
+    /api/top/<node-name>
+
+List of processes. Takes similar query string parameters to other
+lists, `sort`, `sort_reverse` and `columns`. Sorting is quite
+important as it currently hard-codes returning the top 20 processes.
+
+    /api/process/<pid>
+
+Individual process details.
diff --git a/deps/rabbitmq_top/erlang.mk b/deps/rabbitmq_top/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards  and  for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Erlang snippet (run via `erl -eval` through $(call erlang,...)) used while
+# autopatching a dependency: rewrites the {modules, ...} entry of the dep's
+# ebin/$1.app to list every .erl module found under the dep's src/ tree.
+# $1 / $(1) expand to the dependency name before the code reaches erl.
+# NOTE: do not insert lines inside the define — its body is the literal
+# Erlang source passed on the command line.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# Evaluates a dynamic src/$1.app.src.script (rebar convention) with empty
+# bindings and writes the resulting static term back as src/$1.app.src,
+# so the rest of the build can treat it as a plain .app.src file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# Normalizes a dependency's application resource file:
+#   - falls back to ebin/$1.app when src/$1.app.src does not exist;
+#   - empties the {modules, ...} list (erlang.mk regenerates it at build time);
+#   - turns rebar's {vsn, git} marker into the literal string "git";
+#   - adds an empty {registered, []} entry when missing;
+#   - deletes the ebin copy after converting it into an app.src.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch methods: each dep_fetch_* define expands to shell commands inside the
+# dep_target recipe. Lines are chained with `; \` — never insert lines inside
+# these defines or the continuation breaks.
+# git: clone without checkout (-n), then check out the pinned commit/tag.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# git-submodule: the dep is a registered submodule of the current repository.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Mercurial: clone without working copy (-U), then update to the pinned rev.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Subversion: plain checkout of the configured URL (no revision pinning here).
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# cp: local dependency — recursively copy the source directory into DEPS_DIR.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Erlang snippet that downloads a Hex package tarball ($(1) = name, $(2) =
+# version) from the hex.pm S3 bucket, then extracts the inner contents.tar.gz
+# into the dep directory.
+# NOTE(review): the download is a plain HTTPS GET with no checksum or
+# signature verification of the tarball contents — confirm this is acceptable.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# Shell wrapper: runs the dep_fetch_hex.erl snippet above, passing the dep
+# name and the version taken from word 2 of the dep_$(1) variable.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback when no fetch method matches: report on stderr and abort the
+# recipe with a distinctive exit code (78).
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Old format: dep_NAME = <repo-url> [<ref>]; defaults to the master branch
+# when no ref is given. Emits a deprecation warning at expansion time.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolves which fetch method to use for dependency $(1):
+#   1. if dep_$(1) is set and its first word names a dep_fetch_* method, use it;
+#   2. if dep_$(1) is set but unrecognized, use `legacy` inside a dep build
+#      (IS_DEP), otherwise `fail`;
+#   3. if $(1) is a known package from the index, use its registered method;
+#   4. otherwise `fail`.
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# Template (instantiated via $(eval) below) generating the fetch/prepare rule
+# for one dependency. Steps: refuse deps that shadow a local application,
+# fetch via the resolved dep_fetch_* method, run autoreconf/configure when an
+# autotools build is detected (the leading `-` makes a configure failure
+# non-fatal), then autopatch unless the dep is listed in NO_AUTOPATCH.
+# The amqp_client/rabbit special cases pull in rabbitmq-codegen/-server
+# when the RABBITMQ_*_PATCH variables are set.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch rule per build-time and run-time dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into each local application (ALL_APPS_DIRS) when
+# running from the top-level project (IS_APP unset), and remove the whole
+# DEPS_DIR on distclean unless dependency handling is skipped.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Includes a plugin makefile from DEPS_DIR and makes it depend on the dep
+# being fetched first. $(1) = path under DEPS_DIR, $(2) = owning dep.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# Entries in DEP_PLUGINS are either "dep/path/to/file.mk" or just "dep"
+# (which defaults to dep/plugins.mk).
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL plugin: compiles Django-style templates (*.dtl under DTL_PATH)
+# into ebin/*_dtl.beam modules as part of the app build.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH, subdirectories are encoded into the module name
+# (slashes become underscores); otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet compiling each template file in $(1) with erlydtl into ebin/.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Only recompile templates that are newer than the target ($? prerequisites).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers plugin: generates Erlang sources from src/*.proto via
+# protobuffs_compile, then compiles the generated .erl into ebin/.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate, compile, then remove the intermediate .erl left in ebin/.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Erlang snippet: for each .proto file, emit headers next to include/ and
+# sources next to ebin/ of the file's parent directory.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; -Werror is stripped when building as a dependency
+# (see compile_erl below) so deps don't fail on warnings.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# Convention: *_verbose_0 prints a short status line (V=0), *_verbose_2
+# traces commands with `set -x` (V=2); *_verbose selects by $(V).
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# If ebin/test exists, the previous build was a test build; force a clean
+# first so test-compiled beams don't leak into the normal build.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# Template for a generated ebin/$(PROJECT).app when no src/*.app.src exists.
+# Two variants: a library (no callback module) and a full application with
+# registered processes and a {mod, ...} entry, chosen by whether
+# src/$(PROJECT_MOD).erl exists. $(1) = git describe id, $(2) = module list.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+# ASN.1 compilation emits .erl into src/ (picked up by ERL_FILES) and moves
+# headers/databases into include/.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+# Two-pass: compile .mib to .bin in priv/mibs/, then generate .hrl headers.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+# Scanner (.xrl) and parser (.yrl) sources compile to .erl in src/.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang snippet that scans every .erl file for -behaviour, parse_transform,
+# -include/-include_lib and -import attributes, then writes $(PROJECT).d:
+# make-style "module :: deps" lines plus a COMPILE_FIRST list derived from a
+# topological sort of the module dependency graph (digraph is acyclic, so a
+# behaviour cycle would crash here rather than loop).
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Regenerate the dependency file unless NO_MAKEDEP is set and one exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+	$(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+		touch -c $(PROJECT).d; \
+	fi
+	@touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+	$(verbose) mkdir -p ebin/
+
+# Compile changed sources; -Werror is dropped when building as a dependency
+# so a dep's warnings don't break downstream builds.
+define compile_erl
+	$(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+		-pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build ebin/$(PROJECT).app: compile sources, then either generate the .app
+# from the app_file template (no app.src) or rewrite src/$(PROJECT).app.src,
+# substituting the module list and the git-describe id.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+	$(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+	$(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+	$(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+	$(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+		> ebin/$(PROJECT).app
+else
+	$(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+		echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+		exit 1; \
+	fi
+	$(appsrc_verbose) cat src/$(PROJECT).app.src \
+		| sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+		| sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+		> ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+	$(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+		$(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+		$(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Documentation-only dependencies (DOC_DEPS): fetched and built like regular
+# deps, but only for doc targets; skipped entirely when SKIP_DEPS is set.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Release-only dependencies (REL_DEPS): same pattern as doc-deps above.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Test support: fetches TEST_DEPS, compiles $(TEST_DIR) sources with
+# TEST_ERLC_OPTS, and rebuilds the app with -DTEST when needed. The
+# ebin/test marker file records that ebin/ holds a test build, which
+# triggers a clean on the next normal build (see the `app` target above).
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+	$(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+		$(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# Three cases: no src/ (tests only); src/ last built normally (clean first);
+# src/ already built for test (incremental).
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+	$(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+	$(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Generates a rebar.config equivalent of this project's DEPS and ERLC_OPTS,
+# so rebar-based consumers can build an erlang.mk project.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Convert one erlc flag: drop -Werror, keep only +opt flags (stripping the +).
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+	$(if $(findstring +,$1),\
+		$(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Hex deps become {name,"vsn"}; everything else becomes a {git, Repo, Commit}
+# tuple with a wildcard version regex.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+	$(if $(filter hex,$(call dep_fetch,$d)),\
+		{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+		{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export through the environment so the multi-line value survives the shell.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+	$(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# AsciiDoc plugin: builds a PDF/HTML user guide from doc/src/guide/ and
+# gzipped man pages from doc/src/manual/ using a2x.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+	a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+	a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+	for f in doc/src/manual/*.asciidoc ; do \
+		a2x -v -f manpage $$f ; \
+	done
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p doc/man$$s/ ; \
+		mv doc/src/manual/*.$$s doc/man$$s/ ; \
+		gzip doc/man$$s/*.$$s ; \
+	done
+
+install-docs:: install-asciidoc
+
+# NOTE(review): install swaps -g/-o semantics (`-g` is given `id -u`, `-o`
+# `id -g`) — looks inverted; confirm against upstream erlang.mk.
+install-asciidoc: asciidoc-manual
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+		install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+	done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+	$(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Bootstrap plugin: skeleton generators for new OTP applications, libraries
+# and releases. The bs_* defines are file templates expanded with $p (project
+# name) and $n (module name) at generation time.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Bootstrap targets:" \
+		"  bootstrap          Generate a skeleton of an OTP application" \
+		"  bootstrap-lib      Generate a skeleton of an OTP library" \
+		"  bootstrap-rel      Generate the files needed to build a release" \
+		"  new-app in=NAME    Create a new local OTP application NAME" \
+		"  new-lib in=NAME    Create a new local OTP library NAME" \
+		"  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+		"  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+		"  list-templates     List available templates"
+
+# Bootstrap templates.
+
+# .app.src for a full application (has a $p_app callback module).
+define bs_appsrc
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]},
+	{mod, {$p_app, []}},
+	{env, []}
+]}.
+endef
+
+# .app.src for a library (no callback module, no env).
+define bs_appsrc_lib
+{application, $p, [
+	{description, ""},
+	{vsn, "0.1.0"},
+	{id, "git"},
+	{modules, []},
+	{registered, []},
+	{applications, [
+		kernel,
+		stdlib
+	]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+# Two Makefile variants: when SP is set, the generated Makefile records the
+# same whitespace setting so nested bootstraps stay consistent.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+# Makefile for an application living under APPS_DIR of an umbrella project.
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+# application behaviour callback module skeleton.
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+	$p_sup:start_link().
+
+stop(_State) ->
+	ok.
+endef
+
+# relx configuration for bootstrap-rel.
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+# Empty one_for_one supervisor skeleton ($(n) = module name).
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+	supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+	Procs = [],
+	{ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_top/priv/www/js/tmpl/ets_tables.ejs b/deps/rabbitmq_top/priv/www/js/tmpl/ets_tables.ejs
new file mode 100644 (file)
index 0000000..591e492
--- /dev/null
@@ -0,0 +1,52 @@
+<h1>Top ETS Tables: <b><%= top.node %></b></h1>
+
+<p>
+  Node:
+  <select id="top-node-ets">
+  <% for (var i = 0; i < nodes.length; i++) { %>
+     <option name="#/top/<%= fmt_string(nodes[i].name) %>"<% if (nodes[i].name == top.node) { %>selected="selected"<% } %>><%= nodes[i].name %></option>
+  <% } %>
+  </select>
+
+  Rows:
+  <select id="row-count-ets">
+  <%
+    var row_counts = [20, 50, 100, 150];
+    for (var i = 0; i < row_counts.length; i++) {
+  %>
+    <option name="<%= row_counts[i] %>"
+        <% if (row_counts[i] == top.row_count) { %>selected="selected"<% } %>>
+    <%= row_counts[i] %></option>
+  <% } %>
+  </select>
+</p>
+
+<table class="list updatable">
+ <thead>
+  <tr>
+    <th><%= fmt_sort('Name', 'name') %></th>
+    <th><%= fmt_sort('Owner Name', 'owner_name') %></th>
+    <th><%= fmt_sort('Memory', 'memory') %></th>
+    <th><%= fmt_sort('Size', 'size') %></th>
+    <th><%= fmt_sort('Type', 'type') %></th>
+    <th>Protection</th>
+    <th>Compressed</th>
+  </tr>
+ </thead>
+ <tbody>
+<%
+ for (var i = 0; i < top.ets_tables.length; i++) {
+    var table = top.ets_tables[i];
+%>
+  <tr<%= alt_rows(i)%>>
+    <td><%= fmt_string(table.name) %></td>
+    <td><%= fmt_string(table.owner_name) %></td>
+    <td><%= fmt_bytes(table.memory * 1.0) %></td>
+    <td><%= table.size %></td>
+    <td><%= table.type %></td>
+    <td><%= table.protection %></td>
+    <td><%= table.compressed %></td>
+  </tr>
+<% } %>
+ </tbody>
+</table>
diff --git a/deps/rabbitmq_top/priv/www/js/tmpl/process.ejs b/deps/rabbitmq_top/priv/www/js/tmpl/process.ejs
new file mode 100644 (file)
index 0000000..a567c6e
--- /dev/null
@@ -0,0 +1,54 @@
+<h1>Process: <b><%= fmt_string(process.pid) %></b></h1>
+
+<div class="updatable">
+  <table class="facts">
+    <tr>
+      <th>Type</th>
+      <td><%= fmt_remove_rabbit_prefix(process.name.type) %></td>
+    </tr>
+    <tr>
+      <th>Description</th>
+      <td><%= fmt_process_name(process) %></td>
+    </tr>
+    <tr>
+      <th>Memory</th>
+      <td><%= fmt_bytes(process.memory) %></td>
+    </tr>
+    <tr>
+      <th>Message queue length</th>
+      <td><%= process.message_queue_len %></td>
+    </tr>
+    <tr>
+      <th>Reductions / sec</th>
+      <td><%= fmt_reduction_delta(process.reduction_delta) %></td>
+    </tr>
+    <tr>
+      <th>Total reductions</th>
+      <td><%= process.reductions %></td>
+    </tr>
+    <tr>
+      <th>Status</th>
+      <td><%= process.status %></td>
+    </tr>
+    <tr>
+      <th>Trap exit</th>
+      <td><%= fmt_boolean(process.trap_exit) %></td>
+    </tr>
+    <tr>
+      <th>Links</th>
+      <td><%= fmt_pids(process.links) %></td>
+    </tr>
+    <tr>
+      <th>Monitors</th>
+      <td><%= fmt_pids(process.monitors) %></td>
+    </tr>
+    <tr>
+      <th>Monitored by</th>
+      <td><%= fmt_pids(process.monitored_by) %></td>
+    </tr>
+    <tr>
+      <th>Current stacktrace</th>
+      <td><pre><%= fmt_string(process.current_stacktrace) %></pre></td>
+    </tr>
+  </table>
+</div>
diff --git a/deps/rabbitmq_top/priv/www/js/tmpl/processes.ejs b/deps/rabbitmq_top/priv/www/js/tmpl/processes.ejs
new file mode 100644 (file)
index 0000000..ed863f2
--- /dev/null
@@ -0,0 +1,52 @@
+<h1>Top Processes: <b><%= top.node %></b></h1>
+
+<p>
+  Node:
+  <select id="top-node">
+  <% for (var i = 0; i < nodes.length; i++) { %>
+     <option name="#/top/<%= fmt_string(nodes[i].name) %>"<% if (nodes[i].name == top.node) { %>selected="selected"<% } %>><%= nodes[i].name %></option>
+  <% } %>
+  </select>
+
+  Rows:
+  <select id="row-count">
+  <%
+    var row_counts = [20, 50, 100, 150];
+    for (var i = 0; i < row_counts.length; i++) {
+  %>
+    <option name="<%= row_counts[i] %>"
+        <% if (row_counts[i] == top.row_count) { %>selected="selected"<% } %>>
+    <%= row_counts[i] %></option>
+  <% } %>
+  </select>
+</p>
+
+<table class="list updatable">
+ <thead>
+  <tr>
+    <th><%= fmt_sort('Process', 'pid') %></th>
+    <th>Description</th>
+    <th>Type</th>
+    <th><%= fmt_sort('Memory', 'memory') %></th>
+    <th><%= fmt_sort('Reductions / sec', 'reduction_delta') %></th>
+    <th>Message queue</th>
+    <th>Status</th>
+  </tr>
+ </thead>
+ <tbody>
+<%
+ for (var i = 0; i < top.processes.length; i++) {
+    var process = top.processes[i];
+%>
+  <tr<%= alt_rows(i)%>>
+    <td><%= link_pid(process.pid) %></td>
+    <td><%= fmt_process_name(process) %></td>
+    <td><%= fmt_remove_rabbit_prefix(process.name.type) %></td>
+    <td><%= fmt_bytes(process.memory * 1.0) %></td>
+    <td class="r"><%= fmt_reduction_delta(process.reduction_delta) %></td>
+    <td class="r"><%= process.message_queue_len %></td>
+    <td><%= process.status %></td>
+  </tr>
+<% } %>
+ </tbody>
+</table>
diff --git a/deps/rabbitmq_top/priv/www/js/top.js b/deps/rabbitmq_top/priv/www/js/top.js
new file mode 100644 (file)
index 0000000..c5c4225
--- /dev/null
@@ -0,0 +1,96 @@
+dispatcher_add(function(sammy) {
+    sammy.get('#/top', function() {
+            var nodes = JSON.parse(sync_get('/nodes'));
+            go_to('#/top/' + nodes[0].name + "/20");
+        });
+    sammy.get('#/top/ets', function() {
+            var nodes = JSON.parse(sync_get('/nodes'));
+            go_to('#/top/ets/' + nodes[0].name + "/20");
+        });
+    sammy.get('#/top/:node/:row_count', function() {
+            render({'top':   {path:    '/top/' + esc(this.params['node']),
+                              options: {sort: true,
+                                        row_count: this.params['row_count']}},
+                    'nodes': '/nodes'},
+                    'processes', '#/top');
+        });
+    sammy.get('#/top/ets/:node/:row_count', function() {
+            render({'top': {path:    '/top/ets/' + esc(this.params['node']),
+                            options: {sort: true,
+                                      row_count: this.params['row_count']}},
+                    'nodes': '/nodes'},
+                    'ets_tables', '#/top/ets');
+        });
+    sammy.get('#/process/:pid', function() {
+            render({'process': '/process/' + esc(this.params['pid'])},
+                    'process', '#/top');
+        });
+});
+
+NAVIGATION['Admin'][0]['Top Processes']  = ['#/top', 'administrator'];
+NAVIGATION['Admin'][0]['Top ETS Tables'] = ['#/top/ets', 'administrator'];
+
+$('select#top-node').live('change', function() {
+    go_to('#/top/' + $(this).val());
+});
+
+$('select#top-node-ets').live('change', function() {
+    go_to('#/top/ets' + $(this).val());
+});
+
+$('select#row-count').live('change', function() {
+    go_to('#/top/' + $('select#top-node').val() + "/" + $(this).val());
+});
+
+$('select#row-count-ets').live('change', function() {
+    go_to('#/top/ets/' + $('select#top-node-ets').val() + "/" + $(this).val());
+});
+
+function link_pid(name) {
+    return _link_to(name, '#/process/' + esc(name));
+}
+
+function fmt_process_name(process) {
+    if (process == undefined) return '';
+    var name = process.name;
+
+    if (name.supertype != undefined) {
+        if (name.supertype == 'channel') {
+            return link_channel(name.connection_name + ' (' +
+                                name.channel_number + ')');
+        }
+        else if (name.supertype == 'queue') {
+            return link_queue(name.vhost, name.queue_name);
+        }
+        else if (name.supertype == 'connection') {
+            return link_conn(name.connection_name);
+        }
+    }
+    else {
+        return '<b>' + name.name + '</b>';
+    }
+}
+
+function fmt_remove_rabbit_prefix(name) {
+    if (name == 'rabbit_amqqueue_process') return 'queue';
+
+    if (name.substring(0, 7) == 'rabbit_') {
+        return name.substring(7);
+    }
+    else {
+        return name;
+    }
+}
+
+function fmt_pids(pids) {
+    var txt = '';
+    for (var i = 0; i < pids.length; i++) {
+        txt += link_pid(pids[i]) + ' ';
+    }
+
+    return txt;
+}
+
+function fmt_reduction_delta(delta) {
+    return Math.round(delta / 5); // gen_server updates every 5s
+}
diff --git a/deps/rabbitmq_top/rabbitmq-components.mk b/deps/rabbitmq_top/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 57%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_all.erl
rename to deps/rabbitmq_top/src/rabbit_top_app.erl
index aade81591dd7ec3a00a3977db9195c947cb1cbc4..a1e7e96f51c4b0e8e164426ff38337e81cf71232 100644 (file)
@@ -8,18 +8,19 @@
 %%   License for the specific language governing rights and limitations
 %%   under the License.
 %%
-%%   The Original Code is RabbitMQ Management Console.
+%%   The Original Code is RabbitMQ.
 %%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
+%%   The Initial Developer of the Original Code is VMware, Inc.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
--module(rabbit_ws_test_all).
+-module(rabbit_top_app).
 
--export([all_tests/0]).
+-behaviour(application).
+-export([start/2, stop/1]).
 
-all_tests() ->
-    ok = eunit:test(rabbit_ws_test_raw_websocket, [verbose]),
-    ok = eunit:test(rabbit_ws_test_sockjs_websocket, [verbose]),
-    ok.
+start(_Type, _StartArgs) ->
+    rabbit_top_sup:start_link().
 
+stop(_State) ->
+    ok.
diff --git a/deps/rabbitmq_top/src/rabbit_top_extension.erl b/deps/rabbitmq_top/src/rabbit_top_extension.erl
new file mode 100644 (file)
index 0000000..b45b088
--- /dev/null
@@ -0,0 +1,26 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 1.1 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ.
+%%
+%%  The Initial Developer of the Original Code is VMware, Inc.
+%%  Copyright (c) 2007-2012 VMware, Inc.  All rights reserved.
+%%
+
+-module(rabbit_top_extension).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0, web_ui/0]).
+
+dispatcher() -> [{["top",        node], rabbit_top_wm_processes, []},
+                 {["top", "ets", node], rabbit_top_wm_ets_tables, []},
+                 {["process",    pid],  rabbit_top_wm_process, []}].
+web_ui()     -> [{javascript, <<"top.js">>}].
similarity index 53%
rename from rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup.erl
rename to deps/rabbitmq_top/src/rabbit_top_sup.erl
index 992ff722629b0dfc7c0f1152844efa02fe42267d..f2603b26c3e7f9ec930c358c8ab8c4774bd6934d 100644 (file)
@@ -8,27 +8,27 @@
 %%   License for the specific language governing rights and limitations
 %%   under the License.
 %%
-%%   The Original Code is RabbitMQ Management Console.
+%%   The Original Code is RabbitMQ.
 %%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2011-2015 Pivotal Software, Inc.  All rights reserved.
+%%   The Initial Developer of the Original Code is VMware, Inc.
+%%   Copyright (c) 2011-2012 VMware, Inc.  All rights reserved.
 %%
 
--module(rabbit_mgmt_sup).
+-module(rabbit_top_sup).
 
--behaviour(mirrored_supervisor).
+-behaviour(supervisor).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
 
--export([init/1]).
 -export([start_link/0]).
+-export([init/1]).
 
--include_lib("rabbit_common/include/rabbit.hrl").
+start_link() ->
+     supervisor:start_link({local, ?MODULE}, ?MODULE, []).
 
 init([]) ->
-    DB = {rabbit_mgmt_db, {rabbit_mgmt_db, start_link, []},
-          permanent, ?MAX_WAIT, worker, [rabbit_mgmt_db]},
-    {ok, {{one_for_one, 10, 10}, [DB]}}.
+    Top = {rabbit_top_worker,
+           {rabbit_top_worker, start_link, []},
+           permanent, ?WORKER_WAIT, worker, [rabbit_top_worker]},
+    {ok, {{one_for_one, 10, 10}, [Top]}}.
 
-start_link() ->
-     mirrored_supervisor:start_link(
-       {local, ?MODULE}, ?MODULE, fun rabbit_misc:execute_mnesia_transaction/1,
-       ?MODULE, []).
diff --git a/deps/rabbitmq_top/src/rabbit_top_util.erl b/deps/rabbitmq_top/src/rabbit_top_util.erl
new file mode 100644 (file)
index 0000000..225c09f
--- /dev/null
@@ -0,0 +1,132 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 1.1 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ.
+%%
+%%  The Initial Developer of the Original Code is VMware, Inc.
+%%  Copyright (c) 2007-2012 VMware, Inc.  All rights reserved.
+%%
+
+-module(rabbit_top_util).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([toplist/3, fmt_all/1, fmt/1, obtain_name/1, safe_process_info/2]).
+
+toplist(Key, Count, List) ->
+    Sorted = lists:sublist(
+               lists:reverse(
+                 lists:keysort(1, [toplist(Key, I) || I <- List])), Count),
+    [add_name(Info) || {_, Info} <- Sorted].
+
+toplist(Key, Info) ->
+    {Key, Val} = lists:keyfind(Key, 1, Info),
+    {Val, Info}.
+
+add_name(Info) ->
+    {pid, Pid} = lists:keyfind(pid, 1, Info),
+    [{name, obtain_name(Pid)} | Info].
+
+fmt_all(Info) -> [{K, fmt(V)} || {K, V} <- Info].
+
+fmt(Pid) when is_pid(Pid) ->
+    list_to_binary(pid_to_list(Pid));
+fmt(Other) ->
+    list_to_binary(rabbit_misc:format("~p", [Other])).
+
+obtain_name(Pid) ->
+    lists:foldl(fun(Fun,  fail) -> Fun(Pid);
+                   (_Fun, Res)  -> Res
+                end, fail, [fun obtain_from_registered_name/1,
+                            fun obtain_from_process_name/1,
+                            fun obtain_from_initial_call/1]).
+
+obtain_from_registered_name(Pid) ->
+    case safe_process_info(Pid, registered_name) of
+        {registered_name, Name} -> [{type, registered},
+                                    {name, Name}];
+        _                       -> fail
+    end.
+
+obtain_from_process_name(Pid) ->
+    case safe_process_info(Pid, dictionary) of
+        {dictionary, Dict} ->
+            case lists:keyfind(process_name, 1, Dict) of
+                {process_name, Name} -> fmt_process_name(Name);
+                false                -> fail
+            end;
+        _ ->
+            fail
+    end.
+
+fmt_process_name({Type, {ConnName, ChNum}}) when is_binary(ConnName),
+                                                 is_integer(ChNum) ->
+    [{supertype,       channel},
+     {type,            Type},
+     {connection_name, ConnName},
+     {channel_number,  ChNum}];
+
+fmt_process_name({Type, #resource{virtual_host = VHost,
+                                  name         = Name}}) ->
+    [{supertype,  queue},
+     {type,       Type},
+     {queue_name, Name},
+     {vhost,      VHost}];
+
+fmt_process_name({Type, ConnName}) when is_binary(ConnName) ->
+    [{supertype,       connection},
+     {type,            Type},
+     {connection_name, ConnName}];
+
+fmt_process_name({Type, unknown}) -> %% probably some adapter thing
+    [{supertype,       connection},
+     {type,            Type},
+     {connection_name, unknown}].
+
+obtain_from_initial_call(Pid) ->
+    case initial_call(Pid) of
+        fail -> [{type, starting},
+                 {name, fmt(Pid)}];
+        MFA  -> case guess_initial_call(MFA) of
+                    fail -> [{type, unknown},
+                             {name, fmt(MFA)}];
+                    Name -> [{type, known},
+                             {name, Name}]
+                end
+    end.
+
+initial_call(Pid) ->
+    case initial_call_dict(Pid) of
+        fail -> case safe_process_info(Pid, initial_call) of
+                    {initial_call, MFA} -> MFA;
+                    _                   -> fail
+                end;
+        MFA  -> MFA
+    end.
+
+initial_call_dict(Pid) ->
+    case safe_process_info(Pid, dictionary) of
+        {dictionary, Dict} ->
+            case lists:keyfind('$initial_call', 1, Dict) of
+                {'$initial_call', MFA} -> MFA;
+                false                  -> fail
+            end;
+        _ ->
+            fail
+    end.
+
+guess_initial_call({supervisor, _F, _A})        -> supervisor;
+guess_initial_call({supervisor2, _F, _A})       -> supervisor;
+guess_initial_call({mochiweb_acceptor, _F, _A}) -> mochiweb_http;
+guess_initial_call(_MFA)                        -> fail.
+
+
+safe_process_info(Pid, Info) ->
+    rpc:call(node(Pid), erlang, process_info, [Pid, Info]).
diff --git a/deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl b/deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl
new file mode 100644 (file)
index 0000000..8b38a97
--- /dev/null
@@ -0,0 +1,64 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 1.1 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ.
+%%
+%%  The Initial Developer of the Original Code is VMware, Inc.
+%%  Copyright (c) 2007-2012 VMware, Inc.  All rights reserved.
+%%
+
+-module(rabbit_top_wm_ets_tables).
+
+-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+
+-include_lib("rabbitmq_management/include/rabbit_mgmt.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+
+%%--------------------------------------------------------------------
+
+init(_Config) -> {ok, #context{}}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+to_json(ReqData, Context) ->
+    Sort = case wrq:get_qs_value("sort", ReqData) of
+               undefined -> memory;
+               Str       -> list_to_atom(Str)
+           end,
+    Node = b2a(rabbit_mgmt_util:id(node, ReqData)),
+    Order = case wrq:get_qs_value("sort_reverse", ReqData) of
+                "true" -> asc;
+                _      -> desc
+            end,
+    RowCount = case wrq:get_qs_value("row_count", ReqData) of
+                   undefined -> 20;
+                   List when is_list(List) -> list_to_integer(List)
+               end,
+    rabbit_mgmt_util:reply([{node,       Node},
+                            {row_count,  RowCount},
+                            {ets_tables, ets_tables(Node, Sort, Order, RowCount)}],
+                           ReqData, Context).
+
+is_authorized(ReqData, Context) ->
+    rabbit_mgmt_util:is_authorized_admin(ReqData, Context).
+
+%%--------------------------------------------------------------------
+
+b2a(B) -> list_to_atom(binary_to_list(B)).
+
+ets_tables(Node, Sort, Order, RowCount) ->
+    [fmt(P) || P <- rabbit_top_worker:ets_tables(Node, Sort, Order, RowCount)].
+
+fmt(Info) ->
+    {owner, Pid} = lists:keyfind(owner, 1, Info),
+    Info1 = lists:keydelete(owner, 1, Info),
+    [{owner,  rabbit_top_util:fmt(Pid)} | Info1].
diff --git a/deps/rabbitmq_top/src/rabbit_top_wm_process.erl b/deps/rabbitmq_top/src/rabbit_top_wm_process.erl
new file mode 100644 (file)
index 0000000..17a893d
--- /dev/null
@@ -0,0 +1,76 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 1.1 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ.
+%%
+%%  The Initial Developer of the Original Code is VMware, Inc.
+%%  Copyright (c) 2007-2012 VMware, Inc.  All rights reserved.
+%%
+
+-module(rabbit_top_wm_process).
+
+-export([init/1, to_json/2, resource_exists/2, content_types_provided/2,
+         is_authorized/2]).
+
+-define(ADDITIONAL_INFO,
+        [current_stacktrace, trap_exit, links, monitors, monitored_by]).
+
+-include_lib("rabbitmq_management/include/rabbit_mgmt.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+
+%%--------------------------------------------------------------------
+
+init(_Config) -> {ok, #context{}}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+to_json(ReqData, Context) ->
+    rabbit_mgmt_util:reply(proc(ReqData), ReqData, Context).
+
+resource_exists(ReqData, Context) ->
+    {case proc(ReqData) of
+         not_found -> false;
+         _         -> true
+     end, ReqData, Context}.
+
+is_authorized(ReqData, Context) ->
+    rabbit_mgmt_util:is_authorized_admin(ReqData, Context).
+
+%%--------------------------------------------------------------------
+
+proc(ReqData) ->
+    PidBin = rabbit_mgmt_util:id(pid, ReqData),
+    try list_to_pid(binary_to_list(PidBin)) of
+        Pid -> case rabbit_top_worker:proc(Pid) of
+                   {ok, Base} -> [{pid,  PidBin},
+                                  {name, rabbit_top_util:obtain_name(Pid)}] ++
+                                     Base ++
+                                     case rabbit_top_util:safe_process_info(
+                                            Pid, ?ADDITIONAL_INFO) of
+                                         undefined -> [];
+                                         Props     -> fmt(Props)
+                                     end;
+                   error      -> not_found
+               end
+    catch
+        error:badarg ->
+            not_found
+    end.
+
+
+fmt(Props) -> [{K, fmt(K, V)} || {K, V} <- Props].
+
+fmt(links,              V) -> [rabbit_top_util:fmt(P) || P <- V, is_pid(P)];
+fmt(monitors,           V) -> [rabbit_top_util:fmt(P) || {process, P} <- V];
+fmt(monitored_by,       V) -> [rabbit_top_util:fmt(P) || P <- V];
+fmt(current_stacktrace, V) -> rabbit_top_util:fmt(V);
+fmt(_K,                 V) -> V.
diff --git a/deps/rabbitmq_top/src/rabbit_top_wm_processes.erl b/deps/rabbitmq_top/src/rabbit_top_wm_processes.erl
new file mode 100644 (file)
index 0000000..4695c0a
--- /dev/null
@@ -0,0 +1,65 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 1.1 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ.
+%%
+%%  The Initial Developer of the Original Code is VMware, Inc.
+%%  Copyright (c) 2007-2012 VMware, Inc.  All rights reserved.
+%%
+
+-module(rabbit_top_wm_processes).
+
+-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
+
+-include_lib("rabbitmq_management/include/rabbit_mgmt.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("webmachine/include/webmachine.hrl").
+
+%%--------------------------------------------------------------------
+
+init(_Config) -> {ok, #context{}}.
+
+content_types_provided(ReqData, Context) ->
+   {[{"application/json", to_json}], ReqData, Context}.
+
+to_json(ReqData, Context) ->
+    Sort = case wrq:get_qs_value("sort", ReqData) of
+               undefined -> reduction_delta;
+               Str       -> list_to_atom(Str)
+           end,
+    Node = b2a(rabbit_mgmt_util:id(node, ReqData)),
+    Order = case wrq:get_qs_value("sort_reverse", ReqData) of
+                "true" -> asc;
+                _      -> desc
+            end,
+    RowCount = case wrq:get_qs_value("row_count", ReqData) of
+                   undefined -> 20;
+                   List when is_list(List) -> list_to_integer(List)
+               end,
+    rabbit_mgmt_util:reply([{node,      Node},
+                            {row_count, RowCount},
+                            {processes, procs(Node, Sort, Order, RowCount)}],
+                           ReqData, Context).
+
+is_authorized(ReqData, Context) ->
+    rabbit_mgmt_util:is_authorized_admin(ReqData, Context).
+
+%%--------------------------------------------------------------------
+
+b2a(B) -> list_to_atom(binary_to_list(B)).
+
+procs(Node, Sort, Order, RowCount) ->
+    [fmt(P) || P <- rabbit_top_worker:procs(Node, Sort, Order, RowCount)].
+
+fmt(Info) ->
+    {pid, Pid} = lists:keyfind(pid, 1, Info),
+    Info1 = lists:keydelete(pid, 1, Info),
+    [{pid,  rabbit_top_util:fmt(Pid)},
+     {name, rabbit_top_util:obtain_name(Pid)} | Info1].
diff --git a/deps/rabbitmq_top/src/rabbit_top_worker.erl b/deps/rabbitmq_top/src/rabbit_top_worker.erl
new file mode 100644 (file)
index 0000000..d61a47c
--- /dev/null
@@ -0,0 +1,164 @@
+%%  The contents of this file are subject to the Mozilla Public License
+%%  Version 1.1 (the "License"); you may not use this file except in
+%%  compliance with the License. You may obtain a copy of the License
+%%  at http://www.mozilla.org/MPL/
+%%
+%%  Software distributed under the License is distributed on an "AS IS"
+%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%  the License for the specific language governing rights and
+%%  limitations under the License.
+%%
+%%  The Original Code is RabbitMQ.
+%%
+%%  The Initial Developer of the Original Code is VMware, Inc.
+%%  Copyright (c) 2007-2011 VMware, Inc.  All rights reserved.
+%%
+
+-module(rabbit_top_worker).
+-behaviour(gen_server).
+
+-define(PROCESS_INFO, [memory, message_queue_len, reductions, status]).
+
+-export([start_link/0]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+         terminate/2, code_change/3]).
+
+-export([procs/4, proc/1, ets_tables/4, ets_table/1]).
+
+-define(SERVER, ?MODULE).
+-define(MILLIS, 1000).
+-define(EVERY, 5).
+-define(SLEEP, ?EVERY * ?MILLIS).
+
+-record(state, {procs, ets_tables}).
+
+%%--------------------------------------------------------------------
+
+start_link() ->
+    gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
+
+
+procs(Node, Key, Rev, Count) ->
+    gen_server:call({?SERVER, Node}, {procs, Key, Rev, Count}, infinity).
+
+proc(Pid) ->
+    gen_server:call({?SERVER, node(Pid)}, {proc, Pid}, infinity).
+
+ets_tables(Node, Key, Rev, Count) ->
+    gen_server:call({?SERVER, Node}, {ets_tables, Key, Rev, Count}, infinity).
+
+ets_table(Name) ->
+    table_info(Name).
+
+%%--------------------------------------------------------------------
+
+init([]) ->
+    ensure_timer(),
+    {ok, #state{procs = procs(dict:new()),
+                ets_tables = ets_tables([])}}.
+
+handle_call({ets_tables, Key, Order, Count}, _From,
+            State = #state{ets_tables = Tables}) ->
+    {reply, toplist(Key, Order, Count, Tables), State};
+
+handle_call({procs, Key, Order, Count}, _From, State = #state{procs = Procs}) ->
+    {reply, toplist(Key, Order, Count, flatten(Procs)), State};
+
+handle_call({proc, Pid}, _From, State = #state{procs = Procs}) ->
+    {reply, dict:find(Pid, Procs), State}.
+
+handle_cast(_Msg, State) ->
+    {noreply, State}.
+
+handle_info(_Msg, State = #state{procs = OldProcs, ets_tables = OldTables}) ->
+    ensure_timer(),
+    {noreply, State#state{procs = procs(OldProcs),
+                          ets_tables = ets_tables(OldTables)}};
+
+handle_info(_Msg, State) ->
+    {noreply, State}.
+
+terminate(_Reason, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+%%--------------------------------------------------------------------
+
+ensure_timer() ->
+    erlang:send_after(?SLEEP, self(), update).
+
+procs(OldProcs) ->
+    lists:foldl(
+      fun(Pid, Procs) ->
+              case process_info(Pid, ?PROCESS_INFO) of
+                  undefined ->
+                      Procs;
+                  Props ->
+                      Delta = (reductions(Props) -
+                                   case dict:find(Pid, OldProcs) of
+                                       {ok, OldProps} -> reductions(OldProps);
+                                       error          -> 0
+                                   end) div ?EVERY,
+                      dict:store(
+                        Pid, [{reduction_delta, Delta} | Props], Procs)
+              end
+      end, dict:new(), processes()).
+
+reductions(Props) ->
+    {reductions, R} = lists:keyfind(reductions, 1, Props),
+    R.
+
+ets_tables(_OldTables) ->
+    lists:filtermap(
+        fun(Table) ->
+            case table_info(Table) of
+                undefined -> false;
+                Info      -> {true, Info}
+            end
+        end,
+        ets:all()).
+
+table_info(Table) when not is_atom(Table) -> undefined;
+table_info(TableName) when is_atom(TableName) ->
+    Info = lists:map(fun
+                        ({memory, MemWords}) -> {memory, bytes(MemWords)};
+                        (Other) -> Other
+                     end,
+                     ets:info(TableName)),
+    {owner, OwnerPid} = lists:keyfind(owner, 1, Info),
+    case process_info(OwnerPid, registered_name) of
+        []                           -> Info;
+        {registered_name, OwnerName} -> [{owner_name, OwnerName} | Info]
+    end.
+
+flatten(Procs) ->
+    dict:fold(fun(Name, Props, Rest) ->
+                      [[{pid, Name} | Props] | Rest]
+              end, [], Procs).
+
+%%--------------------------------------------------------------------
+
+toplist(Key, Order, Count, List) ->
+    RevFun = case Order of
+                 asc  -> fun (L) -> L end;
+                 desc -> fun lists:reverse/1
+             end,
+    Keyed = [toplist(Key, I) || I <- List],
+    Sorted = lists:sublist(RevFun(lists:keysort(1, Keyed)), Count),
+    [Info || {_, Info} <- Sorted].
+
+toplist(Key, Info) ->
+    % Do not crash if unknown sort key. Keep unsorted instead.
+    case lists:keyfind(Key, 1, Info) of
+        {Key, Val} -> {Val, Info};
+        false      -> {undefined, Info}
+    end.
+
+bytes(Words) ->  try
+                     Words * erlang:system_info(wordsize)
+                 catch
+                     _:_ -> 0
+                 end.
\ No newline at end of file
diff --git a/deps/rabbitmq_top/src/rabbitmq_top.app.src b/deps/rabbitmq_top/src/rabbitmq_top.app.src
new file mode 100644 (file)
index 0000000..03a84e2
--- /dev/null
@@ -0,0 +1,7 @@
+{application, rabbitmq_top,
+ [{description, "RabbitMQ Top"},
+  {vsn, "3.6.6"},
+  {modules, []},
+  {registered, []},
+  {mod, {rabbit_top_app, []}},
+  {applications, [kernel, stdlib, rabbit_common, rabbit, rabbitmq_management]}]}.
diff --git a/deps/rabbitmq_tracing/CODE_OF_CONDUCT.md b/deps/rabbitmq_tracing/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_tracing/CONTRIBUTING.md b/deps/rabbitmq_tracing/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_tracing/Makefile b/deps/rabbitmq_tracing/Makefile
new file mode 100644 (file)
index 0000000..63b1c99
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_tracing
+
+DEPS = rabbit_common rabbit rabbitmq_management webmachine
+TEST_DEPS = rabbitmq_ct_helpers
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_tracing/README.md b/deps/rabbitmq_tracing/README.md
new file mode 100644 (file)
index 0000000..16d1c44
--- /dev/null
@@ -0,0 +1,56 @@
+# RabbitMQ (Message) Tracing Plugin
+
+This is an opinionated tracing plugin that extends RabbitMQ management UI.
+It logs messages passing through vhosts [with enabled tracing](http://www.rabbitmq.com/firehose.html) to a log
+file.
+
+## Usage
+
+This plugin ships with RabbitMQ. Enabled it with `rabbitmq-plugins enable`,
+then see a "Tracing" tab in the management UI.
+
+
+## Configuration
+
+Configuration options are under the `rabbitmq_tracing` app (config section,
+if you will):
+
+ * `directory`: controls where the log files go. It defaults to "/var/tmp/rabbitmq-tracing".
+ * `username`: username to be used by tracing event consumers (default: `<<"guest">>`)
+ * `password`: password to be used by tracing event consumers (default: `<<"guest">>`)
+
+## Performance
+
+TL;DR: this plugin is intended to be used in development and QA environments.
+It will increase RAM consumption and CPU usage of a node.
+
+On a few year old developer-grade machine, rabbitmq-tracing can write
+about 2000 msg/s to a log file. You should be careful using
+rabbitmq-tracing if you think you're going to capture more messages
+than this. Any messages that can't be logged are queued.
+
+The code to serve up the log files over HTTP is not at all
+sophisticated or efficient, it loads the whole log into memory. If you
+have large log files you may wish to transfer them off the server in
+some other way.
+
+## HTTP API
+
+```
+GET            /api/traces
+GET            /api/traces/<vhost>
+GET PUT DELETE /api/traces/<vhost>/<name>
+GET            /api/trace-files
+GET     DELETE /api/trace-files/<name>    (GET returns the file as text/plain)
+```
+
+Example for how to create a trace using [RabbitMQ HTTP API](http://www.rabbitmq.com/management.html):
+
+```
+curl -i -u guest:guest -H "content-type:application/json" -XPUT \
+     http://localhost:55672/api/traces/%2f/my-trace \
+     -d'{"format":"text","pattern":"#", "max_payload_bytes":1000}'
+```
+
+`max_payload_bytes` is optional (omit it to prevent payload truncation),
+format and pattern are mandatory.
diff --git a/deps/rabbitmq_tracing/erlang.mk b/deps/rabbitmq_tracing/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards in and beetween for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Erlang snippet (run via $(call erlang,...), which strips newlines — so no
+# Erlang comments inside the define): rewrite the dependency's compiled .app
+# file so its `modules` entry lists every .erl module found under
+# $(DEPS_DIR)/$1/src.  No-op when the .app file does not exist.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# Erlang snippet: evaluate the dependency's dynamic $1.app.src.script
+# (rebar convention) with empty bindings and write the resulting term back
+# as a plain, static $1.app.src file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# Erlang snippet: normalize the dependency's app resource file.  Reads
+# src/$1.app.src (falling back to ebin/$1.app when the .app.src does not
+# exist), then:
+#   - empties the `modules` list (repopulated later by dep_autopatch_app.erl),
+#   - replaces `{vsn, git}` with the literal string "git",
+#   - ensures a `registered` entry exists,
+# and writes the result to src/$1.app.src, deleting the ebin copy when that
+# was the input.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch method: clone the dependency's git repository (no checkout during
+# clone), then check out the pinned commit/branch/tag.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Fetch method: the dependency lives as a git submodule of this repository.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Fetch method: clone the dependency's Mercurial repository (no working
+# copy during clone), then update to the pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Fetch method: check out the dependency from a Subversion repository.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Fetch method: copy the dependency from a local directory.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Erlang snippet: download package $(1) at version $(2) from the hex.pm
+# tarball mirror on S3, then unpack the inner contents.tar.gz into the
+# dependency's directory.  Fails (badmatch) on any non-200 response.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# The version is the second word of the user's dep_$(1) definition.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback fetch method: report the unknown/invalid dependency on stderr
+# and abort the recipe with a distinctive exit code.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Interprets dep_$(1) as "<git-repo> [<ref>]", defaulting the ref to master,
+# and warns that this format is deprecated.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch-method name for dependency $(1):
+#   - dep_$(1) defined and its first word matches a dep_fetch_* macro -> that word;
+#   - dep_$(1) defined otherwise -> "legacy" when building as a dep (IS_DEP), else "fail";
+#   - dep_$(1) undefined -> the method from the Erlang.mk package index, or "fail".
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# Rule template, instantiated once per dependency via
+# $(eval $(call dep_target,<dep>)).  The generated $(DEPS_DIR)/<name> rule:
+#   1. refuses to proceed when an application of the same name exists in
+#      $(APPS_DIR) (exit 17);
+#   2. fetches the dependency with the method resolved by dep_fetch;
+#   3. runs autoreconf when configure.ac/.in exists without a generated
+#      configure, then ./configure when present (the leading "-" tolerates
+#      configure failures);
+#   4. unless the dep is in NO_AUTOPATCH, applies Erlang.mk's autopatch.
+# Special cases: for "amqp_client"/"rabbit" with RABBITMQ_CLIENT_PATCH /
+# RABBITMQ_SERVER_PATCH set, it instead clones the rabbitmq-codegen (and
+# rabbitmq-server) repositories the pre-autotools RabbitMQ build needs,
+# and skips autopatching.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch/build rule per build-time and run-time dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+# Top-level invocation only (IS_APP is set for the recursive makes below):
+# propagate clean/distclean into every application dir in ALL_APPS_DIRS.
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+ifndef SKIP_DEPS
+# distclean removes the whole dependency directory unless SKIP_DEPS is set.
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+# Each variable names a file under $(ERLANG_MK_TMP) holding one flavor of
+# recursive dependency listing.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include a plugin makefile shipped inside a dependency.  The empty rule
+# makes the included file depend on the dep's directory, so the dep is
+# fetched before the -include can succeed.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# DEP_PLUGINS entries are either "depname" (meaning depname/plugins.mk) or
+# an explicit "depname/path/to/file.mk".
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+# DTL_FULL_PATH: when non-empty, a template's module name encodes its path
+# below DTL_PATH (slashes become underscores); otherwise only the file's
+# basename is used.
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang snippet: compile each template listed in $(1) with erlydtl,
+# writing <lowercased module>$(DTL_SUFFIX).beam into ebin/.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Recompile only the templates newer than the .app file ($? holds the
+# out-of-date prerequisites).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate Erlang sources from the given .proto files, compile them into
+# ebin/, then remove the generated .erl intermediates.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Erlang snippet: run protobuffs on each .proto file in $(1), emitting
+# headers into <dir>/include and sources into <dir>/ebin relative to the
+# proto file's grandparent directory.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags (projects may override).  COMPILE_FIRST and
+# ERLC_EXCLUDE hold module base names; the *_PATHS variables expand them
+# to src/<name>.erl.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# Each <tag>_verbose resolves through $(V): V=0 echoes a short tag line,
+# V=2 traces recipe commands with "set -x;", any other value is empty.
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# If ebin/test exists, force a clean rebuild before building; otherwise
+# build incrementally.  NOTE(review): the producer of the ebin/test marker
+# is outside this chunk — presumably a test build; confirm.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# Template for the generated ebin/$(PROJECT).app resource.  $(1) is the dep
+# id (emitted only when IS_DEP is set); $(2) is the comma-separated module
+# list.  This variant is used when src/$(PROJECT_MOD).erl does not exist
+# (library application: no `mod` entry, empty `registered`).
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+# Variant with a start module: adds registered names and the {mod, ...}
+# entry pointing at $(PROJECT_MOD).
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+# app-build delegates to the ebin/$(PROJECT).app rule via its prerequisite;
+# the ":" recipe is a shell no-op.
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# erlc +noobj generates .erl/.hrl/.asn1db from the ASN.1 specs; sources are
+# moved into src/ (picked up by the normal build via ERL_FILES above) and
+# headers/databases into include/.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile MIBs to priv/mibs/ (COMPILE_MIB_FIRST_PATHS order first), then
+# generate the matching .hrl headers from the compiled .bin into include/.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+# erlc turns .xrl (leex) and .yrl (yecc) into .erl files in src/, which are
+# already appended to ERL_FILES above and built like regular sources.
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang snippet: scan every file in ERL_FILES for dependency-carrying
+# attributes (-behaviour/-behavior, -compile {parse_transform, M},
+# -include/-include_lib resolved against include/ and src/, and -import of
+# a local module) and write the makefile fragment $(1) ($(PROJECT).d) with:
+#   - one "<src>:: <deps>" line per source whose deps were detected, and
+#   - a COMPILE_FIRST list in topological order so depended-on modules
+#     compile before their users (acyclic digraph enforces no cycles).
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_tracing/rabbitmq-components.mk b/deps/rabbitmq_tracing/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_app.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_app.erl
index 815855bb1e28d83c29edcfd0f1174662eca9e92f..4f0a29d80a5d2024de0a127d6cfe49720a717694 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_app).
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_consumer.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_consumer.erl
index ca2273bc852cd0d8f384d35d76fdb429ffdba488..d716497ba05f366c123c4d9bdd59e736637e4670 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_consumer).
@@ -28,6 +28,9 @@
                      vhost, username, channel, routing_keys, routed_queues,
                      properties, payload}).
 
+-define(DEFAULT_USERNAME, <<"guest">>).
+-define(DEFAULT_PASSWORD, <<"guest">>).
+
 -define(X, <<"amq.rabbitmq.trace">>).
 -define(MAX_BUF, 100).
 
@@ -47,9 +50,15 @@ init(Args) ->
     process_flag(trap_exit, true),
     Name = pget(name, Args),
     VHost = pget(vhost, Args),
+    Username = rabbit_tracing_util:coerce_env_value(username,
+        rabbit_misc:get_env(rabbitmq_tracing, username, ?DEFAULT_USERNAME)),
+    Password = rabbit_tracing_util:coerce_env_value(password,
+        rabbit_misc:get_env(rabbitmq_tracing, password, ?DEFAULT_PASSWORD)),
     MaxPayload = pget(max_payload_bytes, Args, unlimited),
     {ok, Conn} = amqp_connection:start(
-                   #amqp_params_direct{virtual_host = VHost}),
+                   #amqp_params_direct{virtual_host = VHost,
+                                       username = Username,
+                                       password = Password}),
     link(Conn),
     {ok, Ch} = amqp_connection:open_channel(Conn),
     link(Ch),
@@ -144,7 +153,8 @@ delivery_to_log_record({#'basic.deliver'{routing_key = Key},
     {longstr, VHost}  = table_lookup(H, <<"vhost">>),
     {longstr, User}   = table_lookup(H, <<"user">>),
     {signedint, Chan} = table_lookup(H, <<"channel">>),
-    #log_record{timestamp    = rabbit_mgmt_format:now_to_str_ms(os:timestamp()),
+    #log_record{timestamp    = rabbit_mgmt_format:now_to_str_ms(
+                                 time_compat:os_system_time(milli_seconds)),
                 type         = Type,
                 exchange     = X,
                 queue        = Q,
similarity index 90%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_consumer_sup.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_consumer_sup.erl
index 8dbc22f9da1122d50a5f71948eb521ce26f1bf66..c467125cebf9f0bf1821e93283c79e7fd7ba90d5 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ Federation.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_consumer_sup).
@@ -30,5 +30,5 @@ start_link(Args) -> supervisor2:start_link(?MODULE, Args).
 init(Args) ->
     {ok, {{one_for_one, 3, 10},
           [{consumer, {rabbit_tracing_consumer, start_link, [Args]},
-            transient, ?MAX_WAIT, worker,
+            transient, ?WORKER_WAIT, worker,
             [rabbit_tracing_consumer]}]}}.
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_files.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_files.erl
index c5520e7a3e3487a5c17fde1ae0d6ae27d4ff7cde..5982e2e8a36a1862cf6e06d7a9d20b67a6ce591a 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_files).
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_mgmt.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_mgmt.erl
index 3d2d44dfd9a78197c704a06257404f5be6613f82..dd213d5e35e543d57d58811f66b7ac041507d038 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_mgmt).
similarity index 90%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_sup.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_sup.erl
index 502ef7e52342dcb0e2e163a582c6e3e668536164..7cccd6677be506ef6c6b98d3ed95b7b4b33e4f4b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_sup).
@@ -34,7 +34,7 @@ start_child(Id, Args) ->
     supervisor:start_child(
       ?SUPERVISOR,
       {Id, {rabbit_tracing_consumer_sup, start_link, [Args]},
-       temporary, ?MAX_WAIT, supervisor,
+       temporary, ?SUPERVISOR_WAIT, supervisor,
        [rabbit_tracing_consumer_sup]}).
 
 stop_child(Id) ->
@@ -46,5 +46,5 @@ stop_child(Id) ->
 
 init([]) -> {ok, {{one_for_one, 3, 10},
                   [{traces, {rabbit_tracing_traces, start_link, []},
-                    transient, ?MAX_WAIT, worker,
+                    transient, ?WORKER_WAIT, worker,
                     [rabbit_tracing_traces]}]}}.
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_traces.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_traces.erl
index 53336d70fd95392dd77de3658c51f81b64afc23a..33914d1dba5d7879c56591a34bf8e0851a79bea5 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_traces).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_util.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_util.erl
new file mode 100644 (file)
index 0000000..5d26291
--- /dev/null
@@ -0,0 +1,7 @@
+-module(rabbit_tracing_util).
+
+-export([coerce_env_value/2]).
+
+coerce_env_value(username, Val) -> rabbit_data_coercion:to_binary(Val);
+coerce_env_value(password, Val) -> rabbit_data_coercion:to_binary(Val);
+coerce_env_value(_,        Val) -> Val.
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_file.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_wm_file.erl
index 30a134b910e7aec9c81c74a36fe84b624c99d9b3..8966e8ea2e6348ebcdf698ccc1c5cf97469ab887 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 
 -module(rabbit_tracing_wm_file).
 
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_files.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_wm_files.erl
index d3a8004f256af57df2d62f6b33419f3fd87e6347..d4312c7c09967e69d81bb3dbc8b2ea0a00828a5e 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_wm_files).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_trace.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_wm_trace.erl
index a9cdbacc4cafa46ab52fdca658f7fa1a10ca8fe9..96456bb4eee03701847a3b029f6e0cef0760bff6 100644 (file)
@@ -11,7 +11,7 @@
 %%   The Original Code is RabbitMQ.
 %%
 %%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%   Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 
 -module(rabbit_tracing_wm_trace).
 
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbit_tracing_wm_traces.erl
rename to deps/rabbitmq_tracing/src/rabbit_tracing_wm_traces.erl
index ef0fe50a7123006698c516bdb5cdddd81ed4df67..959edc9492eb13f1a5a6c3e101caa93d31b617d5 100644 (file)
@@ -11,7 +11,7 @@
 %%  The Original Code is RabbitMQ.
 %%
 %%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%  Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_tracing_wm_traces).
diff --git a/deps/rabbitmq_tracing/src/rabbitmq_tracing.app.src b/deps/rabbitmq_tracing/src/rabbitmq_tracing.app.src
new file mode 100644 (file)
index 0000000..418239e
--- /dev/null
@@ -0,0 +1,10 @@
+{application, rabbitmq_tracing,
+ [{description, "RabbitMQ message logging / tracing"},
+  {vsn, "3.6.6"},
+  {modules, []},
+  {registered, []},
+  {mod, {rabbit_tracing_app, []}},
+  {env, [{directory, "/var/tmp/rabbitmq-tracing"},
+         {username, <<"guest">>},
+         {password, <<"guest">>}]},
+  {applications, [kernel, stdlib, rabbit_common, rabbit, rabbitmq_management]}]}.
diff --git a/deps/rabbitmq_trust_store/CODE_OF_CONDUCT.md b/deps/rabbitmq_trust_store/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_trust_store/CONTRIBUTING.md b/deps/rabbitmq_trust_store/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_trust_store/Makefile b/deps/rabbitmq_trust_store/Makefile
new file mode 100644 (file)
index 0000000..77a2ce7
--- /dev/null
@@ -0,0 +1,17 @@
+PROJECT = rabbitmq_trust_store
+
+DEPS = rabbit_common rabbit
+## We need the Cowboy's test utilities
+TEST_DEPS = rabbitmq_ct_helpers amqp_client ct_helper
+dep_ct_helper = git https://github.com/extend/ct_helper.git master
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_trust_store/README.md b/deps/rabbitmq_trust_store/README.md
new file mode 100644 (file)
index 0000000..0934a54
--- /dev/null
@@ -0,0 +1,113 @@
+# RabbitMQ Certificate Trust Store
+
+This plugin provides support for TLS (x509) certificate whitelisting.
+All plugins which use the global TLS options will be configured with
+the same whitelist.
+
+## Rationale
+
+RabbitMQ can be configured to accept self-signed certificates
+through various TLS socket options, namely the `ca_certs` and
+`partial_chain` properties. However, this configuration is largely static.
+There is no convenient means with which to change it in realtime, that
+is, without making configuration changes to TLS listening sockets.
+
+This plugin maintains a list of trusted .PEM formatted TLS (x509) certificates in a given
+directory, refreshing at configurable intervals, or when `rabbitmqctl
+eval 'rabbit_trust_store:refresh().'` is invoked. Said certificates are then used
+to verify inbound TLS connections for the entire RabbitMQ node (all plugins and protocols).
+The list is node-local.
+
+## RabbitMQ Version Requirements
+
+This plugin requires RabbitMQ `3.6.1` or later.
+
+## Installation and Binary Builds
+
+This plugin is now available from the [RabbitMQ community plugins page](http://www.rabbitmq.com/community-plugins.html).
+Please consult the docs on [how to install RabbitMQ plugins](http://www.rabbitmq.com/plugins.html#installing-plugins).
+
+## Usage
+
+Configure the trust store with a directory of whitelisted certificates
+and a refresh interval:
+
+```
+    {rabbitmq_trust_store,
+     [{directory,        "$HOME/rabbit/whitelist"}, %% trusted certificate directory path
+      {refresh_interval, {seconds, 30}}             %% refresh interval in seconds (only)
+    ]}
+```
+
+Setting `refresh_interval` to `0` seconds will disable automatic refresh.
+
+Certificates are distinguished by their **filenames** and file modification time.
+
+### Installing a Certificate
+
+Write a `PEM` formatted certificate file to the configured directory
+to whitelist it. This contains all the necessary information to
+authorize a client which presents the very same certificate to the
+server.
+
+### Removing a Certificate
+
+Delete the certificate file from the configured directory to remove it
+from the whitelist.
+
+> Note: TLS session caching bypasses the trust store certificate validation and can
+make it seem as if a removed certificate is still active. Disabling session caching
+in the broker by setting the `reuse_sessions` ssl option to `false` can be done if
+timely certificate removal is important.
+
+
+### Listing certificates
+
+To list the currently loaded certificates use the `rabbitmqctl` utility as follows:
+
+```
+    rabbitmqctl eval 'io:format(rabbit_trust_store:list()).'
+```
+
+This will output a formatted list of certificates similar to:
+
+```
+    Name: cert.pem
+    Serial: 1 | 0x1
+    Subject: O=client,CN=snowman.local
+    Issuer: L=87613,CN=MyTestRootCA
+    Validity: "2016-05-24T15:28:25Z - 2026-05-22T15:28:25Z"
+```
+
+Note that this command reads each certificate from disk in order to extract
+all the relevant information. If there are a large number of certificates in the
+trust store use this command sparingly.
+
+
+## How it Works
+
+When the trust-store starts it configures TLS listening sockets,
+whitelists the certificates in the given directory, then accepting
+sockets can query the trust-store with their client's certificate. It
+refreshes the whitelist to correspond with changes in the directory's
+contents, installing and removing certificate details, after a refresh
+interval or a manual refresh (by invoking a `rabbitmqctl eval
+'rabbit_trust_store:refresh().'` from the commandline).
+
+
+## Building from Source
+
+See [Plugin Development guide](http://www.rabbitmq.com/plugin-development.html).
+
+TL;DR: running
+
+    make dist
+
+will build the plugin and put build artifacts under the `./plugins` directory.
+
+
+## Copyright and License
+
+(c) Pivotal Software Inc, 2007-2016
+
+Released under the MPL, the same license as RabbitMQ.
diff --git a/deps/rabbitmq_trust_store/erlang.mk b/deps/rabbitmq_trust_store/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards  and  for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_trust_store/rabbitmq-components.mk b/deps/rabbitmq_trust_store/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store.erl
new file mode 100644 (file)
index 0000000..d10c857
--- /dev/null
@@ -0,0 +1,282 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_trust_store).
+-behaviour(gen_server).
+
+-export([mode/0, refresh/0, list/0]). %% Console Interface.
+-export([whitelisted/3, is_whitelisted/1]). %% Client-side Interface.
+-export([start/1, start_link/1]).
+-export([init/1, terminate/2,
+         handle_call/3, handle_cast/2,
+         handle_info/2,
+         code_change/3]).
+
+-include_lib("kernel/include/file.hrl").
+-include_lib("stdlib/include/ms_transform.hrl").
+-include_lib("public_key/include/public_key.hrl").
+
+-type certificate() :: #'OTPCertificate'{}.
+-type event()       :: valid_peer
+                     | valid
+                     | {bad_cert, Other :: atom()
+                                | unknown_ca
+                                | selfsigned_peer}
+                     | {extension, #'Extension'{}}.
+-type state()       :: confirmed | continue.
+-type outcome()     :: {valid, state()}
+                     | {fail, Reason :: term()}
+                     | {unknown, state()}.
+
+-record(entry, {filename :: string(), identifier :: tuple(), change_time :: integer()}).
+-record(state, {directory_change_time :: integer(), whitelist_directory :: string(), refresh_interval :: integer()}).
+
+
+%% OTP Supervision
+
+start(Settings) ->
+    gen_server:start(?MODULE, Settings, []).
+
+start_link(Settings) ->
+    gen_server:start_link({local, trust_store}, ?MODULE, Settings, []).
+
+
+%% Console Interface
+
+-spec mode() -> 'automatic' | 'manual'.
+mode() ->
+    gen_server:call(trust_store, mode).
+
+-spec refresh() -> integer().
+refresh() ->
+    gen_server:call(trust_store, refresh).
+
+-spec list() -> string().
+list() ->
+    gen_server:call(trust_store, list).
+
+%% Client (SSL Socket) Interface
+
+-spec whitelisted(certificate(), event(), state()) -> outcome().
+whitelisted(_, {bad_cert, unknown_ca}, confirmed) ->
+    {valid, confirmed};
+whitelisted(#'OTPCertificate'{}=C, {bad_cert, unknown_ca}, continue) ->
+    case is_whitelisted(C) of
+        true ->
+            {valid, confirmed};
+        false ->
+            {fail, "CA not known AND certificate not whitelisted"}
+    end;
+whitelisted(#'OTPCertificate'{}=C, {bad_cert, selfsigned_peer}, continue) ->
+    case is_whitelisted(C) of
+        true ->
+            {valid, confirmed};
+        false ->
+            {fail, "certificate not whitelisted"}
+    end;
+whitelisted(_, {bad_cert, _} = Reason, _) ->
+    {fail, Reason};
+whitelisted(_, valid, St) ->
+    {valid, St};
+whitelisted(#'OTPCertificate'{}=_, valid_peer, St) ->
+    {valid, St};
+whitelisted(_, {extension, _}, St) ->
+    {unknown, St}.
+
+-spec is_whitelisted(certificate()) -> boolean().
+is_whitelisted(#'OTPCertificate'{}=C) ->
+    #entry{identifier = Id} = extract_unique_attributes(C),
+    ets:member(table_name(), Id).
+
+
+%% Generic Server Callbacks
+
+init(Settings) ->
+    erlang:process_flag(trap_exit, true),
+    ets:new(table_name(), table_options()),
+    Path = path(Settings),
+    Interval = refresh_interval(Settings),
+    Initial = modification_time(Path),
+    tabulate(Path),
+    if
+        Interval =:= 0 ->
+            ok;
+        Interval  >  0 ->
+            erlang:send_after(Interval, erlang:self(), refresh)
+    end,
+    {ok,
+     #state{directory_change_time = Initial,
+      whitelist_directory = Path,
+      refresh_interval = Interval}}.
+
+handle_call(mode, _, St) ->
+    {reply, mode(St), St};
+handle_call(refresh, _, St) ->
+    {reply, refresh(St), St};
+handle_call(list, _, St) ->
+    {reply, list(St), St};
+handle_call(_, _, St) ->
+    {noreply, St}.
+
+handle_cast(_, St) ->
+    {noreply, St}.
+
+handle_info(refresh, #state{refresh_interval = Interval} = St) ->
+    New = refresh(St),
+    erlang:send_after(Interval, erlang:self(), refresh),
+    {noreply, St#state{directory_change_time = New}};
+handle_info(_, St) ->
+    {noreply, St}.
+
+terminate(shutdown, _St) ->
+    true = ets:delete(table_name()).
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+
+%% Ancillary & Constants
+
+list(#state{whitelist_directory = Path}) ->
+    Formatted =
+        [format_cert(Path, F, S) ||
+         #entry{filename = F, identifier = {_, S}} <- ets:tab2list(table_name())],
+    to_big_string(Formatted).
+
+mode(#state{refresh_interval = I}) ->
+    if
+        I =:= 0 -> 'manual';
+        I  >  0 -> 'automatic'
+    end.
+
+refresh(#state{whitelist_directory = Path, directory_change_time = Old}) ->
+    New = modification_time(Path),
+    case New > Old of
+        false ->
+            ok;
+        true  ->
+            tabulate(Path)
+    end,
+    New.
+
+refresh_interval(Pairs) ->
+    {refresh_interval, S} = lists:keyfind(refresh_interval, 1, Pairs),
+    timer:seconds(S).
+
+path(Pairs) ->
+    {directory, Path} = lists:keyfind(directory, 1, Pairs),
+    Path.
+
+table_name() ->
+    trust_store_whitelist.
+
+table_options() ->
+    [protected,
+     named_table,
+     set,
+     {keypos, #entry.identifier},
+     {heir, none}].
+
+modification_time(Path) ->
+    {ok, Info} = file:read_file_info(Path, [{time, posix}]),
+    Info#file_info.mtime.
+
+already_whitelisted_filenames() ->
+    ets:select(table_name(),
+        ets:fun2ms(fun (#entry{filename = N, change_time = T}) -> {N, T} end)).
+
+one_whitelisted_filename({Name, Time}) ->
+    ets:fun2ms(fun (#entry{filename = N, change_time = T}) when N =:= Name, T =:= Time -> true end).
+
+build_entry(Path, {Name, Time}) ->
+    Absolute    = filename:join(Path, Name),
+    Certificate = scan_then_parse(Absolute),
+    Unique      = extract_unique_attributes(Certificate),
+    Unique#entry{filename = Name, change_time = Time}.
+
+try_build_entry(Path, {Name, Time}) ->
+    try build_entry(Path, {Name, Time}) of
+        Entry ->
+            rabbit_log:info(
+              "trust store: loading certificate '~s'", [Name]),
+            {ok, Entry}
+    catch
+        _:Err ->
+            rabbit_log:error(
+              "trust store: failed to load certificate '~s', error: ~p",
+              [Name, Err]),
+            {error, Err}
+    end.
+
+do_insertions(Before, After, Path) ->
+    Entries = [try_build_entry(Path, NameTime) ||
+                       NameTime <- (After -- Before)],
+    [insert(Entry) || {ok, Entry} <- Entries].
+
+do_removals(Before, After) ->
+    [delete(NameTime) || NameTime <- (Before -- After)].
+
+get_new(Path) ->
+    {ok, New} = file:list_dir(Path),
+    [{X, modification_time(filename:absname(X, Path))} || X <- New].
+
+tabulate(Path) ->
+    Old = already_whitelisted_filenames(),
+    New = get_new(Path),
+    do_insertions(Old, New, Path),
+    do_removals(Old, New),
+    ok.
+
+delete({Name, Time}) ->
+    rabbit_log:info("removing certificate '~s'", [Name]),
+    ets:select_delete(table_name(), one_whitelisted_filename({Name, Time})).
+
+insert(Entry) ->
+    true = ets:insert(table_name(), Entry).
+
+scan_then_parse(Filename) when is_list(Filename) ->
+    {ok, Bin} = file:read_file(Filename),
+    [{'Certificate', Data, not_encrypted}] = public_key:pem_decode(Bin),
+    public_key:pkix_decode_cert(Data, otp).
+
+extract_unique_attributes(#'OTPCertificate'{}=C) ->
+    {Serial, Issuer} = case public_key:pkix_issuer_id(C, other) of
+        {error, _Reason} ->
+            {ok, Identifier} = public_key:pkix_issuer_id(C, self),
+            Identifier;
+        {ok, Identifier} ->
+            Identifier
+    end,
+    %% Why change the order of attributes? For the same reason we put
+    %% the *most significant figure* first (on the left hand side).
+    #entry{identifier = {Issuer, Serial}}.
+
+to_big_string(Formatted) ->
+    string:join([cert_to_string(X) || X <- Formatted], "~n~n").
+
+cert_to_string({Name, Serial, Subject, Issuer, Validity}) ->
+    Text =
+        io_lib:format("Name: ~s~nSerial: ~p | 0x~.16.0B~nSubject: ~s~nIssuer: ~s~nValidity: ~p~n",
+                     [ Name, Serial, Serial, Subject, Issuer, Validity]),
+    lists:flatten(Text).
+
+format_cert(Path, Name, Serial) ->
+    {ok, Bin} = file:read_file(filename:join(Path, Name)),
+    [{'Certificate', Data, not_encrypted}] = public_key:pem_decode(Bin),
+    Validity = rabbit_ssl:peer_cert_validity(Data),
+    Subject = rabbit_ssl:peer_cert_subject(Data),
+    Issuer = rabbit_ssl:peer_cert_issuer(Data),
+    {Name, Serial, Subject, Issuer, Validity}.
+
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl
new file mode 100644 (file)
index 0000000..1a2881f
--- /dev/null
@@ -0,0 +1,144 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_trust_store_app).
+-behaviour(application).
+-export([change_SSL_options/0]).
+-export([revert_SSL_options/0]).
+-export([start/2, stop/1]).
+-define(DIRECTORY_OR_FILE_NAME_EXISTS, eexist).
+
+
+-rabbit_boot_step({rabbit_trust_store, [
+    {description, "Change necessary SSL options."},
+    {mfa, {?MODULE, change_SSL_options, []}},
+    {cleanup, {?MODULE, revert_SSL_options, []}},
+    %% {requires, ...},
+    {enables, networking}]}).
+
+change_SSL_options() ->
+    After = case application:get_env(rabbit, ssl_options) of
+        undefined ->
+            Before = [],
+            edit(Before);
+        {ok, Before} when is_list(Before) ->
+            ok = application:set_env(rabbit, initial_SSL_options, Before),
+            edit(Before)
+    end,
+    ok = application:set_env(rabbit,
+        ssl_options, After).
+
+revert_SSL_options() ->
+    {ok, Cfg} = application:get_env(rabbit, initial_SSL_options),
+    ok = application:set_env(rabbit, ssl_options, Cfg).
+
+start(normal, _) ->
+
+    %% The below two are properties, that is, tuple of name/value.
+    Path = whitelist_path(),
+    Interval = refresh_interval_time(),
+
+    rabbit_trust_store_sup:start_link([Path, Interval]).
+
+stop(_) ->
+    ok.
+
+
+%% Ancillary & Constants
+
+edit(Options) ->
+    case proplists:get_value(verify_fun, Options) of
+        undefined ->
+            ok;
+        Val       ->
+            rabbit_log:warning("RabbitMQ trust store plugin is used "
+                               "and the verify_fun TLS option is set: ~p. "
+                               "It will be overwritten by the plugin.~n", [Val]),
+            ok
+    end,
+    %% Only enter those options necessary for this application.
+    lists:keymerge(1, required_options(),
+        [{verify_fun, {delegate(), continue}},
+         {partial_chain, fun partial_chain/1} | Options]).
+
+delegate() -> fun rabbit_trust_store:whitelisted/3.
+
+partial_chain(Chain) ->
+    % special handling of clients that present a chain rather than just a peer cert.
+    case lists:reverse(Chain) of
+        [PeerDer, Ca | _] ->
+            Peer = public_key:pkix_decode_cert(PeerDer, otp),
+            % If the Peer is whitelisted make its immediate Authority a trusted one.
+            % This means the peer will automatically be validated.
+            case rabbit_trust_store:is_whitelisted(Peer) of
+                true -> {trusted_ca, Ca};
+                false -> unknown_ca
+            end;
+        _ -> unknown_ca
+    end.
+
+required_options() ->
+    [{verify, verify_peer}, {fail_if_no_peer_cert, true}].
+
+whitelist_path() ->
+    Path = case application:get_env(rabbitmq_trust_store, directory) of
+        undefined ->
+            default_directory();
+        {ok, V} when is_binary(V) ->
+            binary_to_list(V);
+        {ok, V} when is_list(V) ->
+            V
+    end,
+    ok = ensure_directory(Path),
+    {directory, Path}.
+
+refresh_interval_time() ->
+    case application:get_env(rabbitmq_trust_store, refresh_interval) of
+        undefined ->
+            {refresh_interval, default_refresh_interval()};
+        {ok, S} when is_integer(S), S >= 0 ->
+            {refresh_interval, S};
+        {ok, {seconds, S}} when is_integer(S), S >= 0 ->
+            {refresh_interval, S}
+    end.
+
+default_directory() ->
+
+    %% Dismantle the directory tree: first the table & meta-data
+    %% directory, then the Mnesia database directory, finally the node
+    %% directory where we will place the default whitelist in `Full`.
+
+    Table  = filename:split(rabbit_mnesia:dir()),
+    Mnesia = lists:droplast(Table),
+    Node   = lists:droplast(Mnesia),
+    Full = Node ++ ["trust_store", "whitelist"],
+    filename:join(Full).
+
+default_refresh_interval() ->
+    {ok, I} = application:get_env(rabbitmq_trust_store, default_refresh_interval),
+    I.
+
+ensure_directory(Path) ->
+    ok = ensure_parent_directories(Path),
+    case file:make_dir(Path) of
+        {error, ?DIRECTORY_OR_FILE_NAME_EXISTS} ->
+            true = filelib:is_dir(Path),
+            ok;
+        ok ->
+            ok
+    end.
+
+ensure_parent_directories(Path) ->
+    filelib:ensure_dir(Path).
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl
new file mode 100644 (file)
index 0000000..5e2562d
--- /dev/null
@@ -0,0 +1,37 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_trust_store_sup).
+-behaviour(supervisor).
+-export([start_link/1]).
+-export([init/1]).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+
+%% ...
+
+start_link(Settings) ->
+
+    supervisor:start_link({local, ?MODULE}, ?MODULE, Settings).
+
+
+%% ...
+
+init(Settings) ->
+    {ok,
+     {{one_for_one, 1, 5},
+      [{trust_store, {rabbit_trust_store, start_link, [Settings]},
+        permanent, timer:seconds(5), worker, [rabbit_trust_store]}]}}.
diff --git a/deps/rabbitmq_trust_store/src/rabbitmq_trust_store.app.src b/deps/rabbitmq_trust_store/src/rabbitmq_trust_store.app.src
new file mode 100644 (file)
index 0000000..b2ae324
--- /dev/null
@@ -0,0 +1,16 @@
+{application, rabbitmq_trust_store, [
+  {description, "Client certificate trust store. Provides a way to whitelist client x509 certificates."},
+  {vsn, "3.6.6"},
+  {modules, []},
+  {registered, []},
+  {mod, {rabbit_trust_store_app, []}},
+  {env, [
+         {default_refresh_interval, 30}
+        ]},
+  {applications, [
+    kernel,
+    stdlib,
+    rabbit_common,
+    rabbit
+  ]}
+]}.
diff --git a/deps/rabbitmq_web_dispatch/CODE_OF_CONDUCT.md b/deps/rabbitmq_web_dispatch/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_web_dispatch/CONTRIBUTING.md b/deps/rabbitmq_web_dispatch/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_web_dispatch/Makefile b/deps/rabbitmq_web_dispatch/Makefile
new file mode 100644 (file)
index 0000000..986acac
--- /dev/null
@@ -0,0 +1,15 @@
+PROJECT = rabbitmq_web_dispatch
+
+DEPS = rabbit_common rabbit mochiweb webmachine
+TEST_DEPS = rabbitmq_ct_helpers
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_web_dispatch/erlang.mk b/deps/rabbitmq_web_dispatch/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards  and  for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+define dep_autopatch_app.erl
+       UpdateModules = fun(App) ->
+               case filelib:is_regular(App) of
+                       false -> ok;
+                       true ->
+                               {ok, [{application, '$(1)', L0}]} = file:consult(App),
+                               Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+                                       fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+                               L = lists:keystore(modules, 1, L0, {modules, Mods}),
+                               ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+               end
+       end,
+       UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+       halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+       AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcScript = AppSrc ++ ".script",
+       Bindings = erl_eval:new_bindings(),
+       {ok, Conf} = file:script(AppSrcScript, Bindings),
+       ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+       halt()
+endef
+
+define dep_autopatch_appsrc.erl
+       AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+       AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+       case filelib:is_regular(AppSrcIn) of
+               false -> ok;
+               true ->
+                       {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+                       L1 = lists:keystore(modules, 1, L0, {modules, []}),
+                       L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+                       L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+                       ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+                       case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+       end,
+       halt()
+endef
+
+define dep_fetch_git
+       git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-submodule
+       git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+       hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+       svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+       cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_hex.erl
+       ssl:start(),
+       inets:start(),
+       {ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+               {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+               [], [{body_format, binary}]),
+       {ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+       {_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+       ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+       halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+       $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+define dep_fetch_fail
+       echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+       exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+       $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+       git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+       cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+define dep_fetch
+       $(if $(dep_$(1)), \
+               $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+                       $(word 1,$(dep_$(1))), \
+                       $(if $(IS_DEP),legacy,fail)), \
+               $(if $(filter $(1),$(PACKAGES)), \
+                       $(pkg_$(1)_fetch), \
+                       fail))
+endef
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+       $(eval DEP_NAME := $(call dep_name,$1))
+       $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+       $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+               echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+               exit 17; \
+       fi
+       $(verbose) mkdir -p $(DEPS_DIR)
+       $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+       $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+                       && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+               echo " AUTO  " $(1); \
+               cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+       fi
+       - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+               echo " CONF  " $(DEP_STR); \
+               cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+       fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+       $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi; \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+                       echo " PATCH  Downloading rabbitmq-server"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+               fi; \
+               ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+       elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+               if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+                       echo " PATCH  Downloading rabbitmq-codegen"; \
+                       git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+               fi \
+       else \
+               $$(call dep_autopatch,$(DEP_NAME)) \
+       fi
+endif
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+       done
+
+distclean:: distclean-apps
+
+distclean-apps:
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+       done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+       $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+$(foreach p,$(DEP_PLUGINS),\
+       $(eval $(if $(findstring /,$p),\
+               $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+               $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(DTL_FILES); \
+       fi
+       @touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+       [begin
+               Module0 = case "$(strip $(DTL_FULL_PATH))" of
+                       "" ->
+                               filename:basename(F, ".dtl");
+                       _ ->
+                               "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+                               re:replace(F2, "/",  "_",  [{return, list}, global])
+               end,
+               Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+               case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+                       ok -> ok;
+                       {ok, _} -> ok
+               end
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+       $(if $(strip $?),\
+               $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+define compile_proto
+       $(verbose) mkdir -p ebin/ include/
+       $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+       $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+       $(verbose) rm ebin/*.erl
+endef
+
+define compile_proto.erl
+       [begin
+               Dir = filename:dirname(filename:dirname(F)),
+               protobuffs_compile:generate_source(F,
+                       [{output_include_dir, Dir ++ "/include"},
+                               {output_src_dir, Dir ++ "/ebin"}])
+       end || F <- string:tokens("$(1)", " ")],
+       halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+       $(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+       +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+       $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, []},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+       {description, "$(PROJECT_DESCRIPTION)"},
+       {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+       {id$(comma)$(space)"$(1)"}$(comma))
+       {modules, [$(call comma_list,$(2))]},
+       {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+       {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+       {mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+       $(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+       $(verbose) mkdir -p include/
+       $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+       $(verbose) mv asn1/*.erl src/
+       $(verbose) mv asn1/*.hrl include/
+       $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+       $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+       $(verbose) mkdir -p include/ priv/mibs/
+       $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+       $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+       $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+# ----------------------------------------------------------------------
+# Scaffolding targets. Each target aborts if the files it would create
+# already exist, sets template variables (p, n) via $(eval), then renders
+# the bs_*/tpl_* templates defined elsewhere in erlang.mk.
+# ----------------------------------------------------------------------
+
+# Create a library application skeleton (Makefile + empty src/) in CURDIR.
+# With LEGACY set, also generate an explicit .app.src file.
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+	$(error Error: src/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_Makefile,Makefile)
+	$(verbose) echo "include erlang.mk" >> Makefile
+	$(verbose) mkdir src/
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+# Create relx release configuration: relx.config plus rel/sys.config and
+# rel/vm.args rendered from templates.
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+	$(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+	$(error Error: rel/ directory already exists)
+endif
+	$(eval p := $(PROJECT))
+	$(call render_template,bs_relx_config,relx.config)
+	$(verbose) mkdir rel/
+	$(call render_template,bs_sys_config,rel/sys.config)
+	$(call render_template,bs_vm_args,rel/vm.args)
+
+# Create a new OTP application (with _app and _sup modules) under
+# $(APPS_DIR)/$(in). Usage: make new-app in=APP
+new-app:
+ifndef in
+	$(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(eval n := $(in)_sup)
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+	$(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+	$(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+# Create a new library application (no _app/_sup) under $(APPS_DIR)/$(in).
+# Usage: make new-lib in=APP
+new-lib:
+ifndef in
+	$(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+	$(error Error: Application $in already exists)
+endif
+	$(eval p := $(in))
+	$(verbose) mkdir -p $(APPS_DIR)/$p/src/
+	$(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+	$(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+# Render a single template into src/. Usage: make new t=TEMPLATE n=NAME
+# [in=APP]; with in= set, re-invokes make inside the target application.
+new:
+ifeq ($(wildcard src/)$(in),)
+	$(error Error: src/ directory does not exist)
+endif
+ifndef t
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+	$(error Unknown template)
+endif
+ifndef n
+	$(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+	$(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+# List every template known to make by scanning .VARIABLES for tpl_* names.
+list-templates:
+	$(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# c_src plugin: compiles NIFs/port programs found under $(C_SRC_DIR) and
+# caches the ERTS/erl_interface include and lib paths in $(C_SRC_ENV).
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+	C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+	C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+	C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+	CC = /mingw64/bin/gcc
+	export CC
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+	LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+	CC ?= cc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+	CC ?= gcc
+	CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+	CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+# Position-independent code is required for shared objects everywhere
+# except Windows (msys2).
+ifneq ($(PLATFORM),msys2)
+	CFLAGS += -fPIC
+	CXXFLAGS += -fPIC
+endif
+
+# The *_INCLUDE_DIR/*_LIB_DIR variables come from $(C_SRC_ENV), which is
+# generated by the $(C_SRC_ENV) rule below and included at the end.
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+# Three cases: no c_src/ directory -> define nothing; c_src/Makefile
+# present -> delegate the build to it; otherwise use the generic
+# compile/link rules below.
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+	$(MAKE) -C $(C_SRC_DIR)
+
+clean::
+	$(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+# Default source list: every C/C++ file under $(C_SRC_DIR).
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+# Link step; -shared is passed only when building a shared object.
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+	$(verbose) mkdir -p priv/
+	$(link_verbose) $(CC) $(OBJECTS) \
+		$(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+		-o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+	$(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+	$(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+	$(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+# Generate env.mk by asking the Erlang VM for the ERTS and erl_interface
+# include/lib directories; the result is then -included below so the
+# compile flags above resolve.
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+	$(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+		io_lib:format( \
+			\"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+			\"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+			\"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+			[code:root_dir(), erlang:system_info(version), \
+			code:lib_dir(erl_interface, include), \
+			code:lib_dir(erl_interface, lib)])), \
+		halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+	$(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+# NB: the define bodies below are emitted verbatim into generated files;
+# $n is substituted by render_template. Do not add make comments inside.
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+	/* Initialize private data. */
+	*priv_data = NULL;
+
+	loads++;
+
+	return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+	/* Convert the private data to the new version. */
+	*priv_data = *old_priv_data;
+
+	loads++;
+
+	return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+	if (loads == 1) {
+		/* Destroy the private data. */
+	}
+
+	loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+	if (enif_is_atom(env, argv[0])) {
+		return enif_make_tuple2(env,
+			enif_make_atom(env, "hello"),
+			argv[0]);
+	}
+
+	return enif_make_tuple2(env,
+		enif_make_atom(env, "error"),
+		enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+	{"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+	PrivDir = case code:priv_dir(?MODULE) of
+		{error, _} ->
+			AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+			filename:join(AppPath, "priv");
+		Path ->
+			Path
+	end,
+	erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+	erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+# Scaffold a NIF skeleton: a C stub in $(C_SRC_DIR) and its Erlang
+# counterpart in src/. Usage: make new-nif n=NAME [in=APP]
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+	$(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+	$(error Error: src/$n.erl already exists)
+endif
+ifdef in
+	$(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+	$(verbose) mkdir -p $(C_SRC_DIR) src/
+	$(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+	$(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# CI plugin: builds the Erlang/OTP versions listed in CI_OTP with kerl,
+# then runs `make tests` once under each installed version.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+# Everything below the `else` is only active when CI_OTP is non-empty;
+# with an empty CI_OTP, `ci` is a no-op.
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+# Expanded via $(eval $(call ...)) below: one ci-VERSION target per OTP
+# version, running the test suite with that version's bin/ on PATH.
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+	$(ci_verbose) \
+		PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+		CI_OTP_RELEASE="$(1)" \
+		CT_OPTS="-label $(1)" \
+		$(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+# One build+install rule per OTP version not yet present in CI_INSTALL_DIR.
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+	$(KERL) build git $(OTP_GIT) $(1) $(1)
+	$(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+# Fetch the kerl script itself on demand.
+$(KERL):
+	$(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+	$(verbose) chmod +x $(KERL)
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Continuous Integration targets:" \
+		"  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+		"" \
+		"The CI_OTP variable must be defined with the Erlang versions" \
+		"that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+	$(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# common_test plugin: discovers *_SUITE.erl files in TEST_DIR and exposes
+# `ct`, per-suite `ct-SUITE` targets and per-app `apps-ct-DIR` targets.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+	CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+	CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Common_test targets:" \
+		"  ct          Run all the common_test suites for this project" \
+		"" \
+		"All your common_test suites have their associated targets." \
+		"A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+	-no_auto_compile \
+	-noinput \
+	-pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+	-dir $(TEST_DIR) \
+	-logdir $(CURDIR)/logs
+
+# With no suites of our own, `ct` only recurses into apps (unless we are
+# already inside an app, i.e. IS_APP is set).
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+# One apps-ct-DIR target per application directory, generated via eval.
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+	$(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+# t=GROUP or t=GROUP:CASE narrows a per-suite run to a group/case.
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+# One ct-SUITE target per discovered suite, generated via eval.
+define ct_suite_target
+ct-$(1): test-build
+	$(verbose) mkdir -p $(CURDIR)/logs/
+	$(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+	$(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Dialyzer plugin: builds a per-project PLT and runs dialyzer with the
+# project's ERLC_OPTS filtered down to the flags dialyzer understands.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Dialyzer targets:" \
+		"  plt         Build a PLT file for this project" \
+		"  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Erlang snippet that keeps only the -D/-I/-pa options (and their
+# arguments) from ERLC_OPTS, since dialyzer rejects the rest.
+define filter_opts.erl
+	Opts = init:get_plain_arguments(),
+	{Filtered, _} = lists:foldl(fun
+		(O,                         {Os, true}) -> {[O|Os], false};
+		(O = "-D",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-I",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-pa",                 {Os, _})    -> {[O|Os], true};
+		(_,                         Acc)        -> Acc
+	end, {[], false}, Opts),
+	io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+	halt().
+endef
+
+$(DIALYZER_PLT): deps app
+	$(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+	$(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# If the PLT already exists, dialyze runs directly; otherwise it depends
+# on building the PLT first. The recipe after `endif` attaches to
+# whichever `dialyze` target the active branch defined.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+	$(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# EDoc plugin: generates API documentation into doc/ when an EDoc
+# overview file exists.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+# Only hook edoc into `docs` when the project actually uses EDoc.
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+# Regenerate from scratch: wipe previous output, then run edoc:application/3.
+edoc: distclean-edoc doc-deps
+	$(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+	$(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# escript plugin: packs ebin/ beams, deps and static files into a single
+# self-executing escript archive.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+	-sasl errlog_type error \
+	-escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+# NOTE(review): the trailing backslash after the last help line joins the
+# following blank line into the printf; harmless, but stray.
+help::
+	$(verbose) printf "%s\n" "" \
+		"Escript targets:" \
+		"  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+# Erlang one-liner (as a quoted shell word list) that zips static files,
+# collects beams, and emits the escript with shebang/comment/emu_args.
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+	$(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+	$(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+# EUnit plugin: runs EUnit over ebin + TEST_DIR modules, optionally
+# collecting cover data; t=MODULE or t=MOD:FUN narrows the run.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"EUnit targets:" \
+		"  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+# Erlang driver: optionally cover-compile ebin, run eunit:test/2 on $1,
+# then export cover data. Exit code 1 = cover failure, 2 = test failure.
+define eunit.erl
+	case "$(COVER)" of
+		"" -> ok;
+		_ ->
+			case cover:compile_beam_directory("ebin") of
+				{error, _} -> halt(1);
+				_ -> ok
+			end
+	end,
+	case eunit:test($1, [$(EUNIT_OPTS)]) of
+		ok -> ok;
+		error -> halt(2)
+	end,
+	case "$(COVER)" of
+		"" -> ok;
+		_ ->
+			cover:export("eunit.coverdata")
+	end,
+	halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+# t=MODULE runs one module; t=MOD:FUN runs one zero-arity test function.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+	$(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+	$(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+# Default run: all ebin modules plus test modules, but skip *_tests
+# modules that EUnit already picks up alongside their subject module.
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+	$(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+	$(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+	$(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# relx plugin: downloads the relx release-assembly tool on demand, builds
+# a release from relx.config, and provides a `run` console target.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+# Honor a user-supplied `-o DIR` in RELX_OPTS; otherwise append our own.
+ifeq ($(firstword $(RELX_OPTS)),-o)
+	RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+	RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+# Hook into `rel` only for the top-level project (not when built as a dep).
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+	$(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+	$(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+	$(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+	$(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+	$(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+# Without a relx.config, `run` exists but does nothing.
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+# Extract the release name from relx.config at run time.
+define get_relx_release.erl
+	{ok, Config} = file:consult("$(RELX_CONFIG)"),
+	{release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+	io:format("~s", [Name]),
+	halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+	$(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Relx targets:" \
+		"  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+# shell plugin: fetches/builds SHELL_DEPS and starts an Erlang shell
+# with the project's code paths preloaded.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Shell targets:" \
+		"  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+# Generate fetch rules for each shell-only dependency via dep_target.
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+	$(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+	$(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Triq plugin: property-based testing, only enabled when `triq` appears
+# in DEPS or TEST_DEPS.
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+# Erlang driver: check all modules, one module, or one property function.
+# NOTE(review): `error:undef` (missing property/module) halts with 0,
+# i.e. is treated as success — upstream behavior, kept as-is.
+define triq_check.erl
+	code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+	try
+		case $(1) of
+			all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+			module -> triq:check($(2));
+			function -> triq:check($(2))
+		end
+	of
+		true -> halt(0);
+		_ -> halt(1)
+	catch error:undef ->
+		io:format("Undefined property or module~n"),
+		halt(0)
+	end.
+endef
+
+# t=MODULE checks one module; t=MOD:FUN checks one property function.
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+	$(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+	$(verbose) echo Testing $(t)/0
+	$(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+	$(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Cover plugin: hooks Erlang cover into ct/eunit (when COVER is set),
+# merges *.coverdata files and renders an HTML report.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+# Only when COVER is set and the ct plugin defined CT_RUN: generate a ct
+# cover spec covering every module in ebin/.
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+	$(verbose) echo Cover mods: $(COVER_MODS)
+	$(gen_verbose) printf "%s\n" \
+		'{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+		'{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+# After tests, build the HTML report (unless reports were disabled by
+# setting COVER_REPORT_DIR to empty).
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+	$(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Cover targets:" \
+		"  cover-report  Generate a HTML coverage report from previously collected" \
+		"                cover data." \
+		"  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+		"" \
+		"If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+		"target tests additionally generates a HTML coverage report from the combined" \
+		"coverdata files from each of these testing tools. HTML reports can be disabled" \
+		"by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+	$(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+	$(gen_verbose) $(ERL) -eval ' \
+		$(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+		cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+	$(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+# With no coverdata collected, cover-report is a no-op.
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+	grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+	| sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+# Erlang snippet: import every coverdata file, emit per-module
+# *.COVER.html pages, and write an index.html with total percentages.
+define cover_report.erl
+	$(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+	Ms = cover:imported_modules(),
+	[cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+		++ ".COVER.html", [html])  || M <- Ms],
+	Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+	EunitHrlMods = [$(EUNIT_HRL_MODS)],
+	Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+		true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+	TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+	TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+	Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+	TotalPerc = Perc(TotalY, TotalN),
+	{ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+	io:format(F, "<!DOCTYPE html><html>~n"
+		"<head><meta charset=\"UTF-8\">~n"
+		"<title>Coverage report</title></head>~n"
+		"<body>~n", []),
+	io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+	io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+	[io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+		"<td>~p%</td></tr>~n",
+		[M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+	How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+	Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+	io:format(F, "</table>~n"
+		"<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+		"</body></html>", [How, Date]),
+	halt().
+endef
+
+cover-report:
+	$(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+	$(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+	fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+	$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+	$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+	$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+	$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+# With SKIP_DEPS set, just truncate the list files (`:> $@`) and fetch
+# nothing.
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+	$(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+# Shared scratch file used by the recursive make invocations below to
+# deduplicate dependencies across the whole tree.
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+# The top-level invocation (neither IS_APP nor IS_DEP set) initializes
+# the tmp list, recurses into apps and deps that themselves use
+# erlang.mk, then sorts/uniques the result into the final list file.
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+	$(verbose) mkdir -p $(ERLANG_MK_TMP)
+	$(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep $@ \
+		 IS_APP=1 \
+		 ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+		 || exit $$?; \
+	done
+endif
+	$(verbose) for dep in $^ ; do \
+		if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+			echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+			if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+			 $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+				$(MAKE) -C $$dep fetch-deps \
+				 IS_DEP=1 \
+				 ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+				 || exit $$?; \
+			fi \
+		fi \
+	done
+ifeq ($(IS_APP)$(IS_DEP),)
+	$(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+	$(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+	list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+# Each list-* target simply prints its (already generated) list file.
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+	$(verbose) cat $^
diff --git a/deps/rabbitmq_web_dispatch/rabbitmq-components.mk b/deps/rabbitmq_web_dispatch/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# need to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name are replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 96%
rename from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch.erl
rename to deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch.erl
index c328c1ffab9f925fbc0f1f66650306c6e6241c5e..5a7481d03583162d362b9ac3d4dc97d2aa753c72 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_web_dispatch).
@@ -87,12 +87,12 @@ static_context_handler(Prefix, Module, FSPath) ->
 %% static content from a specified directory.
 static_context_handler("", LocalPath) ->
     fun(Req) ->
-            "/" ++ Path = Req:get(raw_path),
+            "/" ++ Path = Req:get(path),
             serve_file(Req, Path, LocalPath)
     end;
 static_context_handler(Prefix, LocalPath) ->
     fun(Req) ->
-            "/" ++ Path = Req:get(raw_path),
+            "/" ++ Path = Req:get(path),
             case string:substr(Path, length(Prefix) + 1) of
                 ""        -> Req:respond({301, [{"Location", "/" ++ Prefix ++ "/"}], ""});
                 "/" ++ P  -> serve_file(Req, P, LocalPath)
similarity index 93%
rename from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_app.erl
rename to deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_app.erl
index 478b3fd7ea61f4ebbe070ddab9c6a1edeb16cdcf..9dd211706085233cd23732f7df0bb6b70a40d82f 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_web_dispatch_app).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_registry.erl
rename to deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_registry.erl
index e15530adaf21be898815c51849bca9cab00e8b77..ff651eec50949b281884558b1f0793967fcc8f66 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_web_dispatch_registry).
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_sup.erl
rename to deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_sup.erl
index 5582d47a36670b13699137ee7a70dccbde05a65a..22a293f7e2f2925c82f4540aaa97ba583b219068 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_web_dispatch_sup).
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch_util.erl
rename to deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_util.erl
index c031138eaa3818580aa3917ddc1079a74b12bb50..f8c116f6c0e5035f1b94ffc05608cdf43eb3b8e9 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_web_dispatch_util).
@@ -48,7 +48,7 @@ relativise0([_|From], To) ->
 relativise0([], To) ->
     To.
 
-unrelativise(F, "/"   ++ T) -> "/" ++ T;
+unrelativise(_, "/"   ++ T) -> "/" ++ T;
 unrelativise(F, "./"  ++ T) -> unrelativise(F, T);
 unrelativise(F, "../" ++ T) -> unrelativise(strip_tail(F), T);
 unrelativise(F, T)          -> case string:str(F, "/") of
similarity index 75%
rename from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbit_webmachine_error_handler.erl
rename to deps/rabbitmq_web_dispatch/src/rabbit_webmachine_error_handler.erl
index 849e5b9d14203fdec5451e9a1511b617f4eb5600..3d11529bf0adfecce3aa315975131f5bdbbbb3e8 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 %% We need to ensure all responses are application/json; anything
 
 render_error(Code, Req, Reason) ->
     case Req:has_response_body() of
-        {true, _}  -> maybe_log(Req, Reason),
-                      Req:response_body();
+        {true, _}  ->
+            maybe_log(Req, Reason),
+            {Body, ReqState0} = Req:response_body(),
+            {ok, ReqState} =
+                webmachine_request:remove_response_header("Content-Encoding",
+                                                          ReqState0),
+            {Body, ReqState};
         {false, _} -> render_error_body(Code, Req:trim_state(), Reason)
     end.
 
-render_error_body(404,  Req, Reason) -> error_body(404,  Req, "Not Found");
+render_error_body(404,  Req, _)      -> error_body(404,  Req, "Not Found");
 render_error_body(Code, Req, Reason) -> error_body(Code, Req, Reason).
 
 error_body(Code, Req, Reason) ->
-    {ok, ReqState} = Req:add_response_header("Content-Type","application/json"),
+    {ok, _ReqState0} = Req:add_response_header("Content-Type","application/json"),
+    {ok, ReqState} = Req:remove_response_header("Content-Encoding"),
     case Code of
         500 -> maybe_log(Req, Reason);
         _   -> ok
similarity index 62%
rename from rabbitmq-server/plugins-src/rabbitmq-web-dispatch/src/rabbitmq_web_dispatch.app.src
rename to deps/rabbitmq_web_dispatch/src/rabbitmq_web_dispatch.app.src
index 5e7dd4d3c2c7bb1ffbfb5d0d12808978e24a9dc4..0e06ccc248517a99874ca125b749e18d1d480623 100644 (file)
@@ -1,8 +1,8 @@
 {application, rabbitmq_web_dispatch,
  [{description, "RabbitMQ Web Dispatcher"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {mod, {rabbit_web_dispatch_app, []}},
   {env, []},
-  {applications, [kernel, stdlib, mochiweb, webmachine]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit, mochiweb, webmachine]}]}.
diff --git a/deps/rabbitmq_web_stomp/CODE_OF_CONDUCT.md b/deps/rabbitmq_web_stomp/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_web_stomp/CONTRIBUTING.md b/deps/rabbitmq_web_stomp/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place to discuss code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_web_stomp/Makefile b/deps/rabbitmq_web_stomp/Makefile
new file mode 100644 (file)
index 0000000..f14154e
--- /dev/null
@@ -0,0 +1,35 @@
+PROJECT = rabbitmq_web_stomp
+
+DEPS = cowboy sockjs rabbit_common rabbit rabbitmq_stomp
+TEST_DEPS = rabbitmq_ct_helpers
+dep_cowboy_commit = 1.0.3
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+# We need to patch SockJS' Makefile to be able to pass ERLC_OPTS to it.
+.DEFAULT_GOAL = all
+deps:: patch-sockjs
+
+include rabbitmq-components.mk
+include erlang.mk
+
+# --------------------------------------------------------------------
+# Compilation.
+# --------------------------------------------------------------------
+
+SOCKJS_ERLC_OPTS += $(RMQ_ERLC_OPTS)
+export SOCKJS_ERLC_OPTS
+
+.PHONY: patch-sockjs
+patch-sockjs: $(DEPS_DIR)/sockjs
+       $(exec_verbose) if ! grep -qw SOCKJS_ERLC_OPTS $(DEPS_DIR)/sockjs/Makefile; then \
+               echo >> $(DEPS_DIR)/sockjs/Makefile; \
+               echo >> $(DEPS_DIR)/sockjs/Makefile; \
+               echo 'ERLC_OPTS += $$(SOCKJS_ERLC_OPTS)' >> $(DEPS_DIR)/sockjs/Makefile; \
+       fi
similarity index 98%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp/README.md
rename to deps/rabbitmq_web_stomp/README.md
index 48376d8f13c025d46e1dfb62ec48058d3aac41d4..0d37e738955191128112d53557a1de2a66e5af57 100644 (file)
@@ -1,4 +1,4 @@
-RabbitMQ-Web-Stomp plugin
+RabbitMQ Web STOMP plugin
 =========================
 
 This project is a simple bridge between "RabbitMQ-stomp" plugin and
diff --git a/deps/rabbitmq_web_stomp/erlang.mk b/deps/rabbitmq_web_stomp/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards `in` and `between` for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang OAuth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+# pkg_print(1: package key): print one package's metadata via printf.
+# The "Pkg name" line is suppressed when the package key equals its app
+# name (core_eq is a helper defined elsewhere in erlang.mk). The trailing
+# "" emits a blank separator line between packages.
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+# `make search q=TERM` prints every package whose name or description
+# contains TERM (case-insensitive; core_lc lowercases, defined elsewhere
+# in erlang.mk). Without q= it lists the entire package index.
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+# OTP_DEPS was replaced by LOCAL_DEPS; warn (but do not fail) if a user
+# Makefile still sets it.
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+# Dependencies listed here are skipped by the deps machinery below.
+# Exported so recursive $(MAKE) invocations see the same exclusions.
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+# Multi-app layout: sub-applications live under $(APPS_DIR).
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+# All fetched dependencies are checked out under $(DEPS_DIR).
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+# Point rebar-based deps at the same directory so mixed builds share
+# a single checkout tree.
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+# dep_name(1): a user-declared dep_<name> keeps its own key; otherwise the
+# package index name (pkg_<name>_name) is used when present, else the key
+# itself.
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+# dep_repo(1): repo URL from dep_<name> (word 2) or the package index;
+# git:// GitHub URLs are rewritten to https:// for firewall friendliness.
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+# dep_commit(1): explicit dep_<name>_commit wins, then word 3 of
+# dep_<name>, then the package index default.
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+# Immediate subdirectories of $(APPS_DIR) (excluding APPS_DIR itself),
+# empty when no apps/ directory exists.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+# Checkout directories for BUILD_DEPS + DEPS, minus IGNORE_DEPS, each
+# resolved through dep_name.
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# Ensure APPS_DIR and DEPS_DIR are on ERL_LIBS (colon-separated) so the
+# Erlang code server can find apps and deps; only appended when neither
+# directory is already present in the existing value.
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+# Propagate the autopatch opt-out to recursive $(MAKE) invocations.
+export NO_AUTOPATCH
+
+# Verbosity.
+
+# V=0 (default): print a short " DEP <name>" status line and hide the
+# command; V=2: trace the recipe with `set -x`; other values expand to
+# nothing, letting make echo commands normally.
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+# Build every application under apps/. When already building as a
+# sub-app (IS_APP set) the target is a no-op to stop infinite recursion.
+# apps.log records which apps were already built in this run; it is reset
+# only at the top-level invocation (neither IS_APP nor IS_DEP set).
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+# Recurse into each app exactly once, tracking visits in apps.log.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+# Build all fetched dependencies (after apps). SKIP_DEPS turns the target
+# into a no-op. deps.log de-duplicates work across the recursive build and
+# is reset only at the top-level invocation. A dependency without any
+# recognizable Makefile (GNUmakefile/makefile/Makefile) is a hard error.
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+# dep_autopatch(1: dep name): decide how to patch a fetched dependency so
+# it builds under erlang.mk:
+#  - ships its own erlang.mk        -> regenerate .app.src, swap in ours
+#  - Makefile includes ../*.mk or mentions rebar (in the Makefile itself
+#    or any non-erlang.mk *.mk file) -> full rebar-style autopatch
+#  - Makefile with no rebar traces  -> only regenerate the .app file
+#  - no Makefile: src/ present      -> full autopatch; otherwise write a
+#    no-op Makefile (nothing to build)
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2(1: dep name): full autopatch path. Evaluates an
+# .app.src.script if present, regenerates the .app.src, then either
+# converts a rebar-based project (fetching a pinned rebar first) or
+# generates a minimal erlang.mk Makefile for it.
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+# dep_autopatch_noop(1: dep name): give a buildless dependency a Makefile
+# whose only target does nothing, so the deps loop's $(MAKE) succeeds.
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# The dep's bundled erlang.mk is replaced by a one-line include of this
+# build's erlang.mk (path computed relative to the dep's directory via
+# core_relpath). Set NO_AUTOPATCH_ERLANG_MK to keep the dep's own copy.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+# dep_autopatch_gen(1: dep name): write a minimal Makefile for a
+# rebar-less dependency: debug_info compile options plus an include of
+# the top-level project's erlang.mk (two levels up from the dep dir).
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build a rebar pinned to a fixed commit into $(ERLANG_MK_TMP),
+# once per tree (skipped if the checkout already exists). The pinned SHA
+# keeps rebar-conversion behavior reproducible.
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+# dep_autopatch_rebar(1: dep name): convert a rebar project. The original
+# Makefile is preserved as Makefile.orig.mk, the dep_autopatch_rebar.erl
+# snippet generates an erlang.mk Makefile from rebar.config, and any
+# prebuilt ebin/<dep>.app is removed so it gets regenerated.
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Erlang helper (run via $(call erlang,...)): rewrite the dep's compiled
+# ebin/$1.app so its {modules, ...} entry lists exactly the .erl modules
+# found under $(DEPS_DIR)/$1/src. Silently a no-op if the .app is absent.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# Erlang helper: evaluate src/$1.app.src.script (with empty bindings) and
+# materialise its result as a plain src/$1.app.src term file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# Erlang helper: normalise the dep's .app.src for an Erlang.mk build:
+# fall back to ebin/$1.app when src/$1.app.src is missing, empty the
+# modules list (regenerated later by dep_autopatch_app), turn {vsn, git}
+# into the literal "git", ensure a {registered, []} entry exists, and
+# delete the input file when it was read from a different location.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# git: clone without checking out a working tree (-n), then check out the
+# pinned commit/tag/branch recorded for the dep.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Fetch a dep that is tracked as a git submodule of the current repository.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# hg: clone without a working copy (-U), then update to the pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# svn: plain checkout of the configured repository URL.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# cp: local dependency, fetched by recursive copy from the source path.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Erlang helper: download the $(1)-$(2) package tarball from the hex.pm S3
+# mirror, unpack it in memory, and extract the inner contents.tar.gz into
+# the dep's directory.
+# NOTE(review): the tarball's checksum/metadata are not verified here.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# The second argument handed to dep_fetch_hex.erl is the version string,
+# i.e. the second word of the dep_$(1) definition.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Abort the fetch with a diagnostic on stderr. Exit code 78 — presumably
+# EX_CONFIG from BSD sysexits (configuration error); TODO confirm.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Old two-word "dep_NAME = repo commit" format: git clone the first word
+# and check out the second, defaulting to master when it is absent.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch method name for dep $(1): the first word of dep_$(1)
+# when it names a known dep_fetch_* method; otherwise "legacy" when built
+# as a dependency (IS_DEP); otherwise the package-index fetch method for
+# known packages; otherwise "fail".
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# Per-dependency fetch/build rule, instantiated once per dep via $(eval).
+# Fetches the dep with the resolved method, runs autoreconf/configure when
+# autotools files are present, and (unless the dep is in NO_AUTOPATCH)
+# autopatches it so it builds with Erlang.mk.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+# Special-case RabbitMQ deps when the matching *_PATCH flag is set;
+# everything else goes through the generic dep_autopatch.
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate one fetch/build target per declared build-time and runtime dep.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse into each application directory for clean/distclean (skipped when
+# already building as a sub-application, IS_APP).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# Top-level distclean removes the whole deps directory unless SKIP_DEPS.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Each plugin makefile is -include'd and declared to depend on its dep
+# being fetched ($(2)); the lone ';' gives the rule an empty recipe.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# A plugin given as a bare dep name expands to <dep>/plugins.mk; a value
+# containing '/' is used as a path relative to $(DEPS_DIR) as-is.
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Erlang helper: compile each .dtl template to an ebin/ module named
+# <template-lowercased>$(DTL_SUFFIX); with DTL_FULL_PATH set, subdirectory
+# separators are flattened into the module name as underscores.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Only recompile templates that are newer than the .app file ($?).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Generate Erlang sources from .proto files, compile them, then remove the
+# intermediate .erl files from ebin/.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Erlang helper: invoke protobuffs_compile (from the erlang_protobuffs
+# project — TODO confirm the dep used) for each .proto file, emitting
+# sources into <dir>/ebin and headers into <dir>/include.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# V=0 prints terse "  APP  foo"-style progress lines; V=2 traces each
+# recipe with the shell's set -x.
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# When ebin/test exists (presumably a marker left by a prior test build —
+# TODO confirm), force a clean first so stale beams are not reused.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# Template for the generated ebin/$(PROJECT).app. Two variants: without an
+# application callback module when src/$(PROJECT_MOD).erl is absent, and
+# with {mod, ...} plus registered processes otherwise. $(1) is the dep id
+# (only emitted when IS_DEP), $(2) the list of modules.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+# app-build only depends on the .app file; the ':' no-op keeps the recipe
+# non-empty so make always reports success.
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# erlc +noobj generates .erl/.hrl/.asn1db from the specs; the sources are
+# moved into src/ so the normal Erlang compile pass picks them up.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+# MIBs are compiled to priv/mibs/*.bin first, then the .bin files are
+# compiled again to include/ .hrl headers.
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+# The .erl files erlc generates from .xrl/.yrl land in src/ and are added
+# to ERL_FILES so the normal compile pass builds them.
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Erlang helper: generate the $(1) (= $(PROJECT).d) dependency file. It
+# parses every module's attributes — behaviour/behavior, parse_transform
+# (bare or inside compile options), include/include_lib, import — records
+# file-level dependencies as make rules, and derives COMPILE_FIRST from a
+# topological sort of the resulting module graph.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -u` -o `id -g` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/deps/rabbitmq_web_stomp/rabbitmq-components.mk b/deps/rabbitmq_web_stomp/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_app.erl
rename to deps/rabbitmq_web_stomp/src/rabbit_ws_app.erl
index c0e3afe22ba3568608b3d464fa4e71c019a9a422..6929dfba52dfd1fc9f40b1cac919ba079e7ec339 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_ws_app).
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_ws_client.erl b/deps/rabbitmq_web_stomp/src/rabbit_ws_client.erl
new file mode 100644 (file)
index 0000000..1a7f88e
--- /dev/null
@@ -0,0 +1,255 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_ws_client).
+-behaviour(gen_server).
+
+-include_lib("rabbitmq_stomp/include/rabbit_stomp.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+-export([start_link/1]).
+-export([sockjs_msg/2, sockjs_closed/1]).
+
+-export([init/1, handle_call/3, handle_info/2, terminate/2,
+         code_change/3, handle_cast/2]).
+
+-record(state, {conn, proc_state, parse_state, stats_timer, connection, heartbeat_mode, heartbeat, heartbeat_sup}).
+
+%%----------------------------------------------------------------------------
+
+start_link(Params) ->
+    gen_server:start_link(?MODULE, Params, []).
+
+sockjs_msg(Pid, Data) ->
+    gen_server:cast(Pid, {sockjs_msg, Data}).
+
+sockjs_closed(Pid) ->
+    gen_server:cast(Pid, sockjs_closed).
+
+%%----------------------------------------------------------------------------
+
+init({SupPid, Conn, Heartbeat, Conn}) ->
+    ok = file_handle_cache:obtain(),
+    process_flag(trap_exit, true),
+    {ok, ProcessorState} = init_processor_state(Conn),
+    {ok, rabbit_event:init_stats_timer(
+           #state{conn           = Conn,
+                  proc_state     = ProcessorState,
+                  parse_state    = rabbit_stomp_frame:initial_state(),
+                  heartbeat_sup  = SupPid,
+                  heartbeat      = {none, none},
+                  heartbeat_mode = Heartbeat},
+           #state.stats_timer)}.
+
+init_processor_state(Conn) ->
+    SendFun = fun (_Sync, Data) ->
+                      Conn:send(Data),
+                      ok
+              end,
+    Info = Conn:info(),
+    Headers = proplists:get_value(headers, Info),
+
+    UseHTTPAuth = application:get_env(rabbitmq_web_stomp, use_http_auth, false),
+    StompConfig0 = #stomp_configuration{implicit_connect = false},
+
+    StompConfig = case UseHTTPAuth of
+        true ->
+            case lists:keyfind(authorization, 1, Headers) of
+                false ->
+                    %% We fall back to the default STOMP credentials.
+                    StompConfig0;
+                {_, AuthHd} ->
+                    {<<"basic">>, {HTTPLogin, HTTPPassCode}}
+                        = cowboy_http:token_ci(list_to_binary(AuthHd),
+                                               fun cowboy_http:authorization/2),
+                    StompConfig0#stomp_configuration{
+                      default_login = HTTPLogin,
+                      default_passcode = HTTPPassCode,
+                      force_default_creds = true}
+            end;
+        false ->
+            StompConfig0
+    end,
+
+    Sock = proplists:get_value(socket, Info),
+    {PeerAddr, _} = proplists:get_value(peername, Info),
+    AdapterInfo0 = #amqp_adapter_info{additional_info=Extra}
+        = amqp_connection:socket_adapter_info(Sock, {'Web STOMP', 0}),
+    %% Flow control is not supported for Web-STOMP connections.
+    AdapterInfo = AdapterInfo0#amqp_adapter_info{
+        additional_info=[{state, running}|Extra]},
+
+    ProcessorState = rabbit_stomp_processor:initial_state(
+        StompConfig,
+        {SendFun, AdapterInfo, none, PeerAddr}),
+    {ok, ProcessorState}.
+
+handle_cast({sockjs_msg, Data}, State = #state{proc_state  = ProcessorState,
+                                               parse_state = ParseState,
+                                               connection  = ConnPid}) ->
+    case process_received_bytes(Data, ProcessorState, ParseState, ConnPid) of
+        {ok, NewProcState, ParseState1, ConnPid1} ->
+            {noreply, ensure_stats_timer(State#state{
+                        parse_state = ParseState1,
+                        proc_state  = NewProcState,
+                        connection  = ConnPid1})};
+        {stop, Reason, NewProcState, ParseState1} ->
+            {stop, Reason, State#state{
+                                parse_state = ParseState1,
+                                proc_state  = NewProcState}}
+    end;
+
+handle_cast(sockjs_closed, State) ->
+    {stop, normal, State};
+
+handle_cast(client_timeout, State) ->
+    {stop, {shutdown, client_heartbeat_timeout}, State};
+
+handle_cast(Cast, State) ->
+    {stop, {odd_cast, Cast}, State}.
+
+%% TODO this is a bit rubbish - after the preview release we should
+%% make the credit_flow:send/1 invocation in
+%% rabbit_stomp_processor:process_frame/2 optional.
+handle_info({bump_credit, {_, _}}, State) ->
+    {noreply, State};
+
+handle_info(#'basic.consume_ok'{}, State) ->
+    {noreply, State};
+handle_info(#'basic.cancel_ok'{}, State) ->
+    {noreply, State};
+handle_info(#'basic.ack'{delivery_tag = Tag, multiple = IsMulti}, State) ->
+    ProcState = processor_state(State),
+    NewProcState = rabbit_stomp_processor:flush_pending_receipts(Tag,
+                                                                   IsMulti,
+                                                                   ProcState),
+    {noreply, processor_state(NewProcState, State)};
+handle_info({Delivery = #'basic.deliver'{},
+             #amqp_msg{props = Props, payload = Payload},
+             DeliveryCtx},
+             State) ->
+    ProcState = processor_state(State),
+    NewProcState = rabbit_stomp_processor:send_delivery(Delivery,
+                                                          Props,
+                                                          Payload,
+                                                          DeliveryCtx,
+                                                          ProcState),
+    {noreply, processor_state(NewProcState, State)};
+handle_info(#'basic.cancel'{consumer_tag = Ctag}, State) ->
+    ProcState = processor_state(State),
+    case rabbit_stomp_processor:cancel_consumer(Ctag, ProcState) of
+      {ok, NewProcState} ->
+        {noreply, processor_state(NewProcState, State)};
+      {stop, Reason, NewProcState} ->
+        {stop, Reason, processor_state(NewProcState, State)}
+    end;
+
+handle_info({start_heartbeats, _},
+            State = #state{heartbeat_mode = no_heartbeat}) ->
+    {noreply, State};
+
+handle_info({start_heartbeats, {0, 0}}, State) ->
+    {noreply, State};
+handle_info({start_heartbeats, {SendTimeout, ReceiveTimeout}},
+            State = #state{conn = Conn,
+                           heartbeat_sup = SupPid,
+                           heartbeat_mode = heartbeat}) ->
+    Info = Conn:info(),
+    Sock = proplists:get_value(socket, Info),
+    Pid = self(),
+    SendFun = fun () -> Conn:send(<<$\n>>), ok end,
+    ReceiveFun = fun() -> gen_server2:cast(Pid, client_timeout) end,
+    Heartbeat = rabbit_heartbeat:start(SupPid, Sock, SendTimeout,
+                                       SendFun, ReceiveTimeout, ReceiveFun),
+    {noreply, State#state{heartbeat = Heartbeat}};
+
+
+
+%%----------------------------------------------------------------------------
+handle_info({'EXIT', From, Reason}, State) ->
+  ProcState = processor_state(State),
+  case rabbit_stomp_processor:handle_exit(From, Reason, ProcState) of
+    {stop, Reason, NewProcState} ->
+        {stop, Reason, processor_state(NewProcState, State)};
+    unknown_exit ->
+        {stop, {connection_died, Reason}, State}
+  end;
+%%----------------------------------------------------------------------------
+
+handle_info(emit_stats, State) ->
+    {noreply, emit_stats(State)};
+
+handle_info(Info, State) ->
+    {stop, {odd_info, Info}, State}.
+
+
+
+handle_call(Request, _From, State) ->
+    {stop, {odd_request, Request}, State}.
+
+terminate(_Reason, State = #state{conn = Conn, proc_state = ProcessorState}) ->
+    maybe_emit_stats(State),
+    ok = file_handle_cache:release(),
+    rabbit_stomp_processor:flush_and_die(ProcessorState),
+    Conn:close(1000, "STOMP died"),
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+
+%%----------------------------------------------------------------------------
+
+
+process_received_bytes(Bytes, ProcessorState, ParseState, ConnPid) ->
+    case rabbit_stomp_frame:parse(Bytes, ParseState) of
+        {ok, Frame, Rest} ->
+            case rabbit_stomp_processor:process_frame(Frame, ProcessorState) of
+                {ok, NewProcState, ConnPid1} ->
+                    ParseState1 = rabbit_stomp_frame:initial_state(),
+                    process_received_bytes(Rest, NewProcState, ParseState1, ConnPid1);
+                {stop, Reason, NewProcState} ->
+                    {stop, Reason, NewProcState, ParseState}
+            end;
+        {more, ParseState1} ->
+            {ok, ProcessorState, ParseState1, ConnPid}
+    end.
+
+processor_state(#state{ proc_state = ProcState }) -> ProcState.
+processor_state(ProcState, #state{} = State) ->
+  State#state{ proc_state = ProcState}.
+
+%%----------------------------------------------------------------------------
+
+ensure_stats_timer(State) ->
+    rabbit_event:ensure_stats_timer(State, #state.stats_timer, emit_stats).
+
+maybe_emit_stats(State) ->
+    rabbit_event:if_enabled(State, #state.stats_timer,
+                                fun() -> emit_stats(State) end).
+
+emit_stats(State=#state{conn=Conn, connection=ConnPid}) ->
+    Info = Conn:info(),
+    Sock = proplists:get_value(socket, Info),
+    SockInfos = case rabbit_net:getstat(Sock,
+            [recv_oct, recv_cnt, send_oct, send_cnt, send_pend]) of
+        {ok,    SI} -> SI;
+        {error,  _} -> []
+    end,
+    Infos = [{pid, ConnPid}|SockInfos],
+    rabbit_event:notify(connection_stats, Infos),
+    State1 = rabbit_event:reset_stats_timer(State, #state.stats_timer),
+    State1.
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_ws_client_sup.erl b/deps/rabbitmq_web_stomp/src/rabbit_ws_client_sup.erl
new file mode 100644 (file)
index 0000000..57c3dc4
--- /dev/null
@@ -0,0 +1,40 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_ws_client_sup).
+-behaviour(supervisor2).
+
+-export([start_client/1]).
+-export([init/1]).
+
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+
+%% --------------------------------------------------------------------------
+
+start_client({Conn, Heartbeat}) ->
+    {ok, SupPid} = supervisor2:start_link(?MODULE, []),
+    {ok, Client} = supervisor2:start_child(
+                     SupPid, client_spec(SupPid, Conn, Heartbeat, Conn)),
+    {ok, SupPid, Client}.
+
+
+client_spec(SupPid, Conn, Heartbeat, Conn) ->
+    {rabbit_ws_client, {rabbit_ws_client, start_link, [{SupPid, Conn, Heartbeat, Conn}]},
+     intrinsic, ?WORKER_WAIT, worker, [rabbit_ws_client]}.
+
+init(_Any) ->
+    {ok, {{one_for_all, 0, 1}, []}}.
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_ws_handler.erl b/deps/rabbitmq_web_stomp/src/rabbit_ws_handler.erl
new file mode 100644 (file)
index 0000000..785814a
--- /dev/null
@@ -0,0 +1,125 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at http://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
+%%
+
+-module(rabbit_ws_handler).
+-behaviour(cowboy_websocket_handler).
+
+%% Websocket.
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+%% SockJS interface
+-export([info/1]).
+-export([send/2]).
+-export([close/3]).
+
+-record(state, {pid, type}).
+
+%% Websocket.
+
+init(_, _Req, _Opts) ->
+    {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_TransportName, Req0, [{type, FrameType}]) ->
+    Req = case cowboy_req:header(<<"sec-websocket-protocol">>, Req0) of
+        {undefined, _} -> Req0;
+        {ProtocolHd, _} ->
+            Protocols = parse_sec_websocket_protocol_req(ProtocolHd),
+            case filter_stomp_protocols(Protocols) of
+                [] -> Req0;
+                [StompProtocol|_] ->
+                    cowboy_req:set_resp_header(<<"sec-websocket-protocol">>,
+                        StompProtocol, Req0)
+            end
+    end,
+    {Peername, _} = cowboy_req:peer(Req),
+    [Socket, Transport] = cowboy_req:get([socket, transport], Req),
+    {ok, Sockname} = Transport:sockname(Socket),
+    Headers = case cowboy_req:header(<<"authorization">>, Req) of
+        {undefined, _} -> [];
+        {AuthHd, _}    -> [{authorization, binary_to_list(AuthHd)}]
+    end,
+    Conn = {?MODULE, self(), [
+        {socket, Socket},
+        {peername, Peername},
+        {sockname, Sockname},
+        {headers, Headers}]},
+    {ok, _Sup, Pid} = rabbit_ws_sup:start_client({Conn, heartbeat}),
+    {ok, Req, #state{pid=Pid, type=FrameType}}.
+
+websocket_handle({text, Data}, Req, State=#state{pid=Pid}) ->
+    rabbit_ws_client:sockjs_msg(Pid, Data),
+    {ok, Req, State};
+websocket_handle({binary, Data}, Req, State=#state{pid=Pid}) ->
+    rabbit_ws_client:sockjs_msg(Pid, Data),
+    {ok, Req, State};
+websocket_handle(_Frame, Req, State) ->
+    {ok, Req, State}.
+
+websocket_info({send, Msg}, Req, State=#state{type=FrameType}) ->
+    {reply, {FrameType, Msg}, Req, State};
+websocket_info(Frame = {close, _, _}, Req, State) ->
+    {reply, Frame, Req, State};
+websocket_info(_Info, Req, State) ->
+    {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, #state{pid=Pid}) ->
+    rabbit_ws_client:sockjs_closed(Pid),
+    ok.
+
+%% When moving to Cowboy 2, this code should be replaced
+%% with a simple call to cow_http_hd:parse_sec_websocket_protocol_req/1.
+
+parse_sec_websocket_protocol_req(Bin) ->
+    Protocols = binary:split(Bin, [<<$,>>, <<$\s>>], [global]),
+    [P || P <- Protocols, P =/= <<>>].
+
+%% The protocols v10.stomp, v11.stomp and v12.stomp are registered
+%% at IANA: https://www.iana.org/assignments/websocket/websocket.xhtml
+
+filter_stomp_protocols(Protocols) ->
+    lists:reverse(lists:sort(lists:filter(
+        fun(<< "v1", C, ".stomp">>)
+            when C =:= $2; C =:= $1; C =:= $0 -> true;
+           (_) ->
+            false
+        end,
+        Protocols))).
+
+%% SockJS connection handling.
+
+%% The following functions are replicating the functionality
+%% found in sockjs_session. I am not too happy about using
+%% a tuple-call, but at the time of writing this code it is
+%% necessary in order to share the existing code with SockJS.
+%%
+%% Ideally all the STOMP interaction should be done from
+%% within the Websocket process. This could be a good refactoring
+%% once SockJS gets removed.
+
+info({?MODULE, _, Info}) ->
+    Info.
+
+send(Data, {?MODULE, Pid, _}) ->
+    Pid ! {send, Data},
+    ok.
+
+close(Code, Reason, {?MODULE, Pid, _}) ->
+    Pid ! {close, Code, Reason},
+    ok.
similarity index 53%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_sockjs.erl
rename to deps/rabbitmq_web_stomp/src/rabbit_ws_sockjs.erl
index 3cc7ebd72bb0633278b75b8886f8c307da2f6717..b66a1a80f877514357fac344703e221daed40a55 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_ws_sockjs).
 
 -spec init() -> ok.
 init() ->
-    Port = get_env(port, 55674),
+    %% The 'tcp_config' option may include the port, but we already have
+    %% a 'port' option, so we prioritize the 'port' option over the one
+    %% found in 'tcp_config', if any.
+    TCPConf0 = get_env(tcp_config, []),
+    {TCPConf, Port} = case application:get_env(rabbitmq_web_stomp, port) of
+        undefined ->
+            {TCPConf0, proplists:get_value(port, TCPConf0, 15674)};
+        {ok, Port0} ->
+            {[{port, Port0}|TCPConf0], Port0}
+    end,
+
+    WsFrame = get_env(ws_frame, text),
+    CowboyOpts = get_env(cowboy_opts, []),
+
     SockjsOpts = get_env(sockjs_opts, []) ++ [{logger, fun logger/3}],
 
     SockjsState = sockjs_handler:init_state(
                     <<"/stomp">>, fun service_stomp/3, {}, SockjsOpts),
-    VhostRoutes = [{[<<"stomp">>, '...'], sockjs_cowboy_handler, SockjsState}],
-    Routes = [{'_',  VhostRoutes}], % any vhost
-    NbAcceptors = get_env(nb_acceptors, 100),
-    cowboy:start_listener(http, NbAcceptors,
-                          cowboy_tcp_transport, [{port,     Port}],
-                          cowboy_http_protocol, [{dispatch, Routes}]),
+    VhostRoutes = [
+        {"/stomp/[...]", sockjs_cowboy_handler, SockjsState},
+        {"/ws", rabbit_ws_handler, [{type, WsFrame}]}
+    ],
+    Routes = cowboy_router:compile([{'_',  VhostRoutes}]), % any vhost
+    NumTcpAcceptors = case application:get_env(rabbitmq_web_stomp, num_tcp_acceptors) of
+        undefined -> get_env(num_acceptors, 10);
+        {ok, NumTcp}  -> NumTcp
+    end,
+    cowboy:start_http(http, NumTcpAcceptors,
+                      TCPConf,
+                      [{env, [{dispatch, Routes}]}|CowboyOpts]),
     rabbit_log:info("rabbit_web_stomp: listening for HTTP connections on ~s:~w~n",
                     ["0.0.0.0", Port]),
     case get_env(ssl_config, []) of
         [] ->
             ok;
-        Conf ->
+        TLSConf ->
             rabbit_networking:ensure_ssl(),
-            TLSPort = proplists:get_value(port, Conf),
-            cowboy:start_listener(https, NbAcceptors,
-                                  cowboy_ssl_transport, Conf,
-                                  cowboy_http_protocol, [{dispatch, Routes}]),
+            TLSPort = proplists:get_value(port, TLSConf),
+            NumSslAcceptors = case application:get_env(rabbitmq_web_stomp, num_ssl_acceptors) of
+                undefined -> get_env(num_acceptors, 1);
+                {ok, NumSsl}  -> NumSsl
+            end,
+            cowboy:start_https(https, NumSslAcceptors,
+                               TLSConf,
+                               [{env, [{dispatch, Routes}]}|CowboyOpts]),
             rabbit_log:info("rabbit_web_stomp: listening for HTTPS connections on ~s:~w~n",
                             ["0.0.0.0", TLSPort])
     end,
@@ -66,7 +89,7 @@ logger(_Service, Req, _Type) ->
 %% --------------------------------------------------------------------------
 
 service_stomp(Conn, init, _State) ->
-    {ok, _Sup, Pid} = rabbit_ws_sup:start_client({Conn}),
+    {ok, _Sup, Pid} = rabbit_ws_sup:start_client({Conn, no_heartbeat}),
     {ok, Pid};
 
 service_stomp(_Conn, {recv, Data}, Pid) ->
similarity index 94%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_sup.erl
rename to deps/rabbitmq_web_stomp/src/rabbit_ws_sup.erl
index e8463ace27c7efb3ce53dab001b1a581028377a8..3a64b2d5cbf31d02e52ba0db02832cff25edc54b 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_ws_sup).
diff --git a/deps/rabbitmq_web_stomp/src/rabbitmq_web_stomp.app.src b/deps/rabbitmq_web_stomp/src/rabbitmq_web_stomp.app.src
new file mode 100644 (file)
index 0000000..9abccc5
--- /dev/null
@@ -0,0 +1,18 @@
+{application, rabbitmq_web_stomp,
+ [
+  {description, "Rabbit WEB-STOMP - WebSockets to Stomp adapter"},
+  {vsn, "3.6.6"},
+  {modules, []},
+  {registered, []},
+  {mod, {rabbit_ws_app, []}},
+  {env, [{port, 15674},
+         {tcp_config, []},
+         {num_tcp_acceptors, 10},
+         {ssl_config, []},
+         {num_ssl_acceptors, 1},
+         {cowboy_opts, []},
+         {sockjs_opts, []},
+         {ws_frame, text},
+         {use_http_auth, false}]},
+  {applications, [kernel, stdlib, rabbit_common, rabbit, rabbitmq_stomp, cowboy, sockjs]}
+ ]}.
diff --git a/deps/rabbitmq_web_stomp_examples/CODE_OF_CONDUCT.md b/deps/rabbitmq_web_stomp_examples/CODE_OF_CONDUCT.md
new file mode 100644 (file)
index 0000000..1f6ef1c
--- /dev/null
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+   without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_web_stomp_examples/CONTRIBUTING.md b/deps/rabbitmq_web_stomp_examples/CONTRIBUTING.md
new file mode 100644 (file)
index 0000000..45bbcbe
--- /dev/null
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_web_stomp_examples/Makefile b/deps/rabbitmq_web_stomp_examples/Makefile
new file mode 100644 (file)
index 0000000..c80fa97
--- /dev/null
@@ -0,0 +1,14 @@
+PROJECT = rabbitmq_web_stomp_examples
+
+DEPS = rabbit_common rabbit rabbitmq_web_dispatch rabbitmq_web_stomp
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_web_stomp_examples/erlang.mk b/deps/rabbitmq_web_stomp_examples/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simply writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards "and" for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for Erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab propotocol implement by erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+define dep_autopatch2
+       if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+       fi; \
+       $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+       if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+               $(call dep_autopatch_fetch_rebar); \
+               $(call dep_autopatch_rebar,$(1)); \
+       else \
+               $(call dep_autopatch_gen,$(1)); \
+       fi
+endef
+
+define dep_autopatch_noop
+       printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+       echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+               > $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+       :
+endef
+endif
+
+define dep_autopatch_gen
+       printf "%s\n" \
+               "ERLC_OPTS = +debug_info" \
+               "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+define dep_autopatch_fetch_rebar
+       mkdir -p $(ERLANG_MK_TMP); \
+       if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+               git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+               cd $(ERLANG_MK_TMP)/rebar; \
+               git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+               $(MAKE); \
+               cd -; \
+       fi
+endef
+
+define dep_autopatch_rebar
+       if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+       fi; \
+       $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+       rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+       application:load(rebar),
+       application:set_env(rebar, log_level, debug),
+       Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+               {ok, Conf0} -> Conf0;
+               _ -> []
+       end,
+       {Conf, OsEnv} = fun() ->
+               case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+                       false -> {Conf1, []};
+                       true ->
+                               Bindings0 = erl_eval:new_bindings(),
+                               Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+                               Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+                               Before = os:getenv(),
+                               {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+                               {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+               end
+       end(),
+       Write = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+       end,
+       Escape = fun (Text) ->
+               re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+       end,
+       Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+               "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+       Write("C_SRC_DIR = /path/do/not/exist\n"),
+       Write("C_SRC_TYPE = rebar\n"),
+       Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+       Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+       fun() ->
+               Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+               case lists:keyfind(erl_opts, 1, Conf) of
+                       false -> ok;
+                       {_, ErlOpts} ->
+                               lists:foreach(fun
+                                       ({d, D}) ->
+                                               Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                       ({i, I}) ->
+                                               Write(["ERLC_OPTS += -I ", I, "\n"]);
+                                       ({platform_define, Regex, D}) ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+                                                       false -> ok
+                                               end;
+                                       ({parse_transform, PT}) ->
+                                               Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+                                       (_) -> ok
+                               end, ErlOpts)
+               end,
+               Write("\n")
+       end(),
+       fun() ->
+               File = case lists:keyfind(deps, 1, Conf) of
+                       false -> [];
+                       {_, Deps} ->
+                               [begin case case Dep of
+                                                       {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+                                                       {N, S} when is_tuple(S) -> {N, S};
+                                                       {N, _, S} -> {N, S};
+                                                       {N, _, S, _} -> {N, S};
+                                                       _ -> false
+                                               end of
+                                       false -> ok;
+                                       {Name, Source} ->
+                                               {Method, Repo, Commit} = case Source of
+                                                       {hex, V} -> {hex, V, undefined};
+                                                       {git, R} -> {git, R, master};
+                                                       {M, R, {branch, C}} -> {M, R, C};
+                                                       {M, R, {ref, C}} -> {M, R, C};
+                                                       {M, R, {tag, C}} -> {M, R, C};
+                                                       {M, R, C} -> {M, R, C}
+                                               end,
+                                               Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+                               end end || Dep <- Deps]
+               end
+       end(),
+       fun() ->
+               case lists:keyfind(erl_first_files, 1, Conf) of
+                       false -> ok;
+                       {_, Files} ->
+                               Names = [[" ", case lists:reverse(F) of
+                                       "lre." ++ Elif -> lists:reverse(Elif);
+                                       Elif -> lists:reverse(Elif)
+                               end] || "src/" ++ F <- Files],
+                               Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+               end
+       end(),
+       Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+       Write("\npreprocess::\n"),
+       Write("\npre-deps::\n"),
+       Write("\npre-app::\n"),
+       PatchHook = fun(Cmd) ->
+               case Cmd of
+                       "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+                       "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+                       _ -> Escape(Cmd)
+               end
+       end,
+       fun() ->
+               case lists:keyfind(pre_hooks, 1, Conf) of
+                       false -> ok;
+                       {_, Hooks} ->
+                               [case H of
+                                       {'get-deps', Cmd} ->
+                                               Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+                                       {compile, Cmd} ->
+                                               Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                       {Regex, compile, Cmd} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+                                                       false -> ok
+                                               end;
+                                       _ -> ok
+                               end || H <- Hooks]
+               end
+       end(),
+       ShellToMk = fun(V) ->
+               re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+                       "-Werror\\\\b", "", [{return, list}, global])
+       end,
+       PortSpecs = fun() ->
+               case lists:keyfind(port_specs, 1, Conf) of
+                       false ->
+                               case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+                                       false -> [];
+                                       true ->
+                                               [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+                                                       proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+                               end;
+                       {_, Specs} ->
+                               lists:flatten([case S of
+                                       {Output, Input} -> {ShellToMk(Output), Input, []};
+                                       {Regex, Output, Input} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, []};
+                                                       false -> []
+                                               end;
+                                       {Regex, Output, Input, [{env, Env}]} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {ShellToMk(Output), Input, Env};
+                                                       false -> []
+                                               end
+                               end || S <- Specs])
+               end
+       end(),
+       PortSpecWrite = fun (Text) ->
+               file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+       end,
+       case PortSpecs of
+               [] -> ok;
+               _ ->
+                       Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+                       PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+                               [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+                       PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+                               [code:lib_dir(erl_interface, lib)])),
+                       [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+                       FilterEnv = fun(Env) ->
+                               lists:flatten([case E of
+                                       {_, _} -> E;
+                                       {Regex, K, V} ->
+                                               case rebar_utils:is_arch(Regex) of
+                                                       true -> {K, V};
+                                                       false -> []
+                                               end
+                               end || E <- Env])
+                       end,
+                       MergeEnv = fun(Env) ->
+                               lists:foldl(fun ({K, V}, Acc) ->
+                                       case lists:keyfind(K, 1, Acc) of
+                                               false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+                                               {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+                                       end
+                               end, [], Env)
+                       end,
+                       PortEnv = case lists:keyfind(port_env, 1, Conf) of
+                               false -> [];
+                               {_, PortEnv0} -> FilterEnv(PortEnv0)
+                       end,
+                       PortSpec = fun ({Output, Input0, Env}) ->
+                               filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+                               Input = [[" ", I] || I <- Input0],
+                               PortSpecWrite([
+                                       [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+                                       case $(PLATFORM) of
+                                               darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+                                               _ -> ""
+                                       end,
+                                       "\n\nall:: ", Output, "\n\n",
+                                       "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+                                       [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+                                       Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+                                               "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+                                       "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+                                       case {filename:extension(Output), $(PLATFORM)} of
+                                           {[], _} -> "\n";
+                                           {_, darwin} -> "\n";
+                                           _ -> " -shared\n"
+                                       end])
+                       end,
+                       [PortSpec(S) || S <- PortSpecs]
+       end,
+       Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+       RunPlugin = fun(Plugin, Step) ->
+               case erlang:function_exported(Plugin, Step, 2) of
+                       false -> ok;
+                       true ->
+                               c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+                               Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+                                       dict:store(base_dir, "", dict:new())}, undefined),
+                               io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+               end
+       end,
+       fun() ->
+               case lists:keyfind(plugins, 1, Conf) of
+                       false -> ok;
+                       {_, Plugins} ->
+                               [begin
+                                       case lists:keyfind(deps, 1, Conf) of
+                                               false -> ok;
+                                               {_, Deps} ->
+                                                       case lists:keyfind(P, 1, Deps) of
+                                                               false -> ok;
+                                                               _ ->
+                                                                       Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+                                                                       io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+                                                                       code:add_patha(Path ++ "/ebin")
+                                                       end
+                                       end
+                               end || P <- Plugins],
+                               [case code:load_file(P) of
+                                       {module, P} -> ok;
+                                       _ ->
+                                               case lists:keyfind(plugin_dir, 1, Conf) of
+                                                       false -> ok;
+                                                       {_, PluginsDir} ->
+                                                               ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+                                                               {ok, P, Bin} = compile:file(ErlFile, [binary]),
+                                                               {module, P} = code:load_binary(P, ErlFile, Bin)
+                                               end
+                               end || P <- Plugins],
+                               [RunPlugin(P, preprocess) || P <- Plugins],
+                               [RunPlugin(P, pre_compile) || P <- Plugins],
+                               [RunPlugin(P, compile) || P <- Plugins]
+               end
+       end(),
+       halt()
+endef
+
+# Rewrite $(DEPS_DIR)/$1/ebin/$1.app so that its 'modules' entry lists
+# every .erl file found (recursively) under $(DEPS_DIR)/$1/src.
+# No-op when the .app file does not exist. $(1) is the dependency name.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# Materialize src/$1.app.src from its dynamic src/$1.app.src.script
+# companion: evaluate the script with empty bindings and write the
+# resulting term back as a plain .app.src file.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# Normalize a rebar-style application resource file so erlang.mk can
+# rebuild it: falls back to ebin/$1.app when src/$1.app.src is absent,
+# empties 'modules' (regenerated later by dep_autopatch_app.erl),
+# replaces the rebar 'vsn' placeholder atom 'git' with the string "git",
+# guarantees a 'registered' entry exists, and deletes the input file
+# when it differs from the output location.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# Fetch method "git": clone without checking out a working tree (-n),
+# then check out the pinned commit/branch/tag from dep_commit.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# Fetch method "git-submodule": the dependency is a submodule of the
+# current repository; initialize/update just that path.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# Fetch method "hg": clone without updating the working copy (-U),
+# then update to the pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# Fetch method "svn": plain checkout of the repository URL.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Fetch method "cp": the "repository" is a local directory; copy it.
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Download package $(1) version $(2) from the hex.pm S3 tarball mirror,
+# unpack the outer tar in memory, then extract the inner
+# contents.tar.gz into the dependency directory. Fails (badmatch) on
+# any non-200 response or malformed archive.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+# Word 2 of $(dep_<name>) is the version string; it is handed to
+# dep_fetch_hex.erl, which downloads and unpacks the tarball.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# Fallback fetch method: abort with a diagnostic on stderr. Exit code
+# 78 matches EX_CONFIG from BSD sysexits(3) -- presumably chosen for
+# that meaning ("configuration error"); confirm before relying on it.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Legacy dep_<name> format: "<repo> [<ref>]". Clones with git and
+# checks out word 2 when present, otherwise master.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# Resolve the fetch-method name for dependency $(1):
+#  - dep_<name> is set and its first word names an existing
+#    dep_fetch_<method> macro: use that method;
+#  - dep_<name> is set but the method is unknown: "legacy" format when
+#    building as a dependency (IS_DEP), otherwise "fail";
+#  - dep_<name> unset: use the package index entry from $(PACKAGES)
+#    when one exists, otherwise "fail".
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# Rule template, instantiated per dependency via $(eval): generates the
+# target that fetches and prepares $(DEPS_DIR)/<dep>. The $$ escapes
+# survive the $(eval) expansion. Steps: refuse to proceed when a
+# same-named application exists in APPS_DIR (exit 17); fetch with the
+# method resolved by dep_fetch; run autoreconf when only
+# configure.ac/.in is present; run ./configure best-effort (note the
+# '-' recipe prefix ignores its failure). The NO_AUTOPATCH guard skips
+# autopatching; the amqp_client/rabbit branches are RabbitMQ-specific
+# patching paths enabled by RABBITMQ_CLIENT_PATCH/RABBITMQ_SERVER_PATCH.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate a fetch/prepare rule for every build-time and runtime dep.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every application listed in
+# ALL_APPS_DIRS. IS_APP=1 stops the sub-make from recursing again;
+# 'exit $$?' propagates the first sub-make failure.
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean removes the whole dependency tree unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+# Each is a log file under $(ERLANG_MK_TMP) caching one flavor of the
+# recursively-computed dependency list.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# Include a plugin makefile shipped inside a dependency, and make that
+# include depend on the dependency ($2) having been fetched first. The
+# '-include' keeps the first pass silent while the dep is still absent.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# DEP_PLUGINS entries are either "<dep>" (loads <dep>/plugins.mk) or an
+# explicit "<dep>/path/to/file.mk".
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+# DTL_FULL_PATH: non-empty -> template module names encode the full
+# path (slashes become underscores); empty -> basename only.
+DTL_FULL_PATH ?=
+# Root directory searched for *.dtl templates.
+DTL_PATH ?= templates/
+# Suffix appended to each generated template module name.
+DTL_SUFFIX ?= _dtl
+# Extra options passed through to erlydtl:compile/3.
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# Expected .beam outputs mirror the module-naming scheme selected by
+# DTL_FULL_PATH (path-encoded vs basename).
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+# A timestamp marker file: when any Makefile is newer, touch all
+# templates so the compile rule below sees them as outdated.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# Batch-compile the template files given in $(1) with erlydtl.
+# Accepts both 'ok' and '{ok, _}' results; anything else crashes the
+# emulator and fails the build.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Compile only the templates that changed since the last run ($?).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# Compile .proto files: generate .erl/.hrl via protobuffs, compile the
+# generated .erl into ebin/, then drop the intermediate sources.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# Run protobuffs_compile on each file in $(1); headers go next to the
+# project's include/, sources next to ebin/.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+# Recompile only the .proto files newer than the target ($?).
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; commented alternatives are opt-in.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+# Module names (no extension) that must compile before the rest,
+# e.g. behaviours and parse transforms; expanded to src/ paths below.
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+# Module names excluded from the verbose ERLC listing.
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+# Same knobs for SNMP MIB compilation.
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+
+# Per-step verbosity selectors, keyed on $(V): V=0 prints a short tag,
+# V=2 traces the recipe with 'set -x', anything else is silent passthrough.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# An ebin/test marker means the previous build was a test build;
+# force a clean rebuild in that case so test-flavored beams don't leak
+# into the normal application build.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# Template for the generated ebin/$(PROJECT).app contents. Two
+# variants: a library application (no src/$(PROJECT_MOD).erl) gets no
+# 'mod' entry and an empty 'registered' list; an active application
+# additionally declares its callback module and registered processes.
+# $(1) is the commit id (only emitted when built as a dep, IS_DEP);
+# $(2) is the module list.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+# The ':' no-op recipe keeps make from reporting "Nothing to be done".
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+# All Erlang and Core Erlang sources under src/, recursively.
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+# The ASN.1 compiler emits one .erl per .asn1; add them to the build.
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# Generate sources with 'erlc +noobj' (no .beam), then move the
+# generated .erl into src/ and the .hrl/.asn1db into include/.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+# Compile .mib -> priv/mibs/*.bin, then .bin -> include/*.hrl.
+# COMPILE_MIB_FIRST_PATHS orders MIBs with import dependencies first.
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+# Scanner (.xrl) and parser (.yrl) definitions each generate a src/*.erl
+# which then participates in the normal compile.
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+# Build the inter-module dependency file $(1) (normally $(PROJECT).d).
+# Parses each module in ERL_FILES form-by-form and records, in an ets
+# bag, edges created by -behavio(u)r, -compile({parse_transform,_}),
+# -include / -include_lib, and -import attributes; only modules/headers
+# that exist inside this project are recorded. A digraph of the
+# module-level edges is topologically sorted to extend COMPILE_FIRST.
+# The emitted rules touch their target so make sees the .d entries as
+# up to date after a rebuild.
+define makedep.erl
+	E = ets:new(makedep, [bag]),
+	G = digraph:new([acyclic]),
+	ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+	Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+	Add = fun (Mod, Dep) ->
+		case lists:keyfind(Dep, 1, Modules) of
+			false -> ok;
+			{_, DepFile} ->
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile}),
+				digraph:add_vertex(G, Mod),
+				digraph:add_vertex(G, Dep),
+				digraph:add_edge(G, Mod, Dep)
+		end
+	end,
+	AddHd = fun (F, Mod, DepFile) ->
+		case file:open(DepFile, [read]) of
+			{error, enoent} -> ok;
+			{ok, Fd} ->
+				F(F, Fd, Mod),
+				{_, ModFile} = lists:keyfind(Mod, 1, Modules),
+				ets:insert(E, {ModFile, DepFile})
+		end
+	end,
+	Attr = fun
+		(F, Mod, behavior, Dep) -> Add(Mod, Dep);
+		(F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+		(F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+		(F, Mod, compile, Opts) when is_list(Opts) ->
+			case proplists:get_value(parse_transform, Opts) of
+				undefined -> ok;
+				Dep -> Add(Mod, Dep)
+			end;
+		(F, Mod, include, Hrl) ->
+			case filelib:is_file("include/" ++ Hrl) of
+				true -> AddHd(F, Mod, "include/" ++ Hrl);
+				false ->
+					case filelib:is_file("src/" ++ Hrl) of
+						true -> AddHd(F, Mod, "src/" ++ Hrl);
+						false -> false
+					end
+			end;
+		(F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+		(F, Mod, import, {Imp, _}) ->
+			case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+				false -> ok;
+				true -> Add(Mod, Imp)
+			end;
+		(_, _, _, _) -> ok
+	end,
+	MakeDepend = fun(F, Fd, Mod) ->
+		case io:parse_erl_form(Fd, undefined) of
+			{ok, {attribute, _, Key, Value}, _} ->
+				Attr(F, Mod, Key, Value),
+				F(F, Fd, Mod);
+			{eof, _} ->
+				file:close(Fd);
+			_ ->
+				F(F, Fd, Mod)
+		end
+	end,
+	[begin
+		Mod = list_to_atom(filename:basename(F, ".erl")),
+		{ok, Fd} = file:open(F, [read]),
+		MakeDepend(MakeDepend, Fd, Mod)
+	end || F <- ErlFiles],
+	Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+	CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+	ok = file:write_file("$(1)", [
+		[[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+		"\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+	]),
+	halt()
+endef
+
+# Regenerate the $(PROJECT).d dependency file whenever a source file, an
+# include/*.hrl header or any makefile changes — unless NO_MAKEDEP is set
+# and a .d file already exists.
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+	$(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+# The timestamp file records the last makefile change; when it is older than
+# MAKEFILE_LIST, every source file is touched so everything recompiles, and
+# the .d file is touched (-c: only if it exists) so it is not regenerated.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+		touch -c $(PROJECT).d; \
+	fi
+	@touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+	$(verbose) mkdir -p ebin/
+
+# Compile a batch of Erlang sources into ebin/. Dependencies (IS_DEP=1) are
+# compiled without -Werror so their warnings do not break the parent build.
+define compile_erl
+	$(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+		-pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+# Build the application: recompile only the out-of-date sources ($?), then
+# write ebin/$(PROJECT).app — either generated from scratch (no .app.src)
+# or from src/$(PROJECT).app.src with its modules list and "git" id filled in.
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+	$(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+	$(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+	$(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+	$(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+		$(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+	$(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+		> ebin/$(PROJECT).app
+else
+# The .app.src must contain a {modules, ...} entry for sed below to rewrite;
+# fail loudly when it is missing rather than emitting a broken .app file.
+	$(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+		echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+		exit 1; \
+	fi
+	$(appsrc_verbose) cat src/$(PROJECT).app.src \
+		| sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+		| sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+		> ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+# Remove everything the app build produced: the dependency file, compiled
+# beams (ebin/), compiled MIBs, and the .hrl/.erl/.asn1db files generated
+# from mib and asn1 sources.
+clean-app:
+	$(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+		$(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+		$(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+		$(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+# Absolute paths of the documentation-only dependencies, rooted in DEPS_DIR.
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+# Generate a fetch/build target for each documentation dependency.
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+# SKIP_DEPS set: keep doc-deps as a no-op so dependents still resolve.
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+# Fail fast: without `|| exit`, a failing sub-make is ignored and the loop's
+# exit status is only that of the *last* dependency built.
+	$(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep || exit $$?; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+# Absolute paths of the release-only dependencies, rooted in DEPS_DIR.
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+# Generate a fetch/build target for each release dependency.
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+# SKIP_DEPS set: keep rel-deps as a no-op so dependents still resolve.
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+# Fail fast: without `|| exit`, a failing sub-make is ignored and the loop's
+# exit status is only that of the *last* dependency built.
+	$(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep || exit $$?; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+# Absolute paths of the test-only dependencies, rooted in DEPS_DIR.
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+# Generate a fetch/build target for each test dependency.
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+# SKIP_DEPS set: keep test-deps as a no-op so dependents still resolve.
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+# IS_DEP=1 builds dependencies without -Werror (see compile_erl). Fail fast:
+# without `|| exit`, a failing sub-make is silently ignored by the for loop.
+	$(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1 || exit $$?; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+# Compile all test sources straight into TEST_DIR using the test options.
+test-dir:
+	$(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+		$(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+# test-build has three variants:
+#  - no src/: only deps and tests need building;
+#  - src/ present, no ebin/test marker: the app was last built without test
+#    options, so clean first, rebuild everything, then drop the marker;
+#  - marker present: incremental rebuild with test options.
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+	$(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+# ebin/test marks that ebin/ currently holds test-compiled beams.
+	$(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+	$(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+# Normalise an ERLC_OPTS string so each option becomes a single word
+# (collapse ", " into ",").
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+# Convert one erlc option for rebar's erl_opts: -Werror is dropped, and only
+# '+'-style options survive (with the leading '+' stripped off by cut).
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+	$(if $(findstring +,$1),\
+		$(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+# Full rebar.config body: one deps entry per DEPS item (hex package or git
+# {repo, commit}), plus erl_opts derived from ERLC_OPTS.
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+	$(if $(filter hex,$(call dep_fetch,$d)),\
+		{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+		{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+# Export the rendered config through the environment so the multi-line value
+# survives into the recipe's shell.
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+	$(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+# No guide sources: keep the target as a no-op.
+asciidoc-guide:
+else
+# Render the user guide to PDF and chunked HTML via a2x, after a full docs
+# clean (distclean-asciidoc) so stale output never lingers.
+asciidoc-guide: distclean-asciidoc doc-deps
+	a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+	a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+# No manual sources: keep the target as a no-op.
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+# Render every manual page, then sort the generated pages into per-section
+# directories and compress them. Abort on the first a2x failure instead of
+# silently continuing with a partial set of pages.
+	for f in doc/src/manual/*.asciidoc ; do \
+		a2x -v -f manpage $$f || exit $$? ; \
+	done
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p doc/man$$s/ ; \
+		mv doc/src/manual/*.$$s doc/man$$s/ ; \
+		gzip doc/man$$s/*.$$s ; \
+	done
+
+install-docs:: install-asciidoc
+
+# Install the gzipped man pages. Bug fix: install(1) takes the owner with -o
+# and the group with -g, so they must be given `id -u` and `id -g`
+# respectively — the original had the two substitutions swapped.
+install-asciidoc: asciidoc-manual
+	for s in $(MAN_SECTIONS); do \
+		mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+		install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+	done
+endif
+
+distclean:: distclean-asciidoc
+
+# Remove all asciidoc output: HTML guide, PDF guide and generated man pages.
+distclean-asciidoc:
+	$(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+# Render template variable $(1) into file $(2). The subst chain prepares the
+# template body for printf: newlines become \n, '%' is doubled, single quotes
+# are shell-escaped, and tabs are replaced with $(WS).
+define render_template
+	$(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# WS is the whitespace written out for each template tab: SP spaces when SP
+# is set (capped at 20 by the wordlist), otherwise a literal tab.
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+# Ask the running Erlang VM where its erts headers and erl_interface trees
+# live, and cache the answers as make assignments in C_SRC_ENV, which is
+# included below so CFLAGS/LDLIBS above resolve.
+$(C_SRC_ENV):
+	$(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+		io_lib:format( \
+			\"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+			\"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+			\"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+			[code:root_dir(), erlang:system_info(version), \
+			code:lib_dir(erl_interface, include), \
+			code:lib_dir(erl_interface, lib)])), \
+		halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+	$(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+# Help text for the common_test plugin. Fixes the user-facing grammar error
+# "can be ran" -> "can be run".
+help::
+	$(verbose) printf "%s\n" "" \
+		"Common_test targets:" \
+		"  ct          Run all the common_test suites for this project" \
+		"" \
+		"All your common_test suites have their associated targets." \
+		"A suite named http_SUITE can be run using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+# Exported so the dialyzer command below picks the project-local PLT.
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+	$(verbose) printf "%s\n" "" \
+		"Dialyzer targets:" \
+		"  plt         Build a PLT file for this project" \
+		"  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+# Erlang snippet run via `erl -eval`: keeps only the -D/-I/-pa options (and
+# their argument words) from the plain arguments, prints them space-joined.
+# The \\$$ sequences survive make expansion as Erlang $- character literals.
+define filter_opts.erl
+	Opts = init:get_plain_arguments(),
+	{Filtered, _} = lists:foldl(fun
+		(O,                         {Os, true}) -> {[O|Os], false};
+		(O = "-D",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-I",                  {Os, _})    -> {[O|Os], true};
+		(O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+		(O = "-pa",                 {Os, _})    -> {[O|Os], true};
+		(_,                         Acc)        -> Acc
+	end, {[], false}, Opts),
+	io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+	halt().
+endef
+
+$(DIALYZER_PLT): deps app
+	$(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+	$(gen_verbose) rm -f $(DIALYZER_PLT)
+
+# dialyze only depends on the PLT when one does not exist yet. NOTE: the
+# recipe line deliberately follows the endif — make conditionals are textual,
+# so it attaches to whichever dialyze target was defined above.
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+	$(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
similarity index 92%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/bunny.html
rename to deps/rabbitmq_web_stomp_examples/priv/bunny.html
index 15f686df8c8262aa066cec1246126d8e8175dc16..282bcb401e29752a8c6128b2c72e651174f4b469 100644 (file)
@@ -1,7 +1,7 @@
 <!doctype html>
 <html><head>
     <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.1/jquery.min.js"></script>
-    <script src="sockjs-0.3.js"></script>
+    <script src="//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js"></script>
     <script src="stomp.js"></script>
 
     <style>
@@ -108,13 +108,13 @@ else {
 }
 
 // Stomp.js boilerplate
-var ws = new SockJS('http://' + window.location.hostname + ':15674/stomp');
+if (location.search == '?ws') {
+    var ws = new WebSocket('ws://' + window.location.hostname + ':15674/ws');
+} else {
+    var ws = new SockJS('http://' + window.location.hostname + ':15674/stomp');
+}
 var client = Stomp.over(ws);
 
-// SockJS does not support heart-beat: disable heart-beats
-client.heartbeat.outgoing = 0;
-client.heartbeat.incoming = 0;
-
 client.debug = function() {
   if (window.console && console.log && console.log.apply) {
     console.log.apply(console, arguments);
similarity index 88%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/echo.html
rename to deps/rabbitmq_web_stomp_examples/priv/echo.html
index 2119dfd0f6006a818a3891aa6911f777fb1fbbb7..9dc21c658ce3754c15f8c04f0cbd1f117d59019b 100644 (file)
@@ -1,7 +1,7 @@
 <!DOCTYPE html>
 <html><head>
   <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery.min.js"></script>
-  <script src="sockjs-0.3.js"></script>
+  <script src="//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js"></script>
   <script src="stomp.js"></script>
   <style>
       .box {
         };
 
       // Stomp.js boilerplate
-      var ws = new SockJS('http://' + window.location.hostname + ':15674/stomp');
-      var client = Stomp.over(ws);
+      if (location.search == '?ws') {
+          var client = Stomp.client('ws://' + window.location.hostname + ':15674/ws');
+      } else {
+          var ws = new SockJS('http://' + window.location.hostname + ':15674/stomp');
+          var client = Stomp.over(ws);
+      }
 
-      // SockJS does not support heart-beat: disable heart-beats
-      client.heartbeat.outgoing = 0;
-      client.heartbeat.incoming = 0;
       client.debug = pipe('#second');
 
       var print_first = pipe('#first', function(data) {
similarity index 62%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/index.html
rename to deps/rabbitmq_web_stomp_examples/priv/index.html
index f722addd85216ca6ca70eeddbf6db342b4df9ce8..7f7df34b01c2ae05d48fbd9ffe6630f80f78655e 100644 (file)
@@ -7,10 +7,17 @@
   </head>
   <body>
     <h1>RabbitMQ Web STOMP Examples</h1>
+    <h2>Over SockJS</h2>
     <ul class="menu">
       <li><a href="echo.html">Simple Echo Server</a></li>
       <li><a href="bunny.html">Bunny Drawing</a></li>
       <li><a href="temp-queue.html">Temporary Queue Example</a></li>
     </ul>
+    <h2>Over plain Websocket</h2>
+    <ul class="menu">
+      <li><a href="echo.html?ws">Simple Echo Server</a></li>
+      <li><a href="bunny.html?ws">Bunny Drawing</a></li>
+      <li><a href="temp-queue.html?ws">Temporary Queue Example</a></li>
+    </ul>
   </body>
 </html>
diff --git a/deps/rabbitmq_web_stomp_examples/priv/stomp.js b/deps/rabbitmq_web_stomp_examples/priv/stomp.js
new file mode 100644 (file)
index 0000000..9c30356
--- /dev/null
@@ -0,0 +1,501 @@
+// Generated by CoffeeScript 1.7.1
+
+/*
+   Stomp Over WebSocket http://www.jmesnil.net/stomp-websocket/doc/ | Apache License V2.0
+
+   Copyright (C) 2010-2013 [Jeff Mesnil](http://jmesnil.net/)
+   Copyright (C) 2012 [FuseSource, Inc.](http://fusesource.com)
+ */
+
+(function() {
+  var Byte, Client, Frame, Stomp,
+    __hasProp = {}.hasOwnProperty,
+    __slice = [].slice;
+
+  Byte = {
+    LF: '\x0A',
+    NULL: '\x00'
+  };
+
+  Frame = (function() {
+    var unmarshallSingle;
+
+    function Frame(command, headers, body) {
+      this.command = command;
+      this.headers = headers != null ? headers : {};
+      this.body = body != null ? body : '';
+    }
+
+    Frame.prototype.toString = function() {
+      var lines, name, skipContentLength, value, _ref;
+      lines = [this.command];
+      skipContentLength = this.headers['content-length'] === false ? true : false;
+      if (skipContentLength) {
+        delete this.headers['content-length'];
+      }
+      _ref = this.headers;
+      for (name in _ref) {
+        if (!__hasProp.call(_ref, name)) continue;
+        value = _ref[name];
+        lines.push("" + name + ":" + value);
+      }
+      if (this.body && !skipContentLength) {
+        lines.push("content-length:" + (Frame.sizeOfUTF8(this.body)));
+      }
+      lines.push(Byte.LF + this.body);
+      return lines.join(Byte.LF);
+    };
+
+    Frame.sizeOfUTF8 = function(s) {
+      if (s) {
+        return encodeURI(s).match(/%..|./g).length;
+      } else {
+        return 0;
+      }
+    };
+
+    unmarshallSingle = function(data) {
+      var body, chr, command, divider, headerLines, headers, i, idx, len, line, start, trim, _i, _j, _len, _ref, _ref1;
+      divider = data.search(RegExp("" + Byte.LF + Byte.LF));
+      headerLines = data.substring(0, divider).split(Byte.LF);
+      command = headerLines.shift();
+      headers = {};
+      trim = function(str) {
+        return str.replace(/^\s+|\s+$/g, '');
+      };
+      _ref = headerLines.reverse();
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        line = _ref[_i];
+        idx = line.indexOf(':');
+        headers[trim(line.substring(0, idx))] = trim(line.substring(idx + 1));
+      }
+      body = '';
+      start = divider + 2;
+      if (headers['content-length']) {
+        len = parseInt(headers['content-length']);
+        body = ('' + data).substring(start, start + len);
+      } else {
+        chr = null;
+        for (i = _j = start, _ref1 = data.length; start <= _ref1 ? _j < _ref1 : _j > _ref1; i = start <= _ref1 ? ++_j : --_j) {
+          chr = data.charAt(i);
+          if (chr === Byte.NULL) {
+            break;
+          }
+          body += chr;
+        }
+      }
+      return new Frame(command, headers, body);
+    };
+
+    Frame.unmarshall = function(datas) {
+      var frame, frames, last_frame, r;
+      frames = datas.split(RegExp("" + Byte.NULL + Byte.LF + "*"));
+      r = {
+        frames: [],
+        partial: ''
+      };
+      r.frames = (function() {
+        var _i, _len, _ref, _results;
+        _ref = frames.slice(0, -1);
+        _results = [];
+        for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+          frame = _ref[_i];
+          _results.push(unmarshallSingle(frame));
+        }
+        return _results;
+      })();
+      last_frame = frames.slice(-1)[0];
+      if (last_frame === Byte.LF || (last_frame.search(RegExp("" + Byte.NULL + Byte.LF + "*$"))) !== -1) {
+        r.frames.push(unmarshallSingle(last_frame));
+      } else {
+        r.partial = last_frame;
+      }
+      return r;
+    };
+
+    Frame.marshall = function(command, headers, body) {
+      var frame;
+      frame = new Frame(command, headers, body);
+      return frame.toString() + Byte.NULL;
+    };
+
+    return Frame;
+
+  })();
+
+  Client = (function() {
+    var now;
+
+    function Client(ws) {
+      this.ws = ws;
+      this.ws.binaryType = "arraybuffer";
+      this.counter = 0;
+      this.connected = false;
+      this.heartbeat = {
+        outgoing: 10000,
+        incoming: 10000
+      };
+      this.maxWebSocketFrameSize = 16 * 1024;
+      this.subscriptions = {};
+      this.partialData = '';
+    }
+
+    Client.prototype.debug = function(message) {
+      var _ref;
+      return typeof window !== "undefined" && window !== null ? (_ref = window.console) != null ? _ref.log(message) : void 0 : void 0;
+    };
+
+    now = function() {
+      if (Date.now) {
+        return Date.now();
+      } else {
+        return new Date().valueOf;
+      }
+    };
+
+    Client.prototype._transmit = function(command, headers, body) {
+      var out;
+      out = Frame.marshall(command, headers, body);
+      if (typeof this.debug === "function") {
+        this.debug(">>> " + out);
+      }
+      while (true) {
+        if (out.length > this.maxWebSocketFrameSize) {
+          this.ws.send(out.substring(0, this.maxWebSocketFrameSize));
+          out = out.substring(this.maxWebSocketFrameSize);
+          if (typeof this.debug === "function") {
+            this.debug("remaining = " + out.length);
+          }
+        } else {
+          return this.ws.send(out);
+        }
+      }
+    };
+
+    Client.prototype._setupHeartbeat = function(headers) {
+      var serverIncoming, serverOutgoing, ttl, v, _ref, _ref1;
+      if ((_ref = headers.version) !== Stomp.VERSIONS.V1_1 && _ref !== Stomp.VERSIONS.V1_2) {
+        return;
+      }
+      _ref1 = (function() {
+        var _i, _len, _ref1, _results;
+        _ref1 = headers['heart-beat'].split(",");
+        _results = [];
+        for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+          v = _ref1[_i];
+          _results.push(parseInt(v));
+        }
+        return _results;
+      })(), serverOutgoing = _ref1[0], serverIncoming = _ref1[1];
+      if (!(this.heartbeat.outgoing === 0 || serverIncoming === 0)) {
+        ttl = Math.max(this.heartbeat.outgoing, serverIncoming);
+        if (typeof this.debug === "function") {
+          this.debug("send PING every " + ttl + "ms");
+        }
+        this.pinger = Stomp.setInterval(ttl, (function(_this) {
+          return function() {
+            _this.ws.send(Byte.LF);
+            return typeof _this.debug === "function" ? _this.debug(">>> PING") : void 0;
+          };
+        })(this));
+      }
+      if (!(this.heartbeat.incoming === 0 || serverOutgoing === 0)) {
+        ttl = Math.max(this.heartbeat.incoming, serverOutgoing);
+        if (typeof this.debug === "function") {
+          this.debug("check PONG every " + ttl + "ms");
+        }
+        return this.ponger = Stomp.setInterval(ttl, (function(_this) {
+          return function() {
+            var delta;
+            delta = now() - _this.serverActivity;
+            if (delta > ttl * 2) {
+              if (typeof _this.debug === "function") {
+                _this.debug("did not receive server activity for the last " + delta + "ms");
+              }
+              return _this.ws.close();
+            }
+          };
+        })(this));
+      }
+    };
+
+    Client.prototype._parseConnect = function() {
+      var args, connectCallback, errorCallback, headers;
+      args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
+      headers = {};
+      switch (args.length) {
+        case 2:
+          headers = args[0], connectCallback = args[1];
+          break;
+        case 3:
+          if (args[1] instanceof Function) {
+            headers = args[0], connectCallback = args[1], errorCallback = args[2];
+          } else {
+            headers.login = args[0], headers.passcode = args[1], connectCallback = args[2];
+          }
+          break;
+        case 4:
+          headers.login = args[0], headers.passcode = args[1], connectCallback = args[2], errorCallback = args[3];
+          break;
+        default:
+          headers.login = args[0], headers.passcode = args[1], connectCallback = args[2], errorCallback = args[3], headers.host = args[4];
+      }
+      return [headers, connectCallback, errorCallback];
+    };
+
+    Client.prototype.connect = function() {
+      var args, errorCallback, headers, out;
+      args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
+      out = this._parseConnect.apply(this, args);
+      headers = out[0], this.connectCallback = out[1], errorCallback = out[2];
+      if (typeof this.debug === "function") {
+        this.debug("Opening Web Socket...");
+      }
+      this.ws.onmessage = (function(_this) {
+        return function(evt) {
+          var arr, c, client, data, frame, messageID, onreceive, subscription, unmarshalledData, _i, _len, _ref, _results;
+          data = typeof ArrayBuffer !== 'undefined' && evt.data instanceof ArrayBuffer ? (arr = new Uint8Array(evt.data), typeof _this.debug === "function" ? _this.debug("--- got data length: " + arr.length) : void 0, ((function() {
+            var _i, _len, _results;
+            _results = [];
+            for (_i = 0, _len = arr.length; _i < _len; _i++) {
+              c = arr[_i];
+              _results.push(String.fromCharCode(c));
+            }
+            return _results;
+          })()).join('')) : evt.data;
+          _this.serverActivity = now();
+          if (data === Byte.LF) {
+            if (typeof _this.debug === "function") {
+              _this.debug("<<< PONG");
+            }
+            return;
+          }
+          if (typeof _this.debug === "function") {
+            _this.debug("<<< " + data);
+          }
+          unmarshalledData = Frame.unmarshall(_this.partialData + data);
+          _this.partialData = unmarshalledData.partial;
+          _ref = unmarshalledData.frames;
+          _results = [];
+          for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+            frame = _ref[_i];
+            switch (frame.command) {
+              case "CONNECTED":
+                if (typeof _this.debug === "function") {
+                  _this.debug("connected to server " + frame.headers.server);
+                }
+                _this.connected = true;
+                _this._setupHeartbeat(frame.headers);
+                _results.push(typeof _this.connectCallback === "function" ? _this.connectCallback(frame) : void 0);
+                break;
+              case "MESSAGE":
+                subscription = frame.headers.subscription;
+                onreceive = _this.subscriptions[subscription] || _this.onreceive;
+                if (onreceive) {
+                  client = _this;
+                  messageID = frame.headers["message-id"];
+                  frame.ack = function(headers) {
+                    if (headers == null) {
+                      headers = {};
+                    }
+                    return client.ack(messageID, subscription, headers);
+                  };
+                  frame.nack = function(headers) {
+                    if (headers == null) {
+                      headers = {};
+                    }
+                    return client.nack(messageID, subscription, headers);
+                  };
+                  _results.push(onreceive(frame));
+                } else {
+                  _results.push(typeof _this.debug === "function" ? _this.debug("Unhandled received MESSAGE: " + frame) : void 0);
+                }
+                break;
+              case "RECEIPT":
+                _results.push(typeof _this.onreceipt === "function" ? _this.onreceipt(frame) : void 0);
+                break;
+              case "ERROR":
+                _results.push(typeof errorCallback === "function" ? errorCallback(frame) : void 0);
+                break;
+              default:
+                _results.push(typeof _this.debug === "function" ? _this.debug("Unhandled frame: " + frame) : void 0);
+            }
+          }
+          return _results;
+        };
+      })(this);
+      this.ws.onclose = (function(_this) {
+        return function() {
+          var msg;
+          msg = "Whoops! Lost connection to " + _this.ws.url;
+          if (typeof _this.debug === "function") {
+            _this.debug(msg);
+          }
+          _this._cleanUp();
+          return typeof errorCallback === "function" ? errorCallback(msg) : void 0;
+        };
+      })(this);
+      return this.ws.onopen = (function(_this) {
+        return function() {
+          if (typeof _this.debug === "function") {
+            _this.debug('Web Socket Opened...');
+          }
+          headers["accept-version"] = Stomp.VERSIONS.supportedVersions();
+          headers["heart-beat"] = [_this.heartbeat.outgoing, _this.heartbeat.incoming].join(',');
+          return _this._transmit("CONNECT", headers);
+        };
+      })(this);
+    };
+
+    Client.prototype.disconnect = function(disconnectCallback, headers) {
+      if (headers == null) {
+        headers = {};
+      }
+      this._transmit("DISCONNECT", headers);
+      this.ws.onclose = null;
+      this.ws.close();
+      this._cleanUp();
+      return typeof disconnectCallback === "function" ? disconnectCallback() : void 0;
+    };
+
+    Client.prototype._cleanUp = function() {
+      this.connected = false;
+      if (this.pinger) {
+        Stomp.clearInterval(this.pinger);
+      }
+      if (this.ponger) {
+        return Stomp.clearInterval(this.ponger);
+      }
+    };
+
+    Client.prototype.send = function(destination, headers, body) {
+      if (headers == null) {
+        headers = {};
+      }
+      if (body == null) {
+        body = '';
+      }
+      headers.destination = destination;
+      return this._transmit("SEND", headers, body);
+    };
+
+    Client.prototype.subscribe = function(destination, callback, headers) {
+      var client;
+      if (headers == null) {
+        headers = {};
+      }
+      if (!headers.id) {
+        headers.id = "sub-" + this.counter++;
+      }
+      headers.destination = destination;
+      this.subscriptions[headers.id] = callback;
+      this._transmit("SUBSCRIBE", headers);
+      client = this;
+      return {
+        id: headers.id,
+        unsubscribe: function() {
+          return client.unsubscribe(headers.id);
+        }
+      };
+    };
+
+    Client.prototype.unsubscribe = function(id) {
+      delete this.subscriptions[id];
+      return this._transmit("UNSUBSCRIBE", {
+        id: id
+      });
+    };
+
+    Client.prototype.begin = function(transaction) {
+      var client, txid;
+      txid = transaction || "tx-" + this.counter++;
+      this._transmit("BEGIN", {
+        transaction: txid
+      });
+      client = this;
+      return {
+        id: txid,
+        commit: function() {
+          return client.commit(txid);
+        },
+        abort: function() {
+          return client.abort(txid);
+        }
+      };
+    };
+
+    Client.prototype.commit = function(transaction) {
+      return this._transmit("COMMIT", {
+        transaction: transaction
+      });
+    };
+
+    Client.prototype.abort = function(transaction) {
+      return this._transmit("ABORT", {
+        transaction: transaction
+      });
+    };
+
+    Client.prototype.ack = function(messageID, subscription, headers) {
+      if (headers == null) {
+        headers = {};
+      }
+      headers["message-id"] = messageID;
+      headers.subscription = subscription;
+      return this._transmit("ACK", headers);
+    };
+
+    Client.prototype.nack = function(messageID, subscription, headers) {
+      if (headers == null) {
+        headers = {};
+      }
+      headers["message-id"] = messageID;
+      headers.subscription = subscription;
+      return this._transmit("NACK", headers);
+    };
+
+    return Client;
+
+  })();
+
+  Stomp = {
+    VERSIONS: {
+      V1_0: '1.0',
+      V1_1: '1.1',
+      V1_2: '1.2',
+      supportedVersions: function() {
+        return '1.1,1.0';
+      }
+    },
+    client: function(url, protocols) {
+      var klass, ws;
+      if (protocols == null) {
+        protocols = ['v10.stomp', 'v11.stomp'];
+      }
+      klass = Stomp.WebSocketClass || WebSocket;
+      ws = new klass(url, protocols);
+      return new Client(ws);
+    },
+    over: function(ws) {
+      return new Client(ws);
+    },
+    Frame: Frame
+  };
+
+  if (typeof exports !== "undefined" && exports !== null) {
+    exports.Stomp = Stomp;
+  }
+
+  if (typeof window !== "undefined" && window !== null) {
+    Stomp.setInterval = function(interval, f) {
+      return window.setInterval(f, interval);
+    };
+    Stomp.clearInterval = function(id) {
+      return window.clearInterval(id);
+    };
+    window.Stomp = Stomp;
+  } else if (!exports) {
+    self.Stomp = Stomp;
+  }
+
+}).call(this);
similarity index 91%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/temp-queue.html
rename to deps/rabbitmq_web_stomp_examples/priv/temp-queue.html
index 026f1ecaab73d1181823c916790dcc205cef7a1a..88a7549dc8e3d7c77ad0930c3328996b2ba2c697 100644 (file)
@@ -1,7 +1,7 @@
 <!DOCTYPE html>
 <html><head>
   <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery.min.js"></script>
-  <script src="sockjs-0.3.js"></script>
+  <script src="//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js"></script>
   <script src="stomp.js"></script>
   <style>
       .box {
     </div>
 
     <script>
-      var ws = new SockJS('http://' + window.location.hostname + ':15674/stomp');
+      if (location.search == '?ws') {
+          var ws = new WebSocket('ws://' + window.location.hostname + ':15674/ws');
+      } else {
+          var ws = new SockJS('http://' + window.location.hostname + ':15674/stomp');
+      }
       var client = Stomp.over(ws);
-      // SockJS does not support heart-beat: disable heart-beats
-      client.heartbeat.incoming = 0;
-      client.heartbeat.outgoing = 0;
 
       client.debug = function(e) {
         $('#second div').append($("<code>").text(e));
diff --git a/deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk b/deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+#   - The current project name and repository name are replaced by the
+#     target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fall back to RabbitMQ
+# upstream, which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
similarity index 95%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/src/rabbit_web_stomp_examples_app.erl
rename to deps/rabbitmq_web_stomp_examples/src/rabbit_web_stomp_examples_app.erl
index 91a3c2734bb199be418836dbe0d59b448f70ee4e..3bfd76d87c65b879e21c946cf541527712d442bb 100644 (file)
@@ -11,7 +11,7 @@
 %% The Original Code is RabbitMQ.
 %%
 %% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
+%% Copyright (c) 2007-2016 Pivotal Software, Inc.  All rights reserved.
 %%
 
 -module(rabbit_web_stomp_examples_app).
similarity index 63%
rename from rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/src/rabbitmq_web_stomp_examples.app.src
rename to deps/rabbitmq_web_stomp_examples/src/rabbitmq_web_stomp_examples.app.src
index 832438caa165fa0eb1eba7c89a0b6fdee0f68aef..9de1420e4f10d0af7a7c52f196de38ee82b17529 100644 (file)
@@ -1,8 +1,8 @@
 {application, rabbitmq_web_stomp_examples,
  [{description, "Rabbit WEB-STOMP - examples"},
-  {vsn, "%%VSN%%"},
+  {vsn, "3.6.6"},
   {modules, []},
   {registered, []},
   {mod, {rabbit_web_stomp_examples_app, []}},
   {env, [{listener, [{port, 15670}]}]},
-  {applications, [kernel, stdlib, rabbitmq_web_dispatch, rabbitmq_web_stomp]}]}.
+  {applications, [kernel, stdlib, rabbit_common, rabbit, rabbitmq_web_dispatch, rabbitmq_web_stomp]}]}.
diff --git a/deps/ranch/AUTHORS b/deps/ranch/AUTHORS
new file mode 100644 (file)
index 0000000..9b6374c
--- /dev/null
@@ -0,0 +1,33 @@
+Ranch is available thanks to the work of:
+
+Loïc Hoguin
+James Fish
+Andrew Majorov
+Ransom Richardson
+Fred Hebert
+Geoff Cant
+Klaus Trainer
+josh rotenberg
+0x00F6
+Alexander Zhuravlev
+Ali Sabil
+Andre Graf
+Andrew Thompson
+Jihyun Yu
+Slava Yurin
+Stéphane Wirtel
+Xiao Jia
+
+The Ranch code was initially part of Cowboy. Before it was
+split into a separate project, the following people worked
+on the code that then became Ranch:
+
+Loïc Hoguin
+Ali Sabil
+Andrew Thompson
+DeadZen
+Hunter Morris
+Jesper Louis Andersen
+Paul Oliver
+Roberto Ostinelli
+Steven Gravell
diff --git a/deps/ranch/CHANGELOG.asciidoc b/deps/ranch/CHANGELOG.asciidoc
new file mode 100644 (file)
index 0000000..1f47bb6
--- /dev/null
@@ -0,0 +1,29 @@
+= CHANGELOG
+
+== 1.2.0
+
+
+* Allow the supervised process and the process owning the socket to be different
+* Add many transport options (please refer to the documentation)
+* Add function ranch:get_addr/1 to retrieve both IP and port of listener
+* Don't pass Ranch-specific options down to transports
+** Should make Dialyzer happy in user projects.
+** New types ranch:opt(), ranch_tcp:opt(), ranch_ssl:ssl_opt() and ranch_ssl:opt()
+* Fix crash when filtering unknown options out
+* Print a warning for each option filtered out
+* Handle Transport:controlling_socket/2 errors and close the socket
+* Handle Protocol:start_link/4 crashes to avoid killing all active connections
+* Use Asciidoc for documentation.
+* Test Ranch across 14 Erlang versions on CircleCI.
+* Improve and document test suites with recent ct_helper improvements.
+* Fix a number of intermittent test issues.
+
+== 1.1.0
+
+* Add Transport:secure/0
+* Add SSL partial_chain option
+* Stop reporting errors on {error, closed} in accept_ack
+
+== 1.0.0
+
+* Initial release.
diff --git a/deps/ranch/LICENSE b/deps/ranch/LICENSE
new file mode 100644 (file)
index 0000000..4dda821
--- /dev/null
@@ -0,0 +1,13 @@
+Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/ranch/Makefile b/deps/ranch/Makefile
new file mode 100644 (file)
index 0000000..a1f6887
--- /dev/null
@@ -0,0 +1,40 @@
+# See LICENSE for licensing information.
+
+PROJECT = ranch
+
+# Options.
+
+COMPILE_FIRST = ranch_transport
+CT_OPTS += -pa test -ct_hooks ranch_ct_hook []
+PLT_APPS = crypto public_key ssl
+
+CI_OTP = \
+       OTP_R15B01 OTP_R15B02 OTP_R15B03-1 \
+       OTP_R16B OTP_R16B01 OTP_R16B02 OTP_R16B03-1 \
+       OTP-17.0.2 OTP-17.1.2 OTP-17.2.2 OTP-17.3.4 OTP-17.4.1 OTP-17.5.6.6 \
+       OTP-18.0.3 OTP-18.1.5 OTP-18.2.1
+
+# Only test on the most recent version on public CI services.
+ifdef CI
+ifndef BUILDKITE
+CI_OTP := $(lastword $(CI_OTP))
+endif
+endif
+
+# Dependencies.
+
+TEST_DEPS = ct_helper
+dep_ct_helper = git https://github.com/ninenines/ct_helper master
+
+# Standard targets.
+
+include erlang.mk
+
+# Also dialyze the tests.
+
+DIALYZER_OPTS += --src -r test
+
+# Use erl_make_certs from the tested release.
+
+ci-setup:: $(DEPS_DIR)/ct_helper
+       cp ~/.kerl/builds/$(CI_OTP_RELEASE)/otp_src_git/lib/ssl/test/erl_make_certs.erl deps/ct_helper/src/
diff --git a/deps/ranch/README.asciidoc b/deps/ranch/README.asciidoc
new file mode 100644 (file)
index 0000000..27ea466
--- /dev/null
@@ -0,0 +1,30 @@
+= Ranch
+
+Ranch is a socket acceptor pool for TCP protocols.
+
+== Goals
+
+Ranch aims to provide everything you need to accept TCP connections with
+a **small** code base and **low latency** while being easy to use directly
+as an application or to **embed** into your own.
+
+Ranch provides a **modular** design, letting you choose which transport
+and protocol are going to be used for a particular listener. Listeners
+accept and manage connections on one port, and include facilities to
+limit the number of **concurrent** connections. Connections are sorted
+into **pools**, each pool having a different configurable limit.
+
+Ranch also allows you to **upgrade** the acceptor pool without having
+to close any of the currently opened sockets.
+
+== Getting started
+
+* link:doc/src/guide/book.asciidoc[Read the guide]
+* link:doc/src/manual/[Check the manual]
+* Look at the examples in the `examples/` directory
+
+== Support
+
+* Official IRC Channel: #ninenines on irc.freenode.net
+* http://lists.ninenines.eu[Mailing Lists]
+* http://ninenines.eu/support[Commercial Support]
diff --git a/deps/ranch/appveyor.yml b/deps/ranch/appveyor.yml
new file mode 100644 (file)
index 0000000..70f0ced
--- /dev/null
@@ -0,0 +1,7 @@
+build_script:
+- C:\msys64\usr\bin\bash -lc "pacman --needed --noconfirm -Sy pacman-mirrors"
+- C:\msys64\usr\bin\bash -lc "pacman --noconfirm -Sy"
+- C:\msys64\usr\bin\bash -lc "pacman --noconfirm -S git make"
+test_script:
+- C:\msys64\usr\bin\bash -lc "cd $APPVEYOR_BUILD_FOLDER && make dialyze"
+- C:\msys64\usr\bin\bash -lc "cd $APPVEYOR_BUILD_FOLDER && make tests"
diff --git a/deps/ranch/circle.yml b/deps/ranch/circle.yml
new file mode 100644 (file)
index 0000000..330552f
--- /dev/null
@@ -0,0 +1,22 @@
+general:
+  artifacts:
+    - "logs"
+
+dependencies:
+  cache_directories:
+    - "~/.kerl"
+    - "~/erlang"
+
+  pre:
+    - sudo apt-get update
+    - sudo apt-get install autoconf2.59
+    - cd $HOME/bin && ln -s /usr/bin/autoconf2.59 autoconf
+    - cd $HOME/bin && ln -s /usr/bin/autoheader2.59 autoheader
+    - make ci-prepare:
+        timeout: 7200
+
+test:
+  override:
+    - source $HOME/erlang/OTP-*/activate && make dialyze
+    - make -k ci:
+        timeout: 3600
diff --git a/deps/ranch/doc/src/guide/book.asciidoc b/deps/ranch/doc/src/guide/book.asciidoc
new file mode 100644 (file)
index 0000000..59e8e70
--- /dev/null
@@ -0,0 +1,20 @@
+// a2x: --dblatex-opts "-P latex.output.revhistory=0 -P doc.publisher.show=0 -P index.numbered=0"
+// a2x: -d book --attribute tabsize=4
+
+= Ranch User Guide
+
+include::introduction.asciidoc[Introduction]
+
+include::listeners.asciidoc[Listeners]
+
+include::transports.asciidoc[Transports]
+
+include::protocols.asciidoc[Protocols]
+
+include::embedded.asciidoc[Embedded mode]
+
+include::parsers.asciidoc[Writing parsers]
+
+include::ssl_auth.asciidoc[SSL client authentication]
+
+include::internals.asciidoc[Internals]
diff --git a/deps/ranch/doc/src/guide/embedded.asciidoc b/deps/ranch/doc/src/guide/embedded.asciidoc
new file mode 100644 (file)
index 0000000..593a807
--- /dev/null
@@ -0,0 +1,48 @@
+== Embedded mode
+
+Embedded mode allows you to insert Ranch listeners directly
+in your supervision tree. This allows for greater fault tolerance
+control by permitting the shutdown of a listener due to the
+failure of another part of the application and vice versa.
+
+=== Embedding
+
+To embed Ranch in your application you can simply add the child specs
+to your supervision tree. This can all be done in the `init/1` function
+of one of your application supervisors.
+
+Ranch requires at the minimum two kinds of child specs for embedding.
+First, you need to add `ranch_sup` to your supervision tree, only once,
+regardless of the number of listeners you will use. Then you need to
+add the child specs for each listener.
+
+Ranch has a convenience function for getting the listeners child specs
+called `ranch:child_spec/6`, that works like `ranch:start_listener/6`,
+except that it doesn't start anything, it only returns child specs.
+
+As for `ranch_sup`, the child spec is simple enough to not require a
+convenience function.
+
+The following example adds both `ranch_sup` and one listener to another
+application's supervision tree.
+
+.Embed Ranch directly in your supervision tree
+
+[source,erlang]
+----
+init([]) ->
+       RanchSupSpec = {ranch_sup, {ranch_sup, start_link, []},
+               permanent, 5000, supervisor, [ranch_sup]},
+       ListenerSpec = ranch:child_spec(echo, 100,
+               ranch_tcp, [{port, 5555}],
+               echo_protocol, []
+       ),
+       {ok, {{one_for_one, 10, 10}, [RanchSupSpec, ListenerSpec]}}.
+----
+
+Remember, you can add as many listener child specs as needed, but only
+one `ranch_sup` spec!
+
+It is recommended that your architecture makes sure that all listeners
+are restarted if `ranch_sup` fails. See the Ranch internals chapter for
+more details on how Ranch does it.
diff --git a/deps/ranch/doc/src/guide/internals.asciidoc b/deps/ranch/doc/src/guide/internals.asciidoc
new file mode 100644 (file)
index 0000000..fa63f1d
--- /dev/null
@@ -0,0 +1,94 @@
+== Internals
+
+This chapter may not apply to embedded Ranch as embedding allows you
+to use an architecture specific to your application, which may or may
+not be compatible with the description of the Ranch application.
+
+Note that for everything related to efficiency and performance,
+you should perform the benchmarks yourself to get the numbers that
+matter to you. Generic benchmarks found on the web may or may not
+be of use to you, you can never know until you benchmark your own
+system.
+
+=== Architecture
+
+Ranch is an OTP application.
+
+Like all OTP applications, Ranch has a top supervisor. It is responsible
+for supervising the `ranch_server` process and all the listeners that
+will be started.
+
+The `ranch_server` gen_server is a central process keeping track of the
+listeners and their acceptors. It does so through the use of a public ets
+table called `ranch_server`. The table is owned by the top supervisor
+to improve fault tolerance. This way if the `ranch_server` gen_server
+fails, it doesn't lose any information and the restarted process can
+continue as if nothing happened.
+
+Ranch uses a custom supervisor for managing connections. This supervisor
+keeps track of the number of connections and handles connection limits
+directly. While it is heavily optimized to perform the task of creating
+connection processes for accepted connections, it is still following the
+OTP principles and the usual `sys` and `supervisor` calls will work on
+it as expected.
+
+Listeners are grouped into the `ranch_listener_sup` supervisor and
+consist of three kinds of processes: the listener gen_server, the
+acceptor processes and the connection processes, both grouped under
+their own supervisor. All of these processes are registered to the
+`ranch_server` gen_server with varying amount of information.
+
+All socket operations, including listening for connections, go through
+transport handlers. Accepted connections are given to the protocol handler.
+Transport handlers are simple callback modules for performing operations on
+sockets. Protocol handlers start a new process, which receives socket
+ownership, with no requirements on how the code should be written inside
+that new process.
+
+=== Number of acceptors
+
+The second argument to `ranch:start_listener/6` is the number of
+processes that will be accepting connections. Care should be taken
+when choosing this number.
+
+First of all, it should not be confused with the maximum number
+of connections. Acceptor processes are only used for accepting and
+have nothing else in common with connection processes. Therefore
+there is nothing to be gained from setting this number too high,
+in fact it can slow everything else down.
+
+Second, this number should be high enough to allow Ranch to accept
+connections concurrently. But the number of cores available doesn't
+seem to be the only factor for choosing this number, as we can
+observe faster accepts if we have more acceptors than cores. It
+might be entirely dependent on the protocol, however.
+
+Our observations suggest that using 100 acceptors on modern hardware
+is a good solution, as it's big enough to always have acceptors ready
+and it's low enough that it doesn't have a negative impact on the
+system's performance.
+
+=== Platform-specific TCP features
+
+Some socket options are platform-specific and not supported by `inet`.
+They can be of interest because they generally are related to
+optimizations provided by the underlying OS. They can still be enabled
+thanks to the `raw` option, for which we will see an example.
+
+One of these features is `TCP_DEFER_ACCEPT` on Linux. It is a simplified
+accept mechanism which will wait for application data to come in before
+handing out the connection to the Erlang process.
+
+This is especially useful if you expect many connections to be mostly
+idle, perhaps part of a connection pool. They can be handled by the
+kernel directly until they send any real data, instead of allocating
+resources to idle connections.
+
+To enable this mechanism, the following option can be used.
+
+.Using raw transport options
+
+[source,erlang]
+{raw, 6, 9, << 30:32/native >>}
+
+It means go on layer 6, turn on option 9 with the given integer parameter.
diff --git a/deps/ranch/doc/src/guide/introduction.asciidoc b/deps/ranch/doc/src/guide/introduction.asciidoc
new file mode 100644 (file)
index 0000000..3199fb2
--- /dev/null
@@ -0,0 +1,25 @@
+== Introduction
+
+Ranch is a socket acceptor pool for TCP protocols.
+
+Ranch aims to provide everything you need to accept TCP connections
+with a small code base and low latency while being easy to use directly
+as an application or to embed into your own.
+
+=== Prerequisites
+
+It is assumed the developer already knows Erlang and has some experience
+with socket programming and TCP protocols.
+
+=== Supported platforms
+
+Ranch is tested and supported on Linux.
+
+Ranch is developed for Erlang R15B01+.
+
+Ranch may be compiled on earlier Erlang versions with small source code
+modifications but there is no guarantee that it will work as expected.
+
+=== Versioning
+
+Ranch uses http://semver.org/[Semantic Versioning 2.0.0]
diff --git a/deps/ranch/doc/src/guide/listeners.asciidoc b/deps/ranch/doc/src/guide/listeners.asciidoc
new file mode 100644 (file)
index 0000000..ef2d49c
--- /dev/null
@@ -0,0 +1,251 @@
+== Listeners
+
+A listener is a set of processes whose role is to listen on a port
+for new connections. It manages a pool of acceptor processes, each
+of them indefinitely accepting connections. When it does, it starts
+a new process executing the protocol handler code. All the socket
+programming is abstracted through the use of transport handlers.
+
+The listener takes care of supervising all the acceptor and connection
+processes, allowing developers to focus on building their application.
+
+=== Starting a listener
+
+Ranch does nothing by default. It is up to the application developer
+to request that Ranch listens for connections.
+
+A listener can be started and stopped at will.
+
+When starting a listener, a number of different settings are required:
+
+* A name to identify it locally and be able to interact with it.
+* The number of acceptors in the pool.
+* A transport handler and its associated options.
+* A protocol handler and its associated options.
+
+Ranch includes both TCP and SSL transport handlers, respectively
+`ranch_tcp` and `ranch_ssl`.
+
+A listener can be started by calling the `ranch:start_listener/6`
+function. Before doing so however, you must ensure that the `ranch`
+application is started.
+
+.Starting the Ranch application
+
+[source,erlang]
+ok = application:start(ranch).
+
+You are then ready to start a listener. Let's call it `tcp_echo`. It will
+have a pool of 100 acceptors, use a TCP transport and forward connections
+to the `echo_protocol` handler.
+
+.Starting a listener for TCP connections on port 5555
+
+[source,erlang]
+{ok, _} = ranch:start_listener(tcp_echo, 100,
+       ranch_tcp, [{port, 5555}],
+       echo_protocol, []
+).
+
+You can try this out by compiling and running the `tcp_echo` example in the
+examples directory. To do so, open a shell in the 'examples/tcp_echo/'
+directory and run the following command:
+
+.Building and starting a Ranch example
+
+[source,bash]
+$ make run
+
+You can then connect to it using telnet and see the echo server reply
+everything you send to it. Then when you're done testing, you can use
+the `Ctrl+]` key to escape to the telnet command line and type
+`quit` to exit.
+
+.Connecting to the example listener with telnet
+
+[source,bash]
+----
+$ telnet localhost 5555
+Trying 127.0.0.1...
+Connected to localhost.
+Escape character is '^]'.
+Hello!
+Hello!
+It works!
+It works!
+^]
+
+telnet> quit
+Connection closed.
+----
+
+=== Stopping a listener
+
+All you need to stop a Ranch listener is to call the
+`ranch:stop_listener/1` function with the listener's name
+as argument. In the previous section we started the listener
+named `tcp_echo`. We can now stop it.
+
+.Stopping a listener
+
+[source,erlang]
+ranch:stop_listener(tcp_echo).
+
+=== Default transport options
+
+By default the socket will be set to return `binary` data, with the
+options `{active, false}`, `{packet, raw}`, `{reuseaddr, true}` set.
+These values can't be overridden when starting the listener, but
+they can be overridden using `Transport:setopts/2` in the protocol.
+
+It will also set `{backlog, 1024}` and `{nodelay, true}`, which
+can be overridden at listener startup.
+
+=== Listening on a random port
+
+You do not have to specify a specific port to listen on. If you give
+the port number 0, or if you omit the port number entirely, Ranch will
+start listening on a random port.
+
+You can retrieve this port number by calling `ranch:get_port/1`. The
+argument is the name of the listener you gave in `ranch:start_listener/6`.
+
+.Starting a listener for TCP connections on a random port
+
+[source,erlang]
+{ok, _} = ranch:start_listener(tcp_echo, 100,
+       ranch_tcp, [{port, 0}],
+       echo_protocol, []
+).
+Port = ranch:get_port(tcp_echo).
+
+=== Listening on privileged ports
+
+Some systems limit access to ports below 1024 for security reasons.
+This can easily be identified by an `{error, eacces}` error when trying
+to open a listening socket on such a port.
+
+The methods for listening on privileged ports vary between systems,
+please refer to your system's documentation for more information.
+
+We recommend the use of port rewriting for systems with a single server,
+and load balancing for systems with multiple servers. Documenting these
+solutions is however out of the scope of this guide.
+
+=== Accepting connections on an existing socket
+
+If you want to accept connections on an existing socket, you can use the
+`socket` transport option, which should just be the relevant data returned
+from the connect function for the transport or the underlying socket library
+(`gen_tcp:connect`, `ssl:connect`). The accept function will then be
+called on the passed in socket. You should connect the socket in
+`{active, false}` mode, as well.
+
+Note, however, that because of a bug in SSL, you cannot change ownership of an
+SSL listen socket prior to R16. Ranch will catch the error thrown, but the
+owner of the SSL socket will remain as whatever process created the socket.
+However, this will not affect accept behaviour unless the owner process dies,
+in which case the socket is closed. Therefore, to use this feature with SSL
+with an erlang release prior to R16, ensure that the SSL socket is opened in a
+persistent process.
+
+=== Limiting the number of concurrent connections
+
+The `max_connections` transport option allows you to limit the number
+of concurrent connections. It defaults to 1024. Its purpose is to
+prevent your system from being overloaded and ensuring all the
+connections are handled optimally.
+
+.Customizing the maximum number of concurrent connections
+
+[source,erlang]
+{ok, _} = ranch:start_listener(tcp_echo, 100,
+       ranch_tcp, [{port, 5555}, {max_connections, 100}],
+       echo_protocol, []
+).
+
+You can disable this limit by setting its value to the atom `infinity`.
+
+.Disabling the limit for the number of connections
+
+[source,erlang]
+{ok, _} = ranch:start_listener(tcp_echo, 100,
+       ranch_tcp, [{port, 5555}, {max_connections, infinity}],
+       echo_protocol, []
+).
+
+You may not always want connections to be counted when checking for
+`max_connections`. For example you might have a protocol where both
+short-lived and long-lived connections are possible. If the long-lived
+connections are mostly waiting for messages, then they don't consume
+much resources and can safely be removed from the count.
+
+To remove the connection from the count, you must call the
+`ranch:remove_connection/1` from within the connection process,
+with the name of the listener as the only argument.
+
+.Removing a connection from the count of connections
+
+[source,erlang]
+ranch:remove_connection(Ref).
+
+As seen in the chapter covering protocols, this reference (`Ref`) is
+received as the first argument of the protocol's `start_link/4` callback.
+
+You can modify the `max_connections` value on a running listener by
+using the `ranch:set_max_connections/2` function, with the name of the
+listener as first argument and the new value as the second.
+
+.Upgrading the maximum number of connections
+
+[source,erlang]
+ranch:set_max_connections(tcp_echo, MaxConns).
+
+The change will occur immediately.
+
+=== Using a supervisor for connection processes
+
+Ranch allows you to define the type of process that will be used
+for the connection processes. By default it expects a `worker`.
+When the `connection_type` configuration value is set to `supervisor`,
+Ranch will consider that the connection process it manages is a
+supervisor and will reflect that in its supervision tree.
+
+Connection processes of type `supervisor` can either handle the
+socket directly or through one of their children. In the latter
+case the start function for the connection process must return
+two pids: the pid of the supervisor you created (that will be
+supervised) and the pid of the protocol handling process (that
+will receive the socket).
+
+Instead of returning `{ok, ConnPid}`, simply return
+`{ok, SupPid, ConnPid}`.
+
+It is very important that the connection process be created
+under the supervisor process so that everything works as intended.
+If not, you will most likely experience issues when the supervised
+process is stopped.
+
+=== Upgrading
+
+Ranch allows you to upgrade the protocol options. This takes effect
+immediately and for all subsequent connections.
+
+To upgrade the protocol options, call `ranch:set_protocol_options/2`
+with the name of the listener as first argument and the new options
+as the second.
+
+.Upgrading the protocol options
+
+[source,erlang]
+ranch:set_protocol_options(tcp_echo, NewOpts).
+
+All future connections will use the new options.
+
+You can also retrieve the current options similarly by
+calling `ranch:get_protocol_options/1`.
+
+.Retrieving the current protocol options
+
+[source,erlang]
+Opts = ranch:get_protocol_options(tcp_echo).
diff --git a/deps/ranch/doc/src/guide/parsers.asciidoc b/deps/ranch/doc/src/guide/parsers.asciidoc
new file mode 100644 (file)
index 0000000..9eacbfa
--- /dev/null
@@ -0,0 +1,92 @@
+== Writing parsers
+
+There are three kinds of protocols:
+
+* Text protocols
+* Schema-less binary protocols
+* Schema-based binary protocols
+
+This chapter introduces the first two kinds. It will not cover
+more advanced topics such as continuations or parser generators.
+
+This chapter isn't specifically about Ranch, we assume here that
+you know how to read data from the socket. The data you read and
+the data that hasn't been parsed is saved in a buffer. Every
+time you read from the socket, the data read is appended to the
+buffer. What happens next depends on the kind of protocol. We
+will only cover the first two.
+
+=== Parsing text
+
+Text protocols are generally line based. This means that we can't
+do anything with them until we receive the full line.
+
+A simple way to get a full line is to use `binary:split/{2,3}`.
+
+.Using binary:split/2 to get a line of input
+
+[source,erlang]
+case binary:split(Buffer, <<"\n">>) of
+       [_] ->
+               get_more_data(Buffer);
+       [Line, Rest] ->
+               handle_line(Line, Rest)
+end.
+
+In the above example, we can have two results. Either there was
+a line break in the buffer and we get it split into two parts,
+the line and the rest of the buffer; or there was no line break
+in the buffer and we need to get more data from the socket.
+
+Next, we need to parse the line. The simplest way is to again
+split, here on space. The difference is that we want to split
+on all space characters, as we want to tokenize the whole string.
+
+.Using binary:split/3 to split text
+
+[source,erlang]
+case binary:split(Line, <<" ">>, [global]) of
+       [<<"HELLO">>] ->
+               be_polite();
+       [<<"AUTH">>, User, Password] ->
+               authenticate_user(User, Password);
+       [<<"QUIT">>, Reason] ->
+               quit(Reason)
+       %% ...
+end.
+
+Pretty simple, right? Match on the command name, get the rest
+of the tokens in variables and call the respective functions.
+
+After doing this, you will want to check if there is another
+line in the buffer, and handle it immediately if any.
+Otherwise wait for more data.
+
+=== Parsing binary
+
+Binary protocols can be more varied, although most of them are
+pretty similar. The first four bytes of a frame tend to be
+the size of the frame, which is followed by a certain number
+of bytes for the type of frame and then various parameters.
+
+Sometimes the size of the frame includes the first four bytes,
+sometimes not. Other times this size is encoded over two bytes.
+And even other times little-endian is used instead of big-endian.
+
+The general idea stays the same though.
+
+.Using binary pattern matching to split frames
+
+[source,erlang]
+<< Size:32, _/bits >> = Buffer,
+case Buffer of
+       << Frame:Size/binary, Rest/bits >> ->
+               handle_frame(Frame, Rest);
+       _ ->
+               get_more_data(Buffer)
+end.
+
+You will then need to parse this frame using binary pattern
+matching, and handle it. Then you will want to check if there
+is another frame fully received in the buffer, and handle it
+immediately if any. Otherwise wait for more data.
diff --git a/deps/ranch/doc/src/guide/protocols.asciidoc b/deps/ranch/doc/src/guide/protocols.asciidoc
new file mode 100644 (file)
index 0000000..8060343
--- /dev/null
@@ -0,0 +1,125 @@
+== Protocols
+
+A protocol handler starts a connection process and defines the
+protocol logic executed in this process.
+
+=== Writing a protocol handler
+
+All protocol handlers must implement the `ranch_protocol` behavior
+which defines a single callback, `start_link/4`. This callback is
+responsible for spawning a new process for handling the connection.
+It receives four arguments: the name of the listener, the socket, the
+transport handler being used and the protocol options defined in
+the call to `ranch:start_listener/6`. This callback must
+return `{ok, Pid}`, with `Pid` the pid of the new process.
+
+The newly started process can then freely initialize itself. However,
+it must call `ranch:accept_ack/1` before doing any socket operation.
+This will ensure the connection process is the owner of the socket.
+It expects the listener's name as argument.
+
+.Acknowledge accepting the socket
+
+[source,erlang]
+ok = ranch:accept_ack(Ref).
+
+If your protocol code requires specific socket options, you should
+set them while initializing your connection process, after
+calling `ranch:accept_ack/1`. You can use `Transport:setopts/2`
+for that purpose.
+
+Following is the complete protocol code for the example found
+in `examples/tcp_echo/`.
+
+.Protocol module that echoes everything it receives
+
+[source,erlang]
+----
+-module(echo_protocol).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+init(Ref, Socket, Transport, _Opts = []) ->
+       ok = ranch:accept_ack(Ref),
+       loop(Socket, Transport).
+
+loop(Socket, Transport) ->
+       case Transport:recv(Socket, 0, 5000) of
+               {ok, Data} ->
+                       Transport:send(Socket, Data),
+                       loop(Socket, Transport);
+               _ ->
+                       ok = Transport:close(Socket)
+       end.
+----
+
+=== Using gen_server
+
+Special processes like the ones that use the `gen_server` or `gen_fsm`
+behaviours have the particularity of having their `start_link` call not
+return until the `init` function returns. This is problematic, because
+you won't be able to call `ranch:accept_ack/1` from the `init` callback
+as this would cause a deadlock to happen.
+
+There are two ways of solving this problem.
+
+The first, and probably the most elegant one, is to make use of the
+`gen_server:enter_loop/3` function. It allows you to start your process
+normally (although it must be started with `proc_lib` like all special
+processes), then perform any needed operations before falling back into
+the normal `gen_server` execution loop.
+
+.Use a gen_server for protocol handling
+
+[source,erlang]
+----
+-module(my_protocol).
+-behaviour(gen_server).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+%% Exports of other gen_server callbacks here.
+
+start_link(Ref, Socket, Transport, Opts) ->
+       proc_lib:start_link(?MODULE, init, [Ref, Socket, Transport, Opts]).
+
+init(Ref, Socket, Transport, _Opts = []) ->
+       ok = proc_lib:init_ack({ok, self()}),
+       %% Perform any required state initialization here.
+       ok = ranch:accept_ack(Ref),
+       ok = Transport:setopts(Socket, [{active, once}]),
+       gen_server:enter_loop(?MODULE, [], {state, Socket, Transport}).
+
+%% Other gen_server callbacks here.
+----
+
+The second method involves triggering a timeout just after `gen_server:init`
+ends. If you return a timeout value of `0` then the `gen_server` will call
+`handle_info(timeout, State)` right away.
+
+.Use a gen_server for protocol handling, method 2
+
+[source,erlang]
+----
+-module(my_protocol).
+-behaviour(gen_server).
+-behaviour(ranch_protocol).
+
+%% Exports go here.
+
+init([Ref, Socket, Transport]) ->
+       {ok, {state, Ref, Socket, Transport}, 0}.
+
+handle_info(timeout, State={state, Ref, Socket, Transport}) ->
+       ok = ranch:accept_ack(Ref),
+       ok = Transport:setopts(Socket, [{active, once}]),
+       {noreply, State};
+%% ...
+----
diff --git a/deps/ranch/doc/src/guide/ssl_auth.asciidoc b/deps/ranch/doc/src/guide/ssl_auth.asciidoc
new file mode 100644 (file)
index 0000000..39f9c3c
--- /dev/null
@@ -0,0 +1,120 @@
+== SSL client authentication
+
+=== Purpose
+
+SSL client authentication is a mechanism allowing applications to
+identify certificates. This allows your application to make sure that
+the client is an authorized certificate, but makes no claim about
+whether the user can be trusted. This can be combined with a password
+based authentication to attain greater security.
+
+The server only needs to retain the certificate serial number and
+the certificate issuer to authenticate the certificate. Together,
+they can be used to uniquely identify a certificate.
+
+As Ranch allows the same protocol code to be used for both SSL and
+non-SSL transports, you need to make sure you are in an SSL context
+before attempting to perform an SSL client authentication. This
+can be done by checking the return value of `Transport:name/0`.
+
+=== Obtaining client certificates
+
+You can obtain client certificates from various sources. You can
+generate them yourself, or you can use a service like CAcert.org
+which allows you to generate client and server certificates for
+free.
+
+Following are the steps you need to take to create a CAcert.org
+account, generate a certificate and install it in your favorite
+browser.
+
+* Open http://cacert.org[CAcert.org] in your favorite browser
+* Root Certificate link: install both certificates
+* Join (Register an account)
+* Verify your account (check your email inbox!)
+* Log in
+* Client Certificates: New
+* Follow instructions to create the certificate
+* Install the certificate in your browser
+
+You can optionally save the certificate for later use, for example
+to extract the `IssuerID` information as will be detailed later on.
+
+=== Transport configuration
+
+The SSL transport does not request a client certificate by default.
+You need to specify the `{verify, verify_peer}` option when starting
+the listener to enable this behavior.
+
+.Configure a listener for SSL authentication
+
+[source,erlang]
+{ok, _} = ranch:start_listener(my_ssl, 100,
+       ranch_ssl, [
+               {port, SSLPort},
+               {certfile, PathToCertfile},
+               {cacertfile, PathToCACertfile},
+               {verify, verify_peer}
+       ],
+       my_protocol, []
+).
+
+In this example we set the required `port` and `certfile`, but also
+the `cacertfile` containing the CACert.org root certificate, and
+the option to request the client certificate.
+
+If you enable the `{verify, verify_peer}` option and the client does
+not have a client certificate configured for your domain, then no
+certificate will be sent. This allows you to use SSL for more than
+just authenticated clients.
+
+=== Authentication
+
+To authenticate users, you must first save the certificate information
+required. If you have your users' certificate files, you can simply
+load the certificate and retrieve the information directly.
+
+.Retrieve the issuer ID from a certificate
+
+[source,erlang]
+----
+certfile_to_issuer_id(Filename) ->
+       {ok, Data} = file:read_file(Filename),
+       [{'Certificate', Cert, not_encrypted}] = public_key:pem_decode(Data),
+       {ok, IssuerID} = public_key:pkix_issuer_id(Cert, self),
+       IssuerID.
+----
+
+The `IssuerID` variable contains both the certificate serial number
+and the certificate issuer stored in a tuple, so this value alone can
+be used to uniquely identify the user certificate. You can save this
+value in a database, a configuration file or any other place where an
+Erlang term can be stored and retrieved.
+
+To retrieve the `IssuerID` from a running connection, you need to first
+retrieve the client certificate and then extract this information from
+it. Ranch does not provide a function to retrieve the client certificate.
+Instead you can use the `ssl:peercert/1` function. Once you have the
+certificate, you can again use the `public_key:pkix_issuer_id/2` to
+extract the `IssuerID` value.
+
+The following function returns the `IssuerID` or `false` if no client
+certificate was found. This snippet is intended to be used from your
+protocol code.
+
+.Retrieve the issuer ID from the certificate for the current connection
+
+[source,erlang]
+----
+socket_to_issuer_id(Socket) ->
+       case ssl:peercert(Socket) of
+               {error, no_peercert} ->
+                       false;
+               {ok, Cert} ->
+                       {ok, IssuerID} = public_key:pkix_issuer_id(Cert, self),
+                       IssuerID
+       end.
+----
+
+You then only need to match the `IssuerID` value to authenticate the
+user.
diff --git a/deps/ranch/doc/src/guide/transports.asciidoc b/deps/ranch/doc/src/guide/transports.asciidoc
new file mode 100644 (file)
index 0000000..9195376
--- /dev/null
@@ -0,0 +1,169 @@
+== Transports
+
+A transport defines the interface to interact with a socket.
+
+Transports can be used for connecting, listening and accepting
+connections, but also for receiving and sending data. Both
+passive and active mode are supported, although all sockets
+are initialized as passive.
+
+=== TCP transport
+
+The TCP transport is a thin wrapper around `gen_tcp`.
+
+=== SSL transport
+
+The SSL transport is a thin wrapper around `ssl`. It requires
+the `crypto`, `asn1`, `public_key` and `ssl` applications
+to be started. When starting an SSL listener, Ranch will attempt
+to automatically start them. It will not try to stop them when
+the listener is removed, however.
+
+.Starting the SSL application
+
+[source,erlang]
+ssl:start().
+
+In a proper OTP setting, you will need to make your application
+depend on the `crypto`, `public_key` and `ssl` applications.
+They will be started automatically when starting your release.
+
+The SSL transport `accept/2` function performs both transport
+and SSL accepts. Errors occurring during the SSL accept phase
+are returned as `{error, {ssl_accept, atom()}}` to differentiate
+on which socket the problem occurred.
+
+=== Sending and receiving data
+
+This section assumes that `Transport` is a valid transport handler
+(like `ranch_tcp` or `ranch_ssl`) and `Socket` is a connected
+socket obtained through the listener.
+
+You can send data to a socket by calling the `Transport:send/2`
+function. The data can be given as `iodata()`, which is defined as
+`binary() | iolist()`. All the following calls will work:
+
+.Sending data to the socket
+
+[source,erlang]
+----
+Transport:send(Socket, <<"Ranch is cool!">>).
+Transport:send(Socket, "Ranch is cool!").
+Transport:send(Socket, ["Ranch", ["is", "cool!"]]).
+Transport:send(Socket, ["Ranch", [<<"is">>, "cool!"]]).
+----
+
+You can receive data either in passive or in active mode. Passive mode
+means that you will perform a blocking `Transport:recv/3` call, while
+active mode means that you will receive the data as a message.
+
+By default, all data will be received as binary. It is possible to
+receive data as strings, although this is not recommended as binaries
+are a more efficient construct, especially for binary protocols.
+
+Receiving data using passive mode requires a single function call. The
+first argument is the socket, and the third argument is a timeout duration
+before the call returns with `{error, timeout}`.
+
+The second argument is the amount of data in bytes that we want to receive.
+The function will wait for data until it has received exactly this amount.
+If you are not expecting a precise size, you can specify 0, which will make
+this call return as soon as data was read, regardless of its size.
+
+.Receiving data from the socket in passive mode
+
+[source,erlang]
+{ok, Data} = Transport:recv(Socket, 0, 5000).
+
+Active mode requires you to inform the socket that you want to receive
+data as a message and to write the code to actually receive it.
+
+There are two kinds of active modes: `{active, once}` and
+`{active, true}`. The first will send a single message before going
+back to passive mode; the second will send messages indefinitely.
+We recommend not using the `{active, true}` mode as it could quickly
+flood your process mailbox. It's better to keep the data in the socket
+and read it only when required.
+
+Three different messages can be received:
+
+* `{OK, Socket, Data}`
+* `{Closed, Socket}`
+* `{Error, Socket, Reason}`
+
+The value of `OK`, `Closed` and `Error` can be different
+depending on the transport being used. To be able to properly match
+on them you must first call the `Transport:messages/0` function.
+
+.Retrieving the transport's active message identifiers
+
+[source,erlang]
+{OK, Closed, Error} = Transport:messages().
+
+To start receiving messages you will need to call the `Transport:setopts/2`
+function, and do so every time you want to receive data.
+
+.Receiving messages from the socket in active mode
+
+[source,erlang]
+----
+{OK, Closed, Error} = Transport:messages(),
+Transport:setopts(Socket, [{active, once}]),
+receive
+       {OK, Socket, Data} ->
+               io:format("data received: ~p~n", [Data]);
+       {Closed, Socket} ->
+               io:format("socket got closed!~n");
+       {Error, Socket, Reason} ->
+               io:format("error happened: ~p~n", [Reason])
+end.
+----
+
+You can easily integrate active sockets with existing Erlang code as all
+you really need is just a few more clauses when receiving messages.
+
+=== Sending files
+
+As in the previous section it is assumed `Transport` is a valid transport
+handler and `Socket` is a connected socket obtained through the listener.
+
+To send a whole file, with name `Filename`, over a socket:
+
+.Sending a file by filename
+
+[source,erlang]
+{ok, SentBytes} = Transport:sendfile(Socket, Filename).
+
+Or part of a file, with `Offset` greater than or equal to 0, `Bytes` number of
+bytes and chunks of size `ChunkSize`:
+
+.Sending part of a file by filename in chunks
+
+[source,erlang]
+Opts = [{chunk_size, ChunkSize}],
+{ok, SentBytes} = Transport:sendfile(Socket, Filename, Offset, Bytes, Opts).
+
+To improve efficiency when sending multiple parts of the same file it is also
+possible to use a file descriptor opened in raw mode:
+
+.Sending a file opened in raw mode
+
+[source,erlang]
+{ok, RawFile} = file:open(Filename, [raw, read, binary]),
+{ok, SentBytes} = Transport:sendfile(Socket, RawFile, Offset, Bytes, Opts).
+
+=== Writing a transport handler
+
+A transport handler is a module implementing the `ranch_transport` behavior.
+It defines a certain number of callbacks that must be written in order to
+allow transparent usage of the transport handler.
+
+The behavior doesn't define the socket options available when opening a
+socket. These do not need to be common to all transports as it's easy enough
+to write different initialization functions for the different transports that
+will be used. There is one exception, though: the `setopts/2` function *must*
+implement the `{active, once}` and the `{active, true}` options.
+
+If the transport handler doesn't have a native implementation of `sendfile/5`, a
+fallback is available, `ranch_transport:sendfile/6`. The extra first argument
+is the transport's module. See `ranch_ssl` for an example.
diff --git a/deps/ranch/doc/src/manual/ranch.asciidoc b/deps/ranch/doc/src/manual/ranch.asciidoc
new file mode 100644 (file)
index 0000000..22ab159
--- /dev/null
@@ -0,0 +1,178 @@
+= ranch(3)
+
+== Name
+
+ranch - socket acceptor pool
+
+== Description
+
+The `ranch` module provides functions for starting and
+manipulating Ranch listeners.
+
+== Types
+
+=== max_conns() = non_neg_integer() | infinity
+
+Maximum number of connections allowed on this listener.
+
+This is a soft limit. The actual number of connections
+might be slightly above the limit due to concurrency
+when accepting new connections. Some connections may
+also be removed from this count explicitly by the user
+code.
+
+=== opt()
+
+[source,erlang]
+----
+opt() = {ack_timeout, timeout()}
+       | {connection_type, worker | supervisor}
+       | {max_connections, max_conns()}
+       | {shutdown, timeout() | brutal_kill}
+       | {socket, any()}
+----
+
+Ranch-specific transport options.
+
+These options are not passed on to the transports.
+They are used by Ranch while setting up the listeners.
+
+=== ref() = any()
+
+Unique name used to refer to a listener.
+
+== Option descriptions
+
+None of the options are required.
+
+ack_timeout (5000)::
+       Maximum allowed time for the `ranch:accept_ack/1` call to finish.
+connection_type (worker)::
+       Type of process that will handle the connection.
+max_connections (1024)::
+       Maximum number of active connections. Soft limit. Using `infinity` will disable the limit entirely.
+shutdown (5000)::
+       Maximum allowed time for children to stop on listener shutdown.
+socket::
+       Listening socket opened externally to be used instead of calling `Transport:listen/1`.
+
+== Exports
+
+=== accept_ack(Ref) -> ok
+
+Ref = ref():: Listener name.
+
+Acknowledge that the connection is accepted.
+
+This function MUST be used by a connection process to inform
+Ranch that it initialized properly and let it perform any
+additional operations before the socket can be safely used.
+
+=== child_spec(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts) -> supervisor:child_spec()
+
+Ref = ref():: Listener name.
+NbAcceptors = non_neg_integer():: Number of acceptor processes.
+Transport = module():: Transport module.
+TransOpts = any():: Transport options.
+Protocol = module():: Protocol module.
+ProtoOpts = any():: Protocol options.
+
+Return child specifications for a new listener.
+
+This function can be used to embed a listener directly
+in an application instead of letting Ranch handle it.
+
+=== get_addr(Ref) -> {IP, Port}
+
+Ref = ref():: Listener name.
+IP = inet:ip_address():: IP of the interface used by this listener.
+Port = inet:port_number():: Port number used by this listener.
+
+Return the IP address and port for the given listener.
+
+=== get_max_connections(Ref) -> MaxConns
+
+Ref = ref():: Listener name.
+MaxConns = max_conns():: Current maximum number of connections.
+
+Return the max number of connections allowed for the given listener.
+
+=== get_port(Ref) -> Port
+
+Ref = ref():: Listener name.
+Port = inet:port_number():: Port number used by this listener.
+
+Return the port for the given listener.
+
+=== get_protocol_options(Ref) -> ProtoOpts
+
+Ref = ref():: Listener name.
+ProtoOpts = any():: Current protocol options.
+
+Return the protocol options set for the given listener.
+
+=== remove_connection(Ref) -> ok
+
+Ref = ref():: Listener name.
+
+Do not count this connection when limiting the number of connections.
+
+You can use this function for long-running connection processes
+which spend most of their time idling rather than consuming
+resources. This allows Ranch to accept a lot more connections
+without sacrificing the latency of the system.
+
+This function may only be called from a connection process.
+
+=== set_max_connections(Ref, MaxConns) -> ok
+
+Ref = ref():: Listener name.
+MaxConns = max_conns():: New maximum number of connections.
+
+Set the max number of connections for the given listener.
+
+The change will be applied immediately. If the new value is
+smaller than the previous one, Ranch will not kill the extra
+connections, but will wait for them to terminate properly.
+
+=== set_protocol_options(Ref, ProtoOpts) -> ok
+
+Ref = ref():: Listener name.
+ProtoOpts = any():: New protocol options.
+
+Set the protocol options for the given listener.
+
+The change will be applied immediately for all new connections.
+Old connections will not receive the new options.
+
+=== start_listener(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts) -> {ok, pid()} | {error, badarg}
+
+Ref = ref():: Listener name.
+NbAcceptors = non_neg_integer():: Number of acceptor processes.
+Transport = module():: Transport module.
+TransOpts = any():: Transport options.
+Protocol = module():: Protocol module.
+ProtoOpts = any():: Protocol options.
+
+Start listening for connections using the given transport
+and protocol. Returns the pid for this listener's supervisor.
+
+There are additional transport options that apply
+regardless of transport. They allow configuring how the
+connections are supervised, rate limited and more. Please
+consult the previous section for more details.
+
+=== stop_listener(Ref) -> ok | {error, not_found}
+
+Ref = ref():: Listener name.
+
+Stop the given listener.
+
+The listener is stopped gracefully, first by closing the
+listening port, then by stopping the connection processes.
+These processes are stopped according to the `shutdown`
+transport option, which may be set to brutally kill all
+connection processes or give them some time to stop properly.
+
+This function does not return until the listener is
+completely stopped.
diff --git a/deps/ranch/doc/src/manual/ranch_app.asciidoc b/deps/ranch/doc/src/manual/ranch_app.asciidoc
new file mode 100644 (file)
index 0000000..2edfc72
--- /dev/null
@@ -0,0 +1,27 @@
+= ranch(7)
+
+== Name
+
+ranch - Socket acceptor pool for TCP protocols.
+
+== Dependencies
+
+The `ranch` application has no particular dependency required
+to start.
+
+It has optional dependencies that are only required when
+listening for SSL connections. The dependencies are `crypto`,
+`asn1`, `public_key` and `ssl`. They are started automatically
+if they weren't before.
+
+== Environment
+
+The `ranch` application defines one application environment
+configuration parameter.
+
+profile (false)::
+       When enabled, Ranch will start `eprof` profiling automatically.
+
+You can use the `ranch_app:profile_output/0` function to stop
+profiling and output the results to the files 'procs.profile'
+and 'total.profile'. Do not use in production.
diff --git a/deps/ranch/doc/src/manual/ranch_protocol.asciidoc b/deps/ranch/doc/src/manual/ranch_protocol.asciidoc
new file mode 100644 (file)
index 0000000..714a82b
--- /dev/null
@@ -0,0 +1,44 @@
+= ranch_protocol(3)
+
+== Name
+
+ranch_protocol - behaviour for protocol modules
+
+== Description
+
+The `ranch_protocol` behaviour defines the interface used
+by Ranch protocols.
+
+== Types
+
+None.
+
+== Callbacks
+
+=== start_link(Ref, Socket, Transport, ProtoOpts) -> {ok, pid()} | {ok, pid(), pid()}
+
+Ref = ranch:ref():: Listener name.
+Socket = any():: Socket for this connection.
+Transport = module():: Transport module for this socket.
+ProtoOpts = any():: Protocol options.
+
+Start a new connection process for the given socket.
+
+The only purpose of this callback is to start a process that
+will handle the socket. It must spawn the process, link and
+then return the new pid. This function will always be called
+from inside a supervisor.
+
+This callback can also return two pids. The first pid is the
+pid of the process that will be supervised. The second pid is
+the pid of the process that will receive ownership of the
+socket. This second process must be a child of the first. This
+form is only available when `connection_type` is set to
+`supervisor`.
+
+If any other value is returned, the supervisor will close the
+socket and assume no process has been started.
+
+Do not perform any operations in this callback, as this would
+block the supervisor responsible for starting connection
+processes and degrade performance severely.
diff --git a/deps/ranch/doc/src/manual/ranch_ssl.asciidoc b/deps/ranch/doc/src/manual/ranch_ssl.asciidoc
new file mode 100644 (file)
index 0000000..55accad
--- /dev/null
@@ -0,0 +1,142 @@
+= ranch_ssl(3)
+
+== Name
+
+ranch_ssl - SSL transport module
+
+== Description
+
+The `ranch_ssl` module implements an SSL Ranch transport.
+
+== Types
+
+=== ssl_opt()
+
+[source,erlang]
+----
+ssl_opt() = {alpn_preferred_protocols, [binary()]}
+       | {cacertfile, string()}
+       | {cacerts, [public_key:der_encoded()]}
+       | {cert, public_key:der_encoded()}
+       | {certfile, string()}
+       | {ciphers, [ssl:erl_cipher_suite()] | string()}
+       | {client_renegotiation, boolean()}
+       | {crl_cache, {module(), {internal | any(), list()}}}
+       | {crl_check, boolean() | peer | best_effort}
+       | {depth, 0..255}
+       | {dh, public_key:der_encoded()}
+       | {dhfile, string()}
+       | {fail_if_no_peer_cert, boolean()}
+       | {hibernate_after, integer() | undefined}
+       | {honor_cipher_order, boolean()}
+       | {key, {'RSAPrivateKey' | 'DSAPrivateKey' | 'PrivateKeyInfo', public_key:der_encoded()}}
+       | {keyfile, string()}
+       | {log_alert, boolean()}
+       | {next_protocols_advertised, [binary()]}
+       | {partial_chain, fun(([public_key:der_encoded()]) -> {trusted_ca, public_key:der_encoded()} | unknown_ca)}
+       | {password, string()}
+       | {psk_identity, string()}
+       | {reuse_session, fun()}
+       | {reuse_sessions, boolean()}
+       | {secure_renegotiate, boolean()}
+       | {sni_fun, fun()}
+       | {sni_hosts, [{string(), ssl_opt()}]}
+       | {user_lookup_fun, {fun(), any()}}
+       | {verify, ssl:verify_type()}
+       | {verify_fun, {fun(), any()}}
+       | {versions, [atom()]}.
+----
+
+SSL-specific listen options.
+
+=== opt() = ranch_tcp:opt() | ssl_opt()
+
+Listen options.
+
+=== opts() = [opt()]
+
+List of listen options.
+
+== Option descriptions
+
+Specifying a certificate is mandatory, either through the `cert`
+or the `certfile` option. None of the other options are required.
+
+The default value is given next to the option name.
+
+alpn_preferred_protocols::
+       Perform Application-Layer Protocol Negotiation with the given list of preferred protocols.
+cacertfile::
+       Path to PEM encoded trusted certificates file used to verify peer certificates.
+cacerts::
+       List of DER encoded trusted certificates.
+cert::
+       DER encoded user certificate.
+certfile::
+       Path to the PEM encoded user certificate file. May also contain the private key.
+ciphers::
+       List of ciphers that clients are allowed to use.
+client_renegotiation (true)::
+       Whether to allow client-initiated renegotiation.
+crl_cache ({ssl_crl_cache, {internal, []}})::
+       Customize the module used to cache Certificate Revocation Lists.
+crl_check (false)::
+       Whether to perform CRL check on all certificates in the chain during validation.
+depth (1)::
	Maximum number of intermediate certificates allowed in the certification path.
+dh::
+       DER encoded Diffie-Hellman parameters.
+dhfile::
+       Path to the PEM encoded Diffie-Hellman parameters file.
+fail_if_no_peer_cert (false)::
+       Whether to refuse the connection if the client sends an empty certificate.
+hibernate_after (undefined)::
+       Time in ms after which SSL socket processes go into hibernation to reduce memory usage.
+honor_cipher_order (false)::
+       If true, use the server's preference for cipher selection. If false, use the client's preference.
+key::
+       DER encoded user private key.
+keyfile::
+       Path to the PEM encoded private key file, if different than the certfile.
+log_alert (true)::
+       If false, error reports will not be displayed.
+next_protocols_advertised::
+       List of protocols to send to the client if it supports the Next Protocol extension.
+nodelay (true)::
+       Whether to enable TCP_NODELAY.
+partial_chain::
+       Claim an intermediate CA in the chain as trusted.
+password::
+       Password to the private key file, if password protected.
+psk_identity::
+       Provide the given PSK identity hint to the client during the handshake.
+reuse_session::
+       Custom policy to decide whether a session should be reused.
+reuse_sessions (false)::
+       Whether to allow session reuse.
+secure_renegotiate (false)::
+       Whether to reject renegotiation attempts that do not conform to RFC5746.
+sni_fun::
+       Function called when the client requests a host using Server Name Indication. Returns options to apply.
+sni_hosts::
+       Options to apply for the host that matches what the client requested with Server Name Indication.
+user_lookup_fun::
+       Function called to determine the shared secret when using PSK, or provide parameters when using SRP.
+verify (verify_none)::
+       Use `verify_peer` to request a certificate from the client.
+verify_fun::
+       Custom policy to decide whether a client certificate is valid.
+versions::
+       TLS protocol versions that will be supported.
+
+Note that the client will not send a certificate unless the
+value for the `verify` option is set to `verify_peer`. This
+means that the `fail_if_no_peer_cert` option only applies when combined
+with the `verify` option. The `verify_fun` option allows
+greater control over the client certificate validation.
+
+The options `sni_fun` and `sni_hosts` are mutually exclusive.
+
+== Exports
+
+None.
diff --git a/deps/ranch/doc/src/manual/ranch_tcp.asciidoc b/deps/ranch/doc/src/manual/ranch_tcp.asciidoc
new file mode 100644 (file)
index 0000000..1fc268d
--- /dev/null
@@ -0,0 +1,123 @@
+= ranch_tcp(3)
+
+== Name
+
+ranch_tcp - TCP transport module
+
+== Description
+
+The `ranch_tcp` module implements a TCP Ranch transport.
+
+Note that due to bugs in OTP up to at least R16B02, it is
+recommended to disable async threads when using the
+`sendfile` function of this transport, as it can cause
+the threads to get stuck indefinitely.
+
+== Types
+
+=== opt()
+
+[source,erlang]
+----
+opt() = {backlog, non_neg_integer()}
+       | {buffer, non_neg_integer()}
+       | {delay_send, boolean()}
+       | {dontroute, boolean()}
+       | {exit_on_close, boolean()}
+       | {fd, non_neg_integer()}
+       | {high_msgq_watermark, non_neg_integer()}
+       | {high_watermark, non_neg_integer()}
+       | inet
+       | inet6
+       | {ip, inet:ip_address()}
+       | {keepalive, boolean()}
+       | {linger, {boolean(), non_neg_integer()}}
+       | {low_msgq_watermark, non_neg_integer()}
+       | {low_watermark, non_neg_integer()}
+       | {nodelay, boolean()}
+       | {port, inet:port_number()}
+       | {priority, integer()}
+       | {raw, non_neg_integer(), non_neg_integer(), binary()}
+       | {recbuf, non_neg_integer()}
+       | {send_timeout, timeout()}
+       | {send_timeout_close, boolean()}
+       | {sndbuf, non_neg_integer()}
+       | {tos, integer()}
+----
+
+Listen options.
+
+This does not represent the entirety of the options that can
+be set on the socket, but only the options that may be
+set independently of protocol implementation.
+
+=== opts() = [opt()]
+
+List of listen options.
+
+== Option descriptions
+
+
+None of the options are required.
+
+Please consult the `gen_tcp` and `inet` manuals for a more
+thorough description of these options. This manual only aims
+to provide a short description along with what the defaults
+are. Defaults may be different in Ranch compared to `gen_tcp`.
+Defaults are given next to the option name.
+
+backlog (1024)::
+       Max length of the queue of pending connections.
+buffer::
+       Size of the buffer used by the Erlang driver. Default is system-dependent.
+delay_send (false)::
+       Always queue packets before sending, to send fewer, larger packets over the network.
+dontroute (false)::
+       Don't send via a gateway, only send to directly connected hosts.
+exit_on_close (true)::
+       Disable to allow sending data after a close has been detected.
+fd::
+       File descriptor of the socket, if it was opened externally.
+high_msgq_watermark (8192)::
+       Limit in the amount of data in the socket message queue before the socket queue becomes busy.
+high_watermark (8192)::
+       Limit in the amount of data in the ERTS socket implementation's queue before the socket becomes busy.
+inet::
+       Set up the socket for IPv4.
+inet6::
+       Set up the socket for IPv6.
+ip::
+       Interface to listen on. Listen on all interfaces by default.
+keepalive (false)::
+       Enable sending of keep-alive messages.
+linger ({false, 0})::
+       Whether to wait and how long to flush data sent before closing the socket.
+low_msgq_watermark (4096)::
+       Amount of data in the socket message queue before the socket queue leaves busy state.
+low_watermark (4096)::
+       Amount of data in the ERTS socket implementation's queue before the socket leaves busy state.
+nodelay (true)::
+       Whether to enable TCP_NODELAY.
+port (0)::
+       TCP port number to listen on. 0 means a random port will be used.
+priority (0)::
+       Priority value for all packets to be sent by this socket.
+recbuf::
+       Minimum size of the socket's receive buffer. Default is system-dependent.
+send_timeout (30000)::
+       How long the send call may wait for confirmation before returning.
+send_timeout_close (true)::
+       Whether to close the socket when the confirmation wasn't received.
+sndbuf::
+       Minimum size of the socket's send buffer. Default is system-dependent.
+tos::
+       Value for the IP_TOS IP level option. Use with caution.
+
+In addition, the `raw` option can be used to set system-specific
+options by specifying the protocol level, the option number and
+the actual option value specified as a binary. This option is not
+portable. Use with caution.
+
+== Exports
+
+None.
diff --git a/deps/ranch/doc/src/manual/ranch_transport.asciidoc b/deps/ranch/doc/src/manual/ranch_transport.asciidoc
new file mode 100644 (file)
index 0000000..a9322f4
--- /dev/null
@@ -0,0 +1,194 @@
+= ranch_transport(3)
+
+== Name
+
+ranch_transport - behaviour for transport modules
+
+== Description
+
+The `ranch_transport` behaviour defines the interface used
+by Ranch transports.
+
+== Types
+
+=== sendfile_opts() = [{chunk_size, non_neg_integer()}]
+
+Options used by the sendfile function and callbacks.
+
+Allows configuring the chunk size, in bytes. Defaults to 8191 bytes.
+
+== Callbacks
+
+=== accept(LSocket, Timeout) -> {ok, CSocket} | {error, closed | timeout | atom()}
+
+LSocket = CSocket = any():: Listening socket.
+Timeout = timeout():: Accept timeout.
+
+Accept a connection on the given listening socket.
+
+The `accept_ack` callback will be used to initialize the socket
+after accepting the connection. This is most useful when the
+transport is not raw TCP, like with SSL for example.
+
+=== accept_ack(CSocket, Timeout) -> ok
+
+CSocket = any():: Socket for this connection.
+Timeout = timeout():: Ack timeout.
+
+Perform post-accept initialization of the connection.
+
+This function will be called by connection processes
+before performing any socket operation. It allows
+transports that require extra initialization to perform
+their task and make the socket ready to use.
+
+=== close(Socket) -> ok
+
+Socket = any():: Socket opened with listen/1 or accept/2.
+
+Close the given socket.
+
+=== controlling_process(Socket, Pid) -> ok | {error, closed | not_owner | atom()}
+
+Socket = any():: Socket opened with listen/1 or accept/2.
+Pid = pid():: Pid of the new owner of the socket.
+
+Change the controlling process for the given socket.
+
+The controlling process is the process that is allowed to
+perform operations on the socket, and that will receive
+messages from the socket when active mode is used. When
+the controlling process dies, the socket is closed.
+
+=== listen(TransOpts) -> {ok, LSocket} | {error, atom()}
+
+TransOpts = any():: Transport options.
+LSocket = any():: Listening socket.
+
+Listen for connections on the given port.
+
+The port is given as part of the transport options under
+the key `port`. Any other option is transport dependent.
+
+The socket returned by this call can then be used to
+accept connections. It is not possible to send or receive
+data from the listening socket.
+
+=== messages() -> {OK, Closed, Error}
+
+OK = Closed = Error = atom():: Tuple names.
+
+Return the atoms used to identify messages sent in active mode.
+
+=== name() -> Name
+
+Name = atom():: Transport module name.
+
+Return the name of the transport.
+
+=== peername(CSocket) -> {ok, {IP, Port}} | {error, atom()}
+
+CSocket = any():: Socket for this connection.
+IP = inet:ip_address():: IP of the remote endpoint.
+Port = inet:port_number():: Port of the remote endpoint.
+
+Return the IP and port of the remote endpoint.
+
+=== recv(CSocket, Length, Timeout) -> {ok, Packet} | {error, closed | timeout | atom()}
+
+CSocket = any():: Socket for this connection.
+Length = non_neg_integer():: Requested length.
+Timeout = timeout():: Receive timeout.
+Packet = iodata() | any():: Data received.
+
+Receive data from the given socket when in passive mode.
+
+Trying to receive data from a socket that is in active mode
+will return an error.
+
+A length of 0 will return any data available on the socket.
+
+While it is possible to use the timeout value `infinity`,
+this is highly discouraged as this could cause your process
+to get stuck waiting for data that will never come. This may
+happen when a socket becomes half-open due to a crash of the
+remote endpoint. Wi-Fi going down is another common culprit
+of this issue.
+
+=== send(CSocket, Packet) -> ok | {error, atom()}
+
+CSocket = any():: Socket for this connection.
+Packet = iodata():: Data to be sent.
+
+Send data to the given socket.
+
+=== sendfile(CSocket, File) -> sendfile(CSocket, File, 0, 0, [])
+
+Alias of `ranch_transport:sendfile/5`.
+
+=== sendfile(CSocket, File, Offset, Bytes) -> sendfile(CSocket, File, Offset, Bytes, [])
+
+Alias of `ranch_transport:sendfile/5`.
+
+=== sendfile(CSocket, File, Offset, Bytes, SfOpts) -> {ok, SentBytes} | {error, atom()}
+
+CSocket = any():: Socket for this connection.
+File = file:filename_all() | file:fd():: Filename or file descriptor for the file to be sent.
+Offset = non_neg_integer():: Begin sending at this position in the file.
+Bytes = non_neg_integer():: Send this many bytes.
+SentBytes = non_neg_integer():: This many bytes were sent.
+SfOpts = sendfile_opts():: Sendfile options.
+
+Send data from a file to the given socket.
+
+The file may be sent full or in parts, and may be specified
+by its filename or by an already open file descriptor.
+
+Transports that manipulate TCP directly may use the
+`file:sendfile/{2,4,5}` function, which calls the sendfile
+syscall where applicable (on Linux, for example). Other
+transports can use the `sendfile/6` function exported from
+this module.
+
+=== setopts(CSocket, SockOpts) -> ok | {error, atom()}
+
+CSocket = any():: Socket for this connection.
+SockOpts = any():: Socket options.
+
+Change options for the given socket.
+
+This is mainly useful for switching to active or passive mode
+or to set protocol-specific options.
+
+=== shutdown(CSocket, How) -> ok | {error, atom()}
+
+CSocket = any():: Socket for this connection.
+How = read | write | read_write:: Which side(s) of the socket to close.
+
+Immediately close the socket in one or two directions.
+
+=== sockname(Socket) -> {ok, {IP, Port}} | {error, atom()}
+
+Socket = any():: Socket opened with listen/1 or accept/2.
+IP = inet:ip_address():: IP of the local endpoint.
+Port = inet:port_number():: Port of the local endpoint.
+
+Return the IP and port of the local endpoint.
+
+== Exports
+
+=== sendfile(Transport, CSocket, File, Offset, Bytes, SfOpts) -> {ok, SentBytes} | {error, atom()}
+
+Transport = module():: Transport module for this socket.
+CSocket = any():: Socket for this connection.
+File = file:filename_all() | file:fd():: Filename or file descriptor for the file to be sent.
+Offset = non_neg_integer():: Begin sending at this position in the file.
+Bytes = non_neg_integer():: Send this many bytes.
+SentBytes = non_neg_integer():: This many bytes were sent.
+SfOpts = sendfile_opts():: Sendfile options.
+
+Send data from a file to the given socket.
+
+This function emulates the function `file:sendfile/{2,4,5}`
+and may be used when transports are not manipulating TCP
+directly.
diff --git a/deps/ranch/erlang.mk b/deps/ranch/erlang.mk
new file mode 100644 (file)
index 0000000..8930dfc
--- /dev/null
@@ -0,0 +1 @@
+include ../../erlang.mk
diff --git a/deps/ranch/examples/tcp_echo/Makefile b/deps/ranch/examples/tcp_echo/Makefile
new file mode 100644 (file)
index 0000000..60c08ab
--- /dev/null
@@ -0,0 +1,3 @@
+PROJECT = tcp_echo
+DEPS = ranch
+include ../../erlang.mk
diff --git a/deps/ranch/examples/tcp_echo/README.md b/deps/ranch/examples/tcp_echo/README.md
new file mode 100644 (file)
index 0000000..0a5fa1d
--- /dev/null
@@ -0,0 +1,27 @@
+Ranch TCP echo example
+======================
+
+To try this example, you need GNU `make` and `git` in your PATH.
+
+To build the example, run the following command:
+
+``` bash
+$ make
+```
+
+To start the release in the foreground:
+
+``` bash
+$ ./_rel/tcp_echo_example/bin/tcp_echo_example console
+```
+
+Then start a telnet session to port 5555:
+
+``` bash
+$ telnet localhost 5555
+```
+
+Type in a few words and see them echoed back.
+
+Be aware that there is a timeout of 5 seconds without receiving
+data before the example server disconnects your session.
diff --git a/deps/ranch/examples/tcp_echo/relx.config b/deps/ranch/examples/tcp_echo/relx.config
new file mode 100644 (file)
index 0000000..a850b71
--- /dev/null
@@ -0,0 +1,2 @@
+{release, {tcp_echo_example, "1"}, [tcp_echo]}.
+{extended_start_script, true}.
diff --git a/deps/ranch/examples/tcp_echo/src/echo_protocol.erl b/deps/ranch/examples/tcp_echo/src/echo_protocol.erl
new file mode 100644 (file)
index 0000000..5ed79b3
--- /dev/null
@@ -0,0 +1,24 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+-module(echo_protocol).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+init(Ref, Socket, Transport, _Opts = []) ->
+       ok = ranch:accept_ack(Ref),
+       loop(Socket, Transport).
+
+loop(Socket, Transport) ->
+       case Transport:recv(Socket, 0, 5000) of
+               {ok, Data} ->
+                       Transport:send(Socket, Data),
+                       loop(Socket, Transport);
+               _ ->
+                       ok = Transport:close(Socket)
+       end.
diff --git a/deps/ranch/examples/tcp_echo/src/tcp_echo.app.src b/deps/ranch/examples/tcp_echo/src/tcp_echo.app.src
new file mode 100644 (file)
index 0000000..af50890
--- /dev/null
@@ -0,0 +1,15 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+{application, tcp_echo, [
+       {description, "Ranch TCP echo example."},
+       {vsn, "1"},
+       {modules, []},
+       {registered, [tcp_echo_sup]},
+       {applications, [
+               kernel,
+               stdlib,
+               ranch
+       ]},
+       {mod, {tcp_echo_app, []}},
+       {env, []}
+]}.
diff --git a/deps/ranch/examples/tcp_echo/src/tcp_echo_app.erl b/deps/ranch/examples/tcp_echo/src/tcp_echo_app.erl
new file mode 100644 (file)
index 0000000..7fac685
--- /dev/null
@@ -0,0 +1,19 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+%% @private
+-module(tcp_echo_app).
+-behaviour(application).
+
+%% API.
+-export([start/2]).
+-export([stop/1]).
+
+%% API.
+
+start(_Type, _Args) ->
+       {ok, _} = ranch:start_listener(tcp_echo, 1,
+               ranch_tcp, [{port, 5555}], echo_protocol, []),
+       tcp_echo_sup:start_link().
+
+stop(_State) ->
+       ok.
diff --git a/deps/ranch/examples/tcp_echo/src/tcp_echo_sup.erl b/deps/ranch/examples/tcp_echo/src/tcp_echo_sup.erl
new file mode 100644 (file)
index 0000000..8f33593
--- /dev/null
@@ -0,0 +1,22 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+%% @private
+-module(tcp_echo_sup).
+-behaviour(supervisor).
+
+%% API.
+-export([start_link/0]).
+
+%% supervisor.
+-export([init/1]).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%% supervisor.
+
+init([]) ->
+       {ok, {{one_for_one, 10, 10}, []}}.
diff --git a/deps/ranch/examples/tcp_reverse/Makefile b/deps/ranch/examples/tcp_reverse/Makefile
new file mode 100644 (file)
index 0000000..b391e81
--- /dev/null
@@ -0,0 +1,3 @@
+PROJECT = tcp_reverse
+DEPS = ranch
+include ../../erlang.mk
diff --git a/deps/ranch/examples/tcp_reverse/README.md b/deps/ranch/examples/tcp_reverse/README.md
new file mode 100644 (file)
index 0000000..745ad2c
--- /dev/null
@@ -0,0 +1,33 @@
+Ranch TCP reverse example
+=========================
+
+This example uses a `gen_server` to handle a protocol to reverse input.
+See `reverse_protocol.erl` for the implementation. Documentation about
+this topic can be found in the guide:
+
+  http://ninenines.eu/docs/en/ranch/HEAD/guide/protocols/#using_gen_server
+
+To try this example, you need GNU `make` and `git` in your PATH.
+
+To build the example, run the following command:
+
+``` bash
+$ make
+```
+
+To start the release in the foreground:
+
+``` bash
+$ ./_rel/tcp_reverse_example/bin/tcp_reverse_example console
+```
+
+Then start a telnet session to port 5555:
+
+``` bash
+$ telnet localhost 5555
+```
+
+Type in a few words and see them reversed! Amazing!
+
+Be aware that there is a timeout of 5 seconds without receiving
+data before the example server disconnects your session.
diff --git a/deps/ranch/examples/tcp_reverse/relx.config b/deps/ranch/examples/tcp_reverse/relx.config
new file mode 100644 (file)
index 0000000..2a83916
--- /dev/null
@@ -0,0 +1,2 @@
+{release, {tcp_reverse_example, "1"}, [tcp_reverse]}.
+{extended_start_script, true}.
diff --git a/deps/ranch/examples/tcp_reverse/src/reverse_protocol.erl b/deps/ranch/examples/tcp_reverse/src/reverse_protocol.erl
new file mode 100644 (file)
index 0000000..6f7c770
--- /dev/null
@@ -0,0 +1,73 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+-module(reverse_protocol).
+-behaviour(gen_server).
+-behaviour(ranch_protocol).
+
+%% API.
+-export([start_link/4]).
+
+%% gen_server.
+-export([init/1]).
+-export([init/4]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-define(TIMEOUT, 5000).
+
+-record(state, {socket, transport}).
+
+%% API.
+
+start_link(Ref, Socket, Transport, Opts) ->
+       proc_lib:start_link(?MODULE, init, [Ref, Socket, Transport, Opts]).
+
+%% gen_server.
+
+%% This function is never called. We only define it so that
+%% we can use the -behaviour(gen_server) attribute.
+init([]) -> {ok, undefined}.
+
+init(Ref, Socket, Transport, _Opts = []) ->
+       ok = proc_lib:init_ack({ok, self()}),
+       ok = ranch:accept_ack(Ref),
+       ok = Transport:setopts(Socket, [{active, once}]),
+       gen_server:enter_loop(?MODULE, [],
+               #state{socket=Socket, transport=Transport},
+               ?TIMEOUT).
+
+handle_info({tcp, Socket, Data}, State=#state{
+               socket=Socket, transport=Transport}) ->
+       Transport:setopts(Socket, [{active, once}]),
+       Transport:send(Socket, reverse_binary(Data)),
+       {noreply, State, ?TIMEOUT};
+handle_info({tcp_closed, _Socket}, State) ->
+       {stop, normal, State};
+handle_info({tcp_error, _, Reason}, State) ->
+       {stop, Reason, State};
+handle_info(timeout, State) ->
+       {stop, normal, State};
+handle_info(_Info, State) ->
+       {stop, normal, State}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ok, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+
+%% Internal.
+
+reverse_binary(B) when is_binary(B) ->
+       [list_to_binary(lists:reverse(binary_to_list(
+               binary:part(B, {0, byte_size(B)-2})
+       ))), "\r\n"].
diff --git a/deps/ranch/examples/tcp_reverse/src/tcp_reverse.app.src b/deps/ranch/examples/tcp_reverse/src/tcp_reverse.app.src
new file mode 100644 (file)
index 0000000..46cfca7
--- /dev/null
@@ -0,0 +1,15 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+{application, tcp_reverse, [
+       {description, "Ranch TCP reverse example."},
+       {vsn, "1"},
+       {modules, []},
+       {registered, [tcp_reverse_sup]},
+       {applications, [
+               kernel,
+               stdlib,
+               ranch
+       ]},
+       {mod, {tcp_reverse_app, []}},
+       {env, []}
+]}.
diff --git a/deps/ranch/examples/tcp_reverse/src/tcp_reverse_app.erl b/deps/ranch/examples/tcp_reverse/src/tcp_reverse_app.erl
new file mode 100644 (file)
index 0000000..106e527
--- /dev/null
@@ -0,0 +1,19 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+%% @private
+-module(tcp_reverse_app).
+-behaviour(application).
+
+%% API.
+-export([start/2]).
+-export([stop/1]).
+
+%% API.
+
+start(_Type, _Args) ->
+    {ok, _} = ranch:start_listener(tcp_reverse, 10,
+               ranch_tcp, [{port, 5555}], reverse_protocol, []),
+    tcp_reverse_sup:start_link().
+
+stop(_State) ->
+       ok.
diff --git a/deps/ranch/examples/tcp_reverse/src/tcp_reverse_sup.erl b/deps/ranch/examples/tcp_reverse/src/tcp_reverse_sup.erl
new file mode 100644 (file)
index 0000000..4264d18
--- /dev/null
@@ -0,0 +1,22 @@
+%% Feel free to use, reuse and abuse the code in this file.
+
+%% @private
+-module(tcp_reverse_sup).
+-behaviour(supervisor).
+
+%% API.
+-export([start_link/0]).
+
+%% supervisor.
+-export([init/1]).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%% supervisor.
+
+init([]) ->
+       {ok, {{one_for_one, 10, 10}, []}}.
diff --git a/deps/ranch/src/ranch.app.src b/deps/ranch/src/ranch.app.src
new file mode 100644 (file)
index 0000000..3dbb86d
--- /dev/null
@@ -0,0 +1,9 @@
+{application,ranch,
+             [{description,"Socket acceptor pool for TCP protocols."},
+              {vsn,"1.2.1"},
+              {id,"git"},
+              {modules,[]},
+              {registered,[ranch_sup,ranch_server]},
+              {applications,[kernel,stdlib]},
+              {mod,{ranch_app,[]}},
+              {env,[]}]}.
diff --git a/deps/ranch/src/ranch.erl b/deps/ranch/src/ranch.erl
new file mode 100644 (file)
index 0000000..32a0be6
--- /dev/null
@@ -0,0 +1,196 @@
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch).
+
+-export([start_listener/6]).
+-export([stop_listener/1]).
+-export([child_spec/6]).
+-export([accept_ack/1]).
+-export([remove_connection/1]).
+-export([get_addr/1]).
+-export([get_port/1]).
+-export([get_max_connections/1]).
+-export([set_max_connections/2]).
+-export([get_protocol_options/1]).
+-export([set_protocol_options/2]).
+-export([filter_options/3]).
+-export([set_option_default/3]).
+-export([require/1]).
+
+-type max_conns() :: non_neg_integer() | infinity.
+-export_type([max_conns/0]).
+
+-type opt() :: {ack_timeout, timeout()}
+       | {connection_type, worker | supervisor}
+       | {max_connections, max_conns()}
+       | {shutdown, timeout() | brutal_kill}
+       | {socket, any()}.
+-export_type([opt/0]).
+
+-type ref() :: any().
+-export_type([ref/0]).
+
+-spec start_listener(ref(), non_neg_integer(), module(), any(), module(), any())
+       -> supervisor:startchild_ret().
+start_listener(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts)
+               when is_integer(NbAcceptors) andalso is_atom(Transport)
+               andalso is_atom(Protocol) ->
+       _ = code:ensure_loaded(Transport),
+       %% @todo Remove in Ranch 2.0 and simply require ssl.
+       _ = ensure_ssl(Transport),
+       case erlang:function_exported(Transport, name, 0) of
+               false ->
+                       {error, badarg};
+               true ->
+                       Res = supervisor:start_child(ranch_sup, child_spec(Ref, NbAcceptors,
+                                       Transport, TransOpts, Protocol, ProtoOpts)),
+                       Socket = proplists:get_value(socket, TransOpts),
+                       case Res of
+                               {ok, Pid} when Socket =/= undefined ->
+                                       %% Give ownership of the socket to ranch_acceptors_sup
+                                       %% to make sure the socket stays open as long as the
+                                       %% listener is alive. If the socket closes however there
+                                       %% will be no way to recover because we don't know how
+                                       %% to open it again.
+                                       Children = supervisor:which_children(Pid),
+                                       {_, AcceptorsSup, _, _}
+                                               = lists:keyfind(ranch_acceptors_sup, 1, Children),
+                                       %%% Note: the catch is here because SSL crashes when you change
+                                       %%% the controlling process of a listen socket because of a bug.
+                                       %%% The bug will be fixed in R16.
+                                       catch Transport:controlling_process(Socket, AcceptorsSup);
+                               _ ->
+                                       ok
+                       end,
+                       Res
+       end.
+
+-spec stop_listener(ref()) -> ok | {error, not_found}.
+stop_listener(Ref) ->
+       case supervisor:terminate_child(ranch_sup, {ranch_listener_sup, Ref}) of
+               ok ->
+                       _ = supervisor:delete_child(ranch_sup, {ranch_listener_sup, Ref}),
+                       ranch_server:cleanup_listener_opts(Ref);
+               {error, Reason} ->
+                       {error, Reason}
+       end.
+
+-spec child_spec(ref(), non_neg_integer(), module(), any(), module(), any())
+       -> supervisor:child_spec().
+child_spec(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts)
+               when is_integer(NbAcceptors) andalso is_atom(Transport)
+               andalso is_atom(Protocol) ->
+       %% @todo Remove in Ranch 2.0 and simply require ssl.
+       _ = ensure_ssl(Transport),
+       {{ranch_listener_sup, Ref}, {ranch_listener_sup, start_link, [
+               Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts
+       ]}, permanent, infinity, supervisor, [ranch_listener_sup]}.
+
+%% @todo Remove in Ranch 2.0 and simply require ssl.
+ensure_ssl(ranch_ssl) ->
+       require([crypto, asn1, public_key, ssl]);
+ensure_ssl(_) ->
+       ok.
+
+-spec accept_ack(ref()) -> ok.
+accept_ack(Ref) ->
+       receive {shoot, Ref, Transport, Socket, AckTimeout} ->
+               Transport:accept_ack(Socket, AckTimeout)
+       end.
+
+-spec remove_connection(ref()) -> ok.
+remove_connection(Ref) ->
+       ConnsSup = ranch_server:get_connections_sup(Ref),
+       ConnsSup ! {remove_connection, Ref},
+       ok.
+
+-spec get_addr(ref()) -> {inet:ip_address(), inet:port_number()}.
+get_addr(Ref) ->
+       ranch_server:get_addr(Ref).
+
+-spec get_port(ref()) -> inet:port_number().
+get_port(Ref) ->
+       {_, Port} = get_addr(Ref),
+       Port.
+
+-spec get_max_connections(ref()) -> max_conns().
+get_max_connections(Ref) ->
+       ranch_server:get_max_connections(Ref).
+
+-spec set_max_connections(ref(), max_conns()) -> ok.
+set_max_connections(Ref, MaxConnections) ->
+       ranch_server:set_max_connections(Ref, MaxConnections).
+
+-spec get_protocol_options(ref()) -> any().
+get_protocol_options(Ref) ->
+       ranch_server:get_protocol_options(Ref).
+
+-spec set_protocol_options(ref(), any()) -> ok.
+set_protocol_options(Ref, Opts) ->
+       ranch_server:set_protocol_options(Ref, Opts).
+
+-spec filter_options([inet | inet6 | {atom(), any()} | {raw, any(), any(), any()}],
+       [atom()], Acc) -> Acc when Acc :: [any()].
+filter_options(UserOptions, AllowedKeys, DefaultOptions) ->
+       AllowedOptions = filter_user_options(UserOptions, AllowedKeys),
+       lists:foldl(fun merge_options/2, DefaultOptions, AllowedOptions).
+
+%% 2-tuple options.
+filter_user_options([Opt = {Key, _}|Tail], AllowedKeys) ->
+       case lists:member(Key, AllowedKeys) of
+               true ->
+                       [Opt|filter_user_options(Tail, AllowedKeys)];
+               false ->
+                       filter_options_warning(Opt),
+                       filter_user_options(Tail, AllowedKeys)
+       end;
+%% Special option forms.
+filter_user_options([inet|Tail], AllowedKeys) ->
+       [inet|filter_user_options(Tail, AllowedKeys)];
+filter_user_options([inet6|Tail], AllowedKeys) ->
+       [inet6|filter_user_options(Tail, AllowedKeys)];
+filter_user_options([Opt = {raw, _, _, _}|Tail], AllowedKeys) ->
+       [Opt|filter_user_options(Tail, AllowedKeys)];
+filter_user_options([Opt|Tail], AllowedKeys) ->
+       filter_options_warning(Opt),
+       filter_user_options(Tail, AllowedKeys);
+filter_user_options([], _) ->
+       [].
+
+filter_options_warning(Opt) ->
+       error_logger:warning_msg("Transport option ~p unknown or invalid.~n", [Opt]).
+
+merge_options({Key, _} = Option, OptionList) ->
+       lists:keystore(Key, 1, OptionList, Option);
+merge_options(Option, OptionList) ->
+       [Option|OptionList].
+
+-spec set_option_default(Opts, atom(), any())
+       -> Opts when Opts :: [{atom(), any()}].
+set_option_default(Opts, Key, Value) ->
+       case lists:keymember(Key, 1, Opts) of
+               true -> Opts;
+               false -> [{Key, Value}|Opts]
+       end.
+
+-spec require([atom()]) -> ok.
+require([]) ->
+       ok;
+require([App|Tail]) ->
+       case application:start(App) of
+               ok -> ok;
+               {error, {already_started, App}} -> ok
+       end,
+       require(Tail).
diff --git a/deps/ranch/src/ranch_acceptor.erl b/deps/ranch/src/ranch_acceptor.erl
new file mode 100644 (file)
index 0000000..3b28369
--- /dev/null
@@ -0,0 +1,58 @@
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_acceptor).
+
+-export([start_link/3]).
+-export([loop/3]).
+
+-spec start_link(inet:socket(), module(), pid())
+       -> {ok, pid()}.
+start_link(LSocket, Transport, ConnsSup) ->
+       Pid = spawn_link(?MODULE, loop, [LSocket, Transport, ConnsSup]),
+       {ok, Pid}.
+
+-spec loop(inet:socket(), module(), pid()) -> no_return().
+loop(LSocket, Transport, ConnsSup) ->
+       _ = case Transport:accept(LSocket, infinity) of
+               {ok, CSocket} ->
+                       case Transport:controlling_process(CSocket, ConnsSup) of
+                               ok ->
+                                       %% This call will not return until process has been started
+                                       %% AND we are below the maximum number of connections.
+                                       ranch_conns_sup:start_protocol(ConnsSup, CSocket);
+                               {error, _} ->
+                                       Transport:close(CSocket)
+                       end;
+               %% Reduce the accept rate if we run out of file descriptors.
+               %% We can't accept anymore anyway, so we might as well wait
+               %% a little for the situation to resolve itself.
+               {error, emfile} ->
+                       receive after 100 -> ok end;
+               %% We want to crash if the listening socket got closed.
+               {error, Reason} when Reason =/= closed ->
+                       ok
+       end,
+       flush(),
+       ?MODULE:loop(LSocket, Transport, ConnsSup).
+
+flush() ->
+       receive Msg ->
+               error_logger:error_msg(
+                       "Ranch acceptor received unexpected message: ~p~n",
+                       [Msg]),
+               flush()
+       after 0 ->
+               ok
+       end.
diff --git a/deps/ranch/src/ranch_acceptors_sup.erl b/deps/ranch/src/ranch_acceptors_sup.erl
new file mode 100644 (file)
index 0000000..497ccfe
--- /dev/null
@@ -0,0 +1,56 @@
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_acceptors_sup).
+-behaviour(supervisor).
+
+-export([start_link/4]).
+-export([init/1]).
+
+-spec start_link(ranch:ref(), non_neg_integer(), module(), any())
+       -> {ok, pid()}.
+start_link(Ref, NbAcceptors, Transport, TransOpts) ->
+       supervisor:start_link(?MODULE, [Ref, NbAcceptors, Transport, TransOpts]).
+
+init([Ref, NbAcceptors, Transport, TransOpts]) ->
+       ConnsSup = ranch_server:get_connections_sup(Ref),
+       LSocket = case proplists:get_value(socket, TransOpts) of
+               undefined ->
+                       TransOpts2 = proplists:delete(ack_timeout,
+                               proplists:delete(connection_type,
+                               proplists:delete(max_connections,
+                               proplists:delete(shutdown,
+                               proplists:delete(socket, TransOpts))))),
+                       case Transport:listen(TransOpts2) of
+                               {ok, Socket} -> Socket;
+                               {error, Reason} -> listen_error(Ref, Transport, TransOpts2, Reason)
+                       end;
+               Socket ->
+                       Socket
+       end,
+       {ok, Addr} = Transport:sockname(LSocket),
+       ranch_server:set_addr(Ref, Addr),
+       Procs = [
+               {{acceptor, self(), N}, {ranch_acceptor, start_link, [
+                       LSocket, Transport, ConnsSup
+               ]}, permanent, brutal_kill, worker, []}
+                       || N <- lists:seq(1, NbAcceptors)],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+
+-spec listen_error(any(), module(), any(), atom()) -> no_return().
+listen_error(Ref, Transport, TransOpts2, Reason) ->
+       error_logger:error_msg(
+               "Failed to start Ranch listener ~p in ~p:listen(~p) for reason ~p (~s)~n",
+               [Ref, Transport, TransOpts2, Reason, inet:format_error(Reason)]),
+       exit({listen_error, Ref, Reason}).
similarity index 69%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_app.erl
rename to deps/ranch/src/ranch_app.erl
index c7cefe4ca342d086c000009ed75d2e54f1bfd970..d9721515ffa501520a4b6560dfbf2b817b9ff8f8 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
-%% @private
--module(cowboy_app).
+-module(ranch_app).
 -behaviour(application).
 
--export([start/2, stop/1, profile_output/0]). %% API.
+-export([start/2]).
+-export([stop/1]).
+-export([profile_output/0]).
 
--type application_start_type() :: normal
-       | {takeover, node()} | {failover, node()}.
+start(_, _) ->
+       _ = consider_profiling(),
+       ranch_sup:start_link().
 
-%% API.
-
--spec start(application_start_type(), any()) -> {ok, pid()}.
-start(_Type, _Args) ->
-       consider_profiling(),
-       cowboy_sup:start_link().
-
--spec stop(any()) -> ok.
-stop(_State) ->
+stop(_) ->
        ok.
 
 -spec profile_output() -> ok.
@@ -40,9 +34,6 @@ profile_output() ->
        eprof:log("total.profile"),
        eprof:analyze(total).
 
-%% Internal.
-
--spec consider_profiling() -> profiling | not_profiling.
 consider_profiling() ->
        case application:get_env(profile) of
                {ok, true} ->
diff --git a/deps/ranch/src/ranch_conns_sup.erl b/deps/ranch/src/ranch_conns_sup.erl
new file mode 100644 (file)
index 0000000..008b02b
--- /dev/null
@@ -0,0 +1,284 @@
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% Make sure to never reload this module outside a release upgrade,
+%% as calling l(ranch_conns_sup) twice will kill the process and all
+%% the currently open connections.
+-module(ranch_conns_sup).
+
+%% API.
+-export([start_link/6]).
+-export([start_protocol/2]).
+-export([active_connections/1]).
+
+%% Supervisor internals.
+-export([init/7]).
+-export([system_continue/3]).
+-export([system_terminate/4]).
+-export([system_code_change/4]).
+
+-type conn_type() :: worker | supervisor.
+-type shutdown() :: brutal_kill | timeout().
+
+-record(state, {
+       parent = undefined :: pid(),
+       ref :: ranch:ref(),
+       conn_type :: conn_type(),
+       shutdown :: shutdown(),
+       transport = undefined :: module(),
+       protocol = undefined :: module(),
+       opts :: any(),
+       ack_timeout :: timeout(),
+       max_conns = undefined :: ranch:max_conns()
+}).
+
+%% API.
+
+-spec start_link(ranch:ref(), conn_type(), shutdown(), module(),
+       timeout(), module()) -> {ok, pid()}.
+start_link(Ref, ConnType, Shutdown, Transport, AckTimeout, Protocol) ->
+       proc_lib:start_link(?MODULE, init,
+               [self(), Ref, ConnType, Shutdown, Transport, AckTimeout, Protocol]).
+
+%% We can safely assume we are on the same node as the supervisor.
+%%
+%% We can also safely avoid having a monitor and a timeout here
+%% because only three things can happen:
+%%  *  The supervisor died; rest_for_one strategy killed all acceptors
+%%     so this very calling process is going to di--
+%%  *  There's too many connections, the supervisor will resume the
+%%     acceptor only when we get below the limit again.
+%%  *  The supervisor is overloaded, there's either too many acceptors
+%%     or the max_connections limit is too large. It's better if we
+%%     don't keep accepting connections because this leaves
+%%     more room for the situation to be resolved.
+%%
+%% We do not need the reply, we only need the ok from the supervisor
+%% to continue. The supervisor sends its own pid when the acceptor can
+%% continue.
+-spec start_protocol(pid(), inet:socket()) -> ok.
+start_protocol(SupPid, Socket) ->
+       SupPid ! {?MODULE, start_protocol, self(), Socket},
+       receive SupPid -> ok end.
+
+%% We can't make the above assumptions here. This function might be
+%% called from anywhere.
+-spec active_connections(pid()) -> non_neg_integer().
+active_connections(SupPid) ->
+       Tag = erlang:monitor(process, SupPid),
+       catch erlang:send(SupPid, {?MODULE, active_connections, self(), Tag},
+               [noconnect]),
+       receive
+               {Tag, Ret} ->
+                       erlang:demonitor(Tag, [flush]),
+                       Ret;
+               {'DOWN', Tag, _, _, noconnection} ->
+                       exit({nodedown, node(SupPid)});
+               {'DOWN', Tag, _, _, Reason} ->
+                       exit(Reason)
+       after 5000 ->
+               erlang:demonitor(Tag, [flush]),
+               exit(timeout)
+       end.
+
+%% Supervisor internals.
+
+-spec init(pid(), ranch:ref(), conn_type(), shutdown(),
+       module(), timeout(), module()) -> no_return().
+init(Parent, Ref, ConnType, Shutdown, Transport, AckTimeout, Protocol) ->
+       process_flag(trap_exit, true),
+       ok = ranch_server:set_connections_sup(Ref, self()),
+       MaxConns = ranch_server:get_max_connections(Ref),
+       Opts = ranch_server:get_protocol_options(Ref),
+       ok = proc_lib:init_ack(Parent, {ok, self()}),
+       loop(#state{parent=Parent, ref=Ref, conn_type=ConnType,
+               shutdown=Shutdown, transport=Transport, protocol=Protocol,
+               opts=Opts, ack_timeout=AckTimeout, max_conns=MaxConns}, 0, 0, []).
+
+loop(State=#state{parent=Parent, ref=Ref, conn_type=ConnType,
+               transport=Transport, protocol=Protocol, opts=Opts,
+               max_conns=MaxConns}, CurConns, NbChildren, Sleepers) ->
+       receive
+               {?MODULE, start_protocol, To, Socket} ->
+                       try Protocol:start_link(Ref, Socket, Transport, Opts) of
+                               {ok, Pid} ->
+                                       shoot(State, CurConns, NbChildren, Sleepers, To, Socket, Pid, Pid);
+                               {ok, SupPid, ProtocolPid} when ConnType =:= supervisor ->
+                                       shoot(State, CurConns, NbChildren, Sleepers, To, Socket, SupPid, ProtocolPid);
+                               Ret ->
+                                       To ! self(),
+                                       error_logger:error_msg(
+                                               "Ranch listener ~p connection process start failure; "
+                                               "~p:start_link/4 returned: ~999999p~n",
+                                               [Ref, Protocol, Ret]),
+                                       Transport:close(Socket),
+                                       loop(State, CurConns, NbChildren, Sleepers)
+                       catch Class:Reason ->
+                               To ! self(),
+                               error_logger:error_msg(
+                                       "Ranch listener ~p connection process start failure; "
+                                       "~p:start_link/4 crashed with reason: ~p:~999999p~n",
+                                       [Ref, Protocol, Class, Reason]),
+                               loop(State, CurConns, NbChildren, Sleepers)
+                       end;
+               {?MODULE, active_connections, To, Tag} ->
+                       To ! {Tag, CurConns},
+                       loop(State, CurConns, NbChildren, Sleepers);
+               %% Remove a connection from the count of connections.
+               {remove_connection, Ref} ->
+                       loop(State, CurConns - 1, NbChildren, Sleepers);
+               %% Upgrade the max number of connections allowed concurrently.
+               %% We resume all sleeping acceptors if this number increases.
+               {set_max_conns, MaxConns2} when MaxConns2 > MaxConns ->
+                       _ = [To ! self() || To <- Sleepers],
+                       loop(State#state{max_conns=MaxConns2},
+                               CurConns, NbChildren, []);
+               {set_max_conns, MaxConns2} ->
+                       loop(State#state{max_conns=MaxConns2},
+                               CurConns, NbChildren, Sleepers);
+               %% Upgrade the protocol options.
+               {set_opts, Opts2} ->
+                       loop(State#state{opts=Opts2},
+                               CurConns, NbChildren, Sleepers);
+               {'EXIT', Parent, Reason} ->
+                       terminate(State, Reason, NbChildren);
+               {'EXIT', Pid, Reason} when Sleepers =:= [] ->
+                       report_error(Ref, Protocol, Pid, Reason),
+                       erase(Pid),
+                       loop(State, CurConns - 1, NbChildren - 1, Sleepers);
+               %% Resume a sleeping acceptor if needed.
+               {'EXIT', Pid, Reason} ->
+                       report_error(Ref, Protocol, Pid, Reason),
+                       erase(Pid),
+                       [To|Sleepers2] = Sleepers,
+                       To ! self(),
+                       loop(State, CurConns - 1, NbChildren - 1, Sleepers2);
+               {system, From, Request} ->
+                       sys:handle_system_msg(Request, From, Parent, ?MODULE, [],
+                               {State, CurConns, NbChildren, Sleepers});
+               %% Calls from the supervisor module.
+               {'$gen_call', {To, Tag}, which_children} ->
+                       Pids = get_keys(true),
+                       Children = [{Protocol, Pid, ConnType, [Protocol]}
+                               || Pid <- Pids, is_pid(Pid)],
+                       To ! {Tag, Children},
+                       loop(State, CurConns, NbChildren, Sleepers);
+               {'$gen_call', {To, Tag}, count_children} ->
+                       Counts = case ConnType of
+                               worker -> [{supervisors, 0}, {workers, NbChildren}];
+                               supervisor -> [{supervisors, NbChildren}, {workers, 0}]
+                       end,
+                       Counts2 = [{specs, 1}, {active, NbChildren}|Counts],
+                       To ! {Tag, Counts2},
+                       loop(State, CurConns, NbChildren, Sleepers);
+               {'$gen_call', {To, Tag}, _} ->
+                       To ! {Tag, {error, ?MODULE}},
+                       loop(State, CurConns, NbChildren, Sleepers);
+               Msg ->
+                       error_logger:error_msg(
+                               "Ranch listener ~p received unexpected message ~p~n",
+                               [Ref, Msg])
+       end.
+
+shoot(State=#state{ref=Ref, transport=Transport, ack_timeout=AckTimeout, max_conns=MaxConns},
+               CurConns, NbChildren, Sleepers, To, Socket, SupPid, ProtocolPid) ->
+       case Transport:controlling_process(Socket, ProtocolPid) of
+               ok ->
+                       ProtocolPid ! {shoot, Ref, Transport, Socket, AckTimeout},
+                       put(SupPid, true),
+                       CurConns2 = CurConns + 1,
+                       if CurConns2 < MaxConns ->
+                                       To ! self(),
+                                       loop(State, CurConns2, NbChildren + 1, Sleepers);
+                               true ->
+                                       loop(State, CurConns2, NbChildren + 1, [To|Sleepers])
+                       end;
+               {error, _} ->
+                       Transport:close(Socket),
+                       %% Only kill the supervised pid, because the connection's pid,
+                       %% when different, is supposed to be sitting under it and linked.
+                       exit(SupPid, kill),
+                       loop(State, CurConns, NbChildren, Sleepers)
+       end.
+
+-spec terminate(#state{}, any(), non_neg_integer()) -> no_return().
+%% Kill all children and then exit. We unlink first to avoid
+%% getting a message for each child getting killed.
+terminate(#state{shutdown=brutal_kill}, Reason, _) ->
+       Pids = get_keys(true),
+       _ = [begin
+               unlink(P),
+               exit(P, kill)
+       end || P <- Pids],
+       exit(Reason);
+%% Attempt to gracefully shutdown all children.
+terminate(#state{shutdown=Shutdown}, Reason, NbChildren) ->
+       shutdown_children(),
+       _ = if
+               Shutdown =:= infinity ->
+                       ok;
+               true ->
+                       erlang:send_after(Shutdown, self(), kill)
+       end,
+       wait_children(NbChildren),
+       exit(Reason).
+
+%% Monitor processes so we can know which ones have shutdown
+%% before the timeout. Unlink so we avoid receiving an extra
+%% message. Then send a shutdown exit signal.
+shutdown_children() ->
+       Pids = get_keys(true),
+       _ = [begin
+               monitor(process, P),
+               unlink(P),
+               exit(P, shutdown)
+       end || P <- Pids],
+       ok.
+
+wait_children(0) ->
+       ok;
+wait_children(NbChildren) ->
+       receive
+        {'DOWN', _, process, Pid, _} ->
+                       _ = erase(Pid),
+                       wait_children(NbChildren - 1);
+               kill ->
+                       Pids = get_keys(true),
+                       _ = [exit(P, kill) || P <- Pids],
+                       ok
+       end.
+
+system_continue(_, _, {State, CurConns, NbChildren, Sleepers}) ->
+       loop(State, CurConns, NbChildren, Sleepers).
+
+-spec system_terminate(any(), _, _, _) -> no_return().
+system_terminate(Reason, _, _, {State, _, NbChildren, _}) ->
+       terminate(State, Reason, NbChildren).
+
+system_code_change(Misc, _, _, _) ->
+       {ok, Misc}.
+
+%% We use ~999999p here instead of ~w because the latter doesn't
+%% support printable strings.
+report_error(_, _, _, normal) ->
+       ok;
+report_error(_, _, _, shutdown) ->
+       ok;
+report_error(_, _, _, {shutdown, _}) ->
+       ok;
+report_error(Ref, Protocol, Pid, Reason) ->
+       error_logger:error_msg(
+               "Ranch listener ~p had connection process started with "
+               "~p:start_link/4 at ~p exit with reason: ~999999p~n",
+               [Ref, Protocol, Pid, Reason]).
diff --git a/deps/ranch/src/ranch_listener_sup.erl b/deps/ranch/src/ranch_listener_sup.erl
new file mode 100644 (file)
index 0000000..294cd1d
--- /dev/null
@@ -0,0 +1,42 @@
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_listener_sup).
+-behaviour(supervisor).
+
+-export([start_link/6]).
+-export([init/1]).
+
+-spec start_link(ranch:ref(), non_neg_integer(), module(), any(), module(), any())
+       -> {ok, pid()}.
+start_link(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts) ->
+       MaxConns = proplists:get_value(max_connections, TransOpts, 1024),
+       ranch_server:set_new_listener_opts(Ref, MaxConns, ProtoOpts),
+       supervisor:start_link(?MODULE, {
+               Ref, NbAcceptors, Transport, TransOpts, Protocol
+       }).
+
+init({Ref, NbAcceptors, Transport, TransOpts, Protocol}) ->
+       AckTimeout = proplists:get_value(ack_timeout, TransOpts, 5000),
+       ConnType = proplists:get_value(connection_type, TransOpts, worker),
+       Shutdown = proplists:get_value(shutdown, TransOpts, 5000),
+       ChildSpecs = [
+               {ranch_conns_sup, {ranch_conns_sup, start_link,
+                               [Ref, ConnType, Shutdown, Transport, AckTimeout, Protocol]},
+                       permanent, infinity, supervisor, [ranch_conns_sup]},
+               {ranch_acceptors_sup, {ranch_acceptors_sup, start_link,
+                               [Ref, NbAcceptors, Transport, TransOpts]},
+                       permanent, infinity, supervisor, [ranch_acceptors_sup]}
+       ],
+       {ok, {{rest_for_one, 1, 5}, ChildSpecs}}.
diff --git a/deps/ranch/src/ranch_protocol.erl b/deps/ranch/src/ranch_protocol.erl
new file mode 100644 (file)
index 0000000..1f70962
--- /dev/null
@@ -0,0 +1,24 @@
+%% Copyright (c) 2012-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_protocol).
+
+%% Start a new connection process for the given socket.
+-callback start_link(
+               Ref::ranch:ref(),
+               Socket::any(),
+               Transport::module(),
+               ProtocolOptions::any())
+       -> {ok, ConnectionPid::pid()}
+       | {ok, SupPid::pid(), ConnectionPid::pid()}.
diff --git a/deps/ranch/src/ranch_server.erl b/deps/ranch/src/ranch_server.erl
new file mode 100644 (file)
index 0000000..55ebafb
--- /dev/null
@@ -0,0 +1,154 @@
+%% Copyright (c) 2012-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_server).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+-export([set_new_listener_opts/3]).
+-export([cleanup_listener_opts/1]).
+-export([set_connections_sup/2]).
+-export([get_connections_sup/1]).
+-export([set_addr/2]).
+-export([get_addr/1]).
+-export([set_max_connections/2]).
+-export([get_max_connections/1]).
+-export([set_protocol_options/2]).
+-export([get_protocol_options/1]).
+-export([count_connections/1]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-define(TAB, ?MODULE).
+
+-type monitors() :: [{{reference(), pid()}, any()}].
+-record(state, {
+       monitors = [] :: monitors()
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+-spec set_new_listener_opts(ranch:ref(), ranch:max_conns(), any()) -> ok.
+set_new_listener_opts(Ref, MaxConns, Opts) ->
+       gen_server:call(?MODULE, {set_new_listener_opts, Ref, MaxConns, Opts}).
+
+-spec cleanup_listener_opts(ranch:ref()) -> ok.
+cleanup_listener_opts(Ref) ->
+       _ = ets:delete(?TAB, {addr, Ref}),
+       _ = ets:delete(?TAB, {max_conns, Ref}),
+       _ = ets:delete(?TAB, {opts, Ref}),
+       ok.
+
+-spec set_connections_sup(ranch:ref(), pid()) -> ok.
+set_connections_sup(Ref, Pid) ->
+       true = gen_server:call(?MODULE, {set_connections_sup, Ref, Pid}),
+       ok.
+
+-spec get_connections_sup(ranch:ref()) -> pid().
+get_connections_sup(Ref) ->
+       ets:lookup_element(?TAB, {conns_sup, Ref}, 2).
+
+-spec set_addr(ranch:ref(), {inet:ip_address(), inet:port_number()}) -> ok.
+set_addr(Ref, Addr) ->
+       gen_server:call(?MODULE, {set_addr, Ref, Addr}).
+
+-spec get_addr(ranch:ref()) -> {inet:ip_address(), inet:port_number()}.
+get_addr(Ref) ->
+       ets:lookup_element(?TAB, {addr, Ref}, 2).
+
+-spec set_max_connections(ranch:ref(), ranch:max_conns()) -> ok.
+set_max_connections(Ref, MaxConnections) ->
+       gen_server:call(?MODULE, {set_max_conns, Ref, MaxConnections}).
+
+-spec get_max_connections(ranch:ref()) -> ranch:max_conns().
+get_max_connections(Ref) ->
+       ets:lookup_element(?TAB, {max_conns, Ref}, 2).
+
+-spec set_protocol_options(ranch:ref(), any()) -> ok.
+set_protocol_options(Ref, ProtoOpts) ->
+       gen_server:call(?MODULE, {set_opts, Ref, ProtoOpts}).
+
+-spec get_protocol_options(ranch:ref()) -> any().
+get_protocol_options(Ref) ->
+       ets:lookup_element(?TAB, {opts, Ref}, 2).
+
+-spec count_connections(ranch:ref()) -> non_neg_integer().
+count_connections(Ref) ->
+       ranch_conns_sup:active_connections(get_connections_sup(Ref)).
+
+%% gen_server.
+
+init([]) ->
+       Monitors = [{{erlang:monitor(process, Pid), Pid}, Ref} ||
+               [Ref, Pid] <- ets:match(?TAB, {{conns_sup, '$1'}, '$2'})],
+       {ok, #state{monitors=Monitors}}.
+
+handle_call({set_new_listener_opts, Ref, MaxConns, Opts}, _, State) ->
+       ets:insert(?TAB, {{max_conns, Ref}, MaxConns}),
+       ets:insert(?TAB, {{opts, Ref}, Opts}),
+       {reply, ok, State};
+handle_call({set_connections_sup, Ref, Pid}, _,
+               State=#state{monitors=Monitors}) ->
+       case ets:insert_new(?TAB, {{conns_sup, Ref}, Pid}) of
+               true ->
+                       MonitorRef = erlang:monitor(process, Pid),
+                       {reply, true,
+                               State#state{monitors=[{{MonitorRef, Pid}, Ref}|Monitors]}};
+               false ->
+                       {reply, false, State}
+       end;
+handle_call({set_addr, Ref, Addr}, _, State) ->
+       true = ets:insert(?TAB, {{addr, Ref}, Addr}),
+       {reply, ok, State};
+handle_call({set_max_conns, Ref, MaxConns}, _, State) ->
+       ets:insert(?TAB, {{max_conns, Ref}, MaxConns}),
+       ConnsSup = get_connections_sup(Ref),
+       ConnsSup ! {set_max_conns, MaxConns},
+       {reply, ok, State};
+handle_call({set_opts, Ref, Opts}, _, State) ->
+       ets:insert(?TAB, {{opts, Ref}, Opts}),
+       ConnsSup = get_connections_sup(Ref),
+       ConnsSup ! {set_opts, Opts},
+       {reply, ok, State};
+handle_call(_Request, _From, State) ->
+       {reply, ignore, State}.
+
+handle_cast(_Request, State) ->
+       {noreply, State}.
+
+handle_info({'DOWN', MonitorRef, process, Pid, _},
+               State=#state{monitors=Monitors}) ->
+       {_, Ref} = lists:keyfind({MonitorRef, Pid}, 1, Monitors),
+       true = ets:delete(?TAB, {conns_sup, Ref}),
+       Monitors2 = lists:keydelete({MonitorRef, Pid}, 1, Monitors),
+       {noreply, State#state{monitors=Monitors2}};
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
diff --git a/deps/ranch/src/ranch_ssl.erl b/deps/ranch/src/ranch_ssl.erl
new file mode 100644 (file)
index 0000000..e9bbff2
--- /dev/null
@@ -0,0 +1,226 @@
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_ssl).
+-behaviour(ranch_transport).
+
+-export([name/0]).
+-export([secure/0]).
+-export([messages/0]).
+-export([listen/1]).
+-export([listen_options/0]).
+-export([accept/2]).
+-export([accept_ack/2]).
+-export([connect/3]).
+-export([connect/4]).
+-export([recv/3]).
+-export([send/2]).
+-export([sendfile/2]).
+-export([sendfile/4]).
+-export([sendfile/5]).
+-export([setopts/2]).
+-export([controlling_process/2]).
+-export([peername/1]).
+-export([sockname/1]).
+-export([shutdown/2]).
+-export([close/1]).
+
+-type ssl_opt() :: {alpn_preferred_protocols, [binary()]}
+       | {cacertfile, string()}
+       | {cacerts, [public_key:der_encoded()]}
+       | {cert, public_key:der_encoded()}
+       | {certfile, string()}
+       | {ciphers, [ssl:erl_cipher_suite()] | string()}
+       | {client_renegotiation, boolean()}
+       | {crl_cache, {module(), {internal | any(), list()}}}
+       | {crl_check, boolean() | peer | best_effort}
+       | {depth, 0..255}
+       | {dh, public_key:der_encoded()}
+       | {dhfile, string()}
+       | {fail_if_no_peer_cert, boolean()}
+       | {hibernate_after, integer() | undefined}
+       | {honor_cipher_order, boolean()}
+       | {key, {'RSAPrivateKey' | 'DSAPrivateKey' | 'PrivateKeyInfo', public_key:der_encoded()}}
+       | {keyfile, string()}
+       | {log_alert, boolean()}
+       | {next_protocols_advertised, [binary()]}
+       | {partial_chain, fun(([public_key:der_encoded()]) -> {trusted_ca, public_key:der_encoded()} | unknown_ca)}
+       | {password, string()}
+       | {psk_identity, string()}
+       | {reuse_session, fun()}
+       | {reuse_sessions, boolean()}
+       | {secure_renegotiate, boolean()}
+       | {sni_fun, fun()}
+       | {sni_hosts, [{string(), ssl_opt()}]}
+       | {user_lookup_fun, {fun(), any()}}
+       | {verify, ssl:verify_type()}
+       | {verify_fun, {fun(), any()}}
+       | {versions, [atom()]}.
+-export_type([ssl_opt/0]).
+
+-type opt() :: ranch_tcp:opt() | ssl_opt().
+-export_type([opt/0]).
+
+-type opts() :: [opt()].
+-export_type([opts/0]).
+
+name() -> ssl.
+
+-spec secure() -> boolean().
+secure() ->
+    true.
+
+messages() -> {ssl, ssl_closed, ssl_error}.
+
+-spec listen(opts()) -> {ok, ssl:sslsocket()} | {error, atom()}.
+listen(Opts) ->
+       true = lists:keymember(cert, 1, Opts)
+               orelse lists:keymember(certfile, 1, Opts),
+       Opts2 = ranch:set_option_default(Opts, backlog, 1024),
+       Opts3 = ranch:set_option_default(Opts2, ciphers, unbroken_cipher_suites()),
+       Opts4 = ranch:set_option_default(Opts3, nodelay, true),
+       Opts5 = ranch:set_option_default(Opts4, send_timeout, 30000),
+       Opts6 = ranch:set_option_default(Opts5, send_timeout_close, true),
+       %% We set the port to 0 because it is given in the Opts directly.
+       %% The port in the options takes precedence over the one in the
+       %% first argument.
+       ssl:listen(0, ranch:filter_options(Opts6, listen_options(),
+               [binary, {active, false}, {packet, raw},
+                       {reuseaddr, true}, {nodelay, true}])).
+
+listen_options() ->
+       [alpn_preferred_protocols, cacertfile, cacerts, cert, certfile,
+               ciphers, client_renegotiation, crl_cache, crl_check, depth,
+               dh, dhfile, fail_if_no_peer_cert, hibernate_after, honor_cipher_order,
+               key, keyfile, log_alert, next_protocols_advertised, partial_chain,
+               password, psk_identity, reuse_session, reuse_sessions, secure_renegotiate,
+               sni_fun, sni_hosts, user_lookup_fun, verify, verify_fun, versions
+               |ranch_tcp:listen_options()].
+
+-spec accept(ssl:sslsocket(), timeout())
+       -> {ok, ssl:sslsocket()} | {error, closed | timeout | atom()}.
+accept(LSocket, Timeout) ->
+       ssl:transport_accept(LSocket, Timeout).
+
+-spec accept_ack(ssl:sslsocket(), timeout()) -> ok.
+accept_ack(CSocket, Timeout) ->
+       case ssl:ssl_accept(CSocket, Timeout) of
+               ok ->
+                       ok;
+               %% Garbage was most likely sent to the socket, don't error out.
+               {error, {tls_alert, _}} ->
+                       ok = close(CSocket),
+                       exit(normal);
+               %% Socket most likely stopped responding, don't error out.
+               {error, Reason} when Reason =:= timeout; Reason =:= closed ->
+                       ok = close(CSocket),
+                       exit(normal);
+               {error, Reason} ->
+                       ok = close(CSocket),
+                       error(Reason)
+       end.
+
+%% @todo Probably filter Opts?
+-spec connect(inet:ip_address() | inet:hostname(),
+       inet:port_number(), any())
+       -> {ok, inet:socket()} | {error, atom()}.
+connect(Host, Port, Opts) when is_integer(Port) ->
+       ssl:connect(Host, Port,
+               Opts ++ [binary, {active, false}, {packet, raw}]).
+
+%% @todo Probably filter Opts?
+-spec connect(inet:ip_address() | inet:hostname(),
+       inet:port_number(), any(), timeout())
+       -> {ok, inet:socket()} | {error, atom()}.
+connect(Host, Port, Opts, Timeout) when is_integer(Port) ->
+       ssl:connect(Host, Port,
+               Opts ++ [binary, {active, false}, {packet, raw}],
+               Timeout).
+
+-spec recv(ssl:sslsocket(), non_neg_integer(), timeout())
+       -> {ok, any()} | {error, closed | atom()}.
+recv(Socket, Length, Timeout) ->
+       ssl:recv(Socket, Length, Timeout).
+
+-spec send(ssl:sslsocket(), iodata()) -> ok | {error, atom()}.
+send(Socket, Packet) ->
+       ssl:send(Socket, Packet).
+
+-spec sendfile(ssl:sslsocket(), file:name_all() | file:fd())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+sendfile(Socket, Filename) ->
+       sendfile(Socket, Filename, 0, 0, []).
+
+-spec sendfile(ssl:sslsocket(), file:name_all() | file:fd(),
+               non_neg_integer(), non_neg_integer())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+sendfile(Socket, File, Offset, Bytes) ->
+       sendfile(Socket, File, Offset, Bytes, []).
+
+%% Unlike with TCP, no syscall can be used here, so sending files
+%% through SSL will be much slower in comparison. Note that unlike
+%% file:sendfile/5 this function accepts either a file or a file name.
+-spec sendfile(ssl:sslsocket(), file:name_all() | file:fd(),
+               non_neg_integer(), non_neg_integer(), ranch_transport:sendfile_opts())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+sendfile(Socket, File, Offset, Bytes, Opts) ->
+       ranch_transport:sendfile(?MODULE, Socket, File, Offset, Bytes, Opts).
+
+%% @todo Probably filter Opts?
+-spec setopts(ssl:sslsocket(), list()) -> ok | {error, atom()}.
+setopts(Socket, Opts) ->
+       ssl:setopts(Socket, Opts).
+
+-spec controlling_process(ssl:sslsocket(), pid())
+       -> ok | {error, closed | not_owner | atom()}.
+controlling_process(Socket, Pid) ->
+       ssl:controlling_process(Socket, Pid).
+
+-spec peername(ssl:sslsocket())
+       -> {ok, {inet:ip_address(), inet:port_number()}} | {error, atom()}.
+peername(Socket) ->
+       ssl:peername(Socket).
+
+-spec sockname(ssl:sslsocket())
+       -> {ok, {inet:ip_address(), inet:port_number()}} | {error, atom()}.
+sockname(Socket) ->
+       ssl:sockname(Socket).
+
+-spec shutdown(ssl:sslsocket(), read | write | read_write)
+       -> ok | {error, atom()}.
+shutdown(Socket, How) ->
+       ssl:shutdown(Socket, How).
+
+-spec close(ssl:sslsocket()) -> ok.
+close(Socket) ->
+       ssl:close(Socket).
+
+%% Internal.
+
+%% Unfortunately the implementation of elliptic-curve ciphers that has
+%% been introduced in R16B01 is incomplete.  Depending on the particular
+%% client, this can cause the TLS handshake to break during key
+%% agreement.  Depending on the ssl application version, this function
+%% returns a list of all cipher suites that are supported by default,
+%% minus the elliptic-curve ones.
+-spec unbroken_cipher_suites() -> [ssl:erl_cipher_suite()].
+unbroken_cipher_suites() ->
+       case proplists:get_value(ssl_app, ssl:versions()) of
+               Version when Version =:= "5.3"; Version =:= "5.3.1" ->
+                       lists:filter(fun(Suite) ->
+                               string:left(atom_to_list(element(1, Suite)), 4) =/= "ecdh"
+                       end, ssl:cipher_suites());
+               _ ->
+                       ssl:cipher_suites()
+       end.
similarity index 56%
rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/proper_SUITE.erl
rename to deps/ranch/src/ranch_sup.erl
index 440aa5f11e06f8334ffdb057e749282ad0f4684e..e078fd7a49b92336894831df86099ad3bae89997 100644 (file)
@@ -1,4 +1,4 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
 %%
 %% Permission to use, copy, modify, and/or distribute this software for any
 %% purpose with or without fee is hereby granted, provided that the above
 %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
--module(proper_SUITE).
+-module(ranch_sup).
+-behaviour(supervisor).
 
--include_lib("common_test/include/ct.hrl").
+-export([start_link/0]).
+-export([init/1]).
 
--export([all/0, groups/0]). %% ct.
--export([dispatcher_split_host/1]). %% cowboy_dispatcher.
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
 
-%% ct.
-
-all() ->
-       [{group, dispatcher}].
-
-groups() ->
-       [{dispatcher, [], [dispatcher_split_host]}].
-
-%% cowboy_dispatcher.
-
-dispatcher_split_host(_Config) ->
-       true = proper:quickcheck(dispatcher_prop:prop_split_host_symmetric(),
-               [{on_output, fun(Format, Data) ->
-                       io:format(user, Format, Data), %% Console.
-                       io:format(Format, Data) %% Logs.
-               end}]).
+init([]) ->
+       ranch_server = ets:new(ranch_server, [
+               ordered_set, public, named_table]),
+       Procs = [
+               {ranch_server, {ranch_server, start_link, []},
+                       permanent, 5000, worker, [ranch_server]}
+       ],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
diff --git a/deps/ranch/src/ranch_tcp.erl b/deps/ranch/src/ranch_tcp.erl
new file mode 100644 (file)
index 0000000..e8429a5
--- /dev/null
@@ -0,0 +1,204 @@
+%% Copyright (c) 2011-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_tcp).
+-behaviour(ranch_transport).
+
+-export([name/0]).
+-export([secure/0]).
+-export([messages/0]).
+-export([listen/1]).
+-export([listen_options/0]).
+-export([accept/2]).
+-export([accept_ack/2]).
+-export([connect/3]).
+-export([connect/4]).
+-export([recv/3]).
+-export([send/2]).
+-export([sendfile/2]).
+-export([sendfile/4]).
+-export([sendfile/5]).
+-export([setopts/2]).
+-export([controlling_process/2]).
+-export([peername/1]).
+-export([sockname/1]).
+-export([shutdown/2]).
+-export([close/1]).
+
+-type opt() :: {backlog, non_neg_integer()}
+       | {buffer, non_neg_integer()}
+       | {delay_send, boolean()}
+       | {dontroute, boolean()}
+       | {exit_on_close, boolean()}
+       | {fd, non_neg_integer()}
+       | {high_msgq_watermark, non_neg_integer()}
+       | {high_watermark, non_neg_integer()}
+       | inet
+       | inet6
+       | {ip, inet:ip_address()}
+       | {keepalive, boolean()}
+       | {linger, {boolean(), non_neg_integer()}}
+       | {low_msgq_watermark, non_neg_integer()}
+       | {low_watermark, non_neg_integer()}
+       | {nodelay, boolean()}
+       | {port, inet:port_number()}
+       | {priority, integer()}
+       | {raw, non_neg_integer(), non_neg_integer(), binary()}
+       | {recbuf, non_neg_integer()}
+       | {send_timeout, timeout()}
+       | {send_timeout_close, boolean()}
+       | {sndbuf, non_neg_integer()}
+       | {tos, integer()}.
+-export_type([opt/0]).
+
+-type opts() :: [opt()].
+-export_type([opts/0]).
+
+name() -> tcp.
+
+-spec secure() -> boolean().
+secure() ->
+    false.
+
+messages() -> {tcp, tcp_closed, tcp_error}.
+
+-spec listen(opts()) -> {ok, inet:socket()} | {error, atom()}.
+listen(Opts) ->
+       Opts2 = ranch:set_option_default(Opts, backlog, 1024),
+       Opts3 = ranch:set_option_default(Opts2, nodelay, true),
+       Opts4 = ranch:set_option_default(Opts3, send_timeout, 30000),
+       Opts5 = ranch:set_option_default(Opts4, send_timeout_close, true),
+       %% We set the port to 0 because it is given in the Opts directly.
+       %% The port in the options takes precedence over the one in the
+       %% first argument.
+       gen_tcp:listen(0, ranch:filter_options(Opts5, listen_options(),
+               [binary, {active, false}, {packet, raw}, {reuseaddr, true}])).
+
+%% 'inet' and 'inet6' are also allowed but they are handled
+%% specifically as they do not have 2-tuple equivalents.
+%%
+%% The 4-tuple 'raw' option is also handled specifically.
+listen_options() ->
+       [backlog, buffer, delay_send, dontroute, exit_on_close, fd,
+               high_msgq_watermark, high_watermark, ip,
+               keepalive, linger, low_msgq_watermark,
+               low_watermark, nodelay, port, priority, recbuf,
+               send_timeout, send_timeout_close, sndbuf, tos].
+
+-spec accept(inet:socket(), timeout())
+       -> {ok, inet:socket()} | {error, closed | timeout | atom()}.
+accept(LSocket, Timeout) ->
+       gen_tcp:accept(LSocket, Timeout).
+
+-spec accept_ack(inet:socket(), timeout()) -> ok.
+accept_ack(_, _) ->
+       ok.
+
+%% @todo Probably filter Opts?
+-spec connect(inet:ip_address() | inet:hostname(),
+       inet:port_number(), any())
+       -> {ok, inet:socket()} | {error, atom()}.
+connect(Host, Port, Opts) when is_integer(Port) ->
+       gen_tcp:connect(Host, Port,
+               Opts ++ [binary, {active, false}, {packet, raw}]).
+
+%% @todo Probably filter Opts?
+-spec connect(inet:ip_address() | inet:hostname(),
+       inet:port_number(), any(), timeout())
+       -> {ok, inet:socket()} | {error, atom()}.
+connect(Host, Port, Opts, Timeout) when is_integer(Port) ->
+       gen_tcp:connect(Host, Port,
+               Opts ++ [binary, {active, false}, {packet, raw}],
+               Timeout).
+
+-spec recv(inet:socket(), non_neg_integer(), timeout())
+       -> {ok, any()} | {error, closed | atom()}.
+recv(Socket, Length, Timeout) ->
+       gen_tcp:recv(Socket, Length, Timeout).
+
+-spec send(inet:socket(), iodata()) -> ok | {error, atom()}.
+send(Socket, Packet) ->
+       gen_tcp:send(Socket, Packet).
+
+-spec sendfile(inet:socket(), file:name_all() | file:fd())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+sendfile(Socket, Filename) ->
+       sendfile(Socket, Filename, 0, 0, []).
+
+-spec sendfile(inet:socket(), file:name_all() | file:fd(), non_neg_integer(),
+               non_neg_integer())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+sendfile(Socket, File, Offset, Bytes) ->
+       sendfile(Socket, File, Offset, Bytes, []).
+
+-spec sendfile(inet:socket(), file:name_all() | file:fd(), non_neg_integer(),
+               non_neg_integer(), [{chunk_size, non_neg_integer()}])
+       -> {ok, non_neg_integer()} | {error, atom()}.
+sendfile(Socket, Filename, Offset, Bytes, Opts)
+               when is_list(Filename) orelse is_atom(Filename)
+               orelse is_binary(Filename) ->
+       case file:open(Filename, [read, raw, binary]) of
+               {ok, RawFile} ->
+                       try sendfile(Socket, RawFile, Offset, Bytes, Opts) of
+                               Result -> Result
+                       after
+                               ok = file:close(RawFile)
+                       end;
+               {error, _} = Error ->
+                       Error
+       end;
+sendfile(Socket, RawFile, Offset, Bytes, Opts) ->
+       Opts2 = case Opts of
+               [] -> [{chunk_size, 16#1FFF}];
+               _ -> Opts
+       end,
+       try file:sendfile(RawFile, Socket, Offset, Bytes, Opts2) of
+               Result -> Result
+       catch
+               error:{badmatch, {error, enotconn}} ->
+                       %% file:sendfile/5 might fail by throwing a
+                       %% {badmatch, {error, enotconn}}. This is because its
+                       %% implementation fails with a badmatch in
+                       %% prim_file:sendfile/10 if the socket is not connected.
+                       {error, closed}
+       end.
+
+%% @todo Probably filter Opts?
+-spec setopts(inet:socket(), list()) -> ok | {error, atom()}.
+setopts(Socket, Opts) ->
+       inet:setopts(Socket, Opts).
+
+-spec controlling_process(inet:socket(), pid())
+       -> ok | {error, closed | not_owner | atom()}.
+controlling_process(Socket, Pid) ->
+       gen_tcp:controlling_process(Socket, Pid).
+
+-spec peername(inet:socket())
+       -> {ok, {inet:ip_address(), inet:port_number()}} | {error, atom()}.
+peername(Socket) ->
+       inet:peername(Socket).
+
+-spec sockname(inet:socket())
+       -> {ok, {inet:ip_address(), inet:port_number()}} | {error, atom()}.
+sockname(Socket) ->
+       inet:sockname(Socket).
+
+-spec shutdown(inet:socket(), read | write | read_write)
+       -> ok | {error, atom()}.
+shutdown(Socket, How) ->
+       gen_tcp:shutdown(Socket, How).
+
+-spec close(inet:socket()) -> ok.
+close(Socket) ->
+       gen_tcp:close(Socket).
diff --git a/deps/ranch/src/ranch_transport.erl b/deps/ranch/src/ranch_transport.erl
new file mode 100644 (file)
index 0000000..873bffe
--- /dev/null
@@ -0,0 +1,141 @@
+%% Copyright (c) 2012-2015, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_transport).
+
+-export([sendfile/6]).
+
+-type socket() :: any().
+-type opts() :: any().
+-type sendfile_opts() :: [{chunk_size, non_neg_integer()}].
+-export_type([sendfile_opts/0]).
+
+-callback name() -> atom().
+-callback secure() -> boolean().
+-callback messages() -> {OK::atom(), Closed::atom(), Error::atom()}.
+-callback listen(opts()) -> {ok, socket()} | {error, atom()}.
+-callback accept(socket(), timeout())
+       -> {ok, socket()} | {error, closed | timeout | atom()}.
+-callback accept_ack(socket(), timeout()) -> ok.
+-callback connect(string(), inet:port_number(), opts())
+       -> {ok, socket()} | {error, atom()}.
+-callback connect(string(), inet:port_number(), opts(), timeout())
+       -> {ok, socket()} | {error, atom()}.
+-callback recv(socket(), non_neg_integer(), timeout())
+       -> {ok, any()} | {error, closed | timeout | atom()}.
+-callback send(socket(), iodata()) -> ok | {error, atom()}.
+-callback sendfile(socket(), file:name() | file:fd())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+-callback sendfile(socket(), file:name() | file:fd(), non_neg_integer(),
+               non_neg_integer()) -> {ok, non_neg_integer()} | {error, atom()}.
+-callback sendfile(socket(), file:name() | file:fd(), non_neg_integer(),
+               non_neg_integer(), sendfile_opts())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+-callback setopts(socket(), opts()) -> ok | {error, atom()}.
+-callback controlling_process(socket(), pid())
+       -> ok | {error, closed | not_owner | atom()}.
+-callback peername(socket())
+       -> {ok, {inet:ip_address(), inet:port_number()}} | {error, atom()}.
+-callback sockname(socket())
+       -> {ok, {inet:ip_address(), inet:port_number()}} | {error, atom()}.
+-callback shutdown(socket(), read | write | read_write)
+       -> ok | {error, atom()}.
+-callback close(socket()) -> ok.
+
+%% A fallback for transports that don't have a native sendfile implementation.
+%% Note that the ordering of arguments is different from file:sendfile/5 and
+%% that this function accepts either a raw file or a file name.
+-spec sendfile(module(), socket(), file:filename_all() | file:fd(),
+               non_neg_integer(), non_neg_integer(), sendfile_opts())
+       -> {ok, non_neg_integer()} | {error, atom()}.
+sendfile(Transport, Socket, Filename, Offset, Bytes, Opts)
+               when is_list(Filename) orelse is_atom(Filename)
+               orelse is_binary(Filename) ->
+       ChunkSize = chunk_size(Opts),
+       case file:open(Filename, [read, raw, binary]) of
+               {ok, RawFile} ->
+                       _ = case Offset of
+                               0 ->
+                                       ok;
+                               _ ->
+                                       {ok, _} = file:position(RawFile, {bof, Offset})
+                       end,
+                       try
+                               sendfile_loop(Transport, Socket, RawFile, Bytes, 0, ChunkSize)
+                       after
+                               ok = file:close(RawFile)
+                       end;
+               {error, _Reason} = Error ->
+                       Error
+       end;
+sendfile(Transport, Socket, RawFile, Offset, Bytes, Opts) ->
+       ChunkSize = chunk_size(Opts),
+       Initial2 = case file:position(RawFile, {cur, 0}) of
+               {ok, Offset} ->
+                       Offset;
+               {ok, Initial} ->
+                       {ok, _} = file:position(RawFile, {bof, Offset}),
+                       Initial
+               end,
+       case sendfile_loop(Transport, Socket, RawFile, Bytes, 0, ChunkSize) of
+               {ok, _Sent} = Result ->
+                       {ok, _} = file:position(RawFile, {bof, Initial2}),
+                       Result;
+               {error, _Reason} = Error ->
+                       Error
+       end.
+
+-spec chunk_size(sendfile_opts()) -> pos_integer().
+chunk_size(Opts) ->
+       case lists:keyfind(chunk_size, 1, Opts) of
+               {chunk_size, ChunkSize}
+                               when is_integer(ChunkSize) andalso ChunkSize > 0 ->
+                       ChunkSize;
+               {chunk_size, 0} ->
+                       16#1FFF;
+               false ->
+                       16#1FFF
+       end.
+
+-spec sendfile_loop(module(), socket(), file:fd(), non_neg_integer(),
+               non_neg_integer(), pos_integer())
+       -> {ok, non_neg_integer()} | {error, term()}.
+sendfile_loop(_Transport, _Socket, _RawFile, Sent, Sent, _ChunkSize)
+               when Sent =/= 0 ->
+       %% All requested data has been read and sent, return number of bytes sent.
+       {ok, Sent};
+sendfile_loop(Transport, Socket, RawFile, Bytes, Sent, ChunkSize) ->
+       ReadSize = read_size(Bytes, Sent, ChunkSize),
+       case file:read(RawFile, ReadSize) of
+               {ok, IoData} ->
+                       case Transport:send(Socket, IoData) of
+                               ok ->
+                                       Sent2 = iolist_size(IoData) + Sent,
+                                       sendfile_loop(Transport, Socket, RawFile, Bytes, Sent2,
+                                               ChunkSize);
+                               {error, _Reason} = Error ->
+                                       Error
+                       end;
+               eof ->
+                       {ok, Sent};
+               {error, _Reason} = Error ->
+                       Error
+       end.
+
+-spec read_size(non_neg_integer(), non_neg_integer(), non_neg_integer()) ->
+       non_neg_integer().
+read_size(0, _Sent, ChunkSize) ->
+       ChunkSize;
+read_size(Bytes, Sent, ChunkSize) ->
+       min(Bytes - Sent, ChunkSize).
similarity index 73%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/COPYING
rename to deps/sockjs/COPYING
index 4f1c4632259076a4edced8c77ec7fe7bcf31483e..1168d24373aedacf1e1951045842cebe7a10b903 100644 (file)
@@ -5,6 +5,8 @@ with the exception of following files:
    from Mochiweb project (https://github.com/mochi/mochiweb) and
    covered by LICENSE-MIT-Mochiweb.
 
+ * src/sockjs_pmod_pt.erl, which is from OTP's release of pmod_transform
+   (https://github.com/erlang/pmod_transform) and covered by LICENSE-EPL-OTP.
+
  * rebar, which is a compiled binary from Rebar project
    (https://github.com/basho/rebar) and covered by LICENSE-APL2-Rebar.
-
diff --git a/deps/sockjs/LICENSE-EPL-OTP b/deps/sockjs/LICENSE-EPL-OTP
new file mode 100644 (file)
index 0000000..2257751
--- /dev/null
@@ -0,0 +1,286 @@
+ERLANG PUBLIC LICENSE
+Version 1.1
+
+1. Definitions.
+
+1.1. ``Contributor'' means each entity that creates or contributes to
+the creation of Modifications.
+
+1.2. ``Contributor Version'' means the combination of the Original
+Code, prior Modifications used by a Contributor, and the Modifications
+made by that particular Contributor.
+
+1.3. ``Covered Code'' means the Original Code or Modifications or the
+combination of the Original Code and Modifications, in each case
+including portions thereof.
+
+1.4. ``Electronic Distribution Mechanism'' means a mechanism generally
+accepted in the software development community for the electronic
+transfer of data.
+
+1.5. ``Executable'' means Covered Code in any form other than Source
+Code.
+
+1.6. ``Initial Developer'' means the individual or entity identified
+as the Initial Developer in the Source Code notice required by Exhibit
+A.
+
+1.7. ``Larger Work'' means a work which combines Covered Code or
+portions thereof with code not governed by the terms of this License.
+
+1.8. ``License'' means this document.
+
+1.9. ``Modifications'' means any addition to or deletion from the
+substance or structure of either the Original Code or any previous
+Modifications. When Covered Code is released as a series of files, a
+Modification is:
+
+A. Any addition to or deletion from the contents of a file containing
+   Original Code or previous Modifications. 
+
+B. Any new file that contains any part of the Original Code or
+   previous Modifications. 
+
+1.10. ``Original Code'' means Source Code of computer software code
+which is described in the Source Code notice required by Exhibit A as
+Original Code, and which, at the time of its release under this
+License is not already Covered Code governed by this License.
+
+1.11. ``Source Code'' means the preferred form of the Covered Code for
+making modifications to it, including all modules it contains, plus
+any associated interface definition files, scripts used to control
+compilation and installation of an Executable, or a list of source
+code differential comparisons against either the Original Code or
+another well known, available Covered Code of the Contributor's
+choice. The Source Code can be in a compressed or archival form,
+provided the appropriate decompression or de-archiving software is
+widely available for no charge.
+
+1.12. ``You'' means an individual or a legal entity exercising rights
+under, and complying with all of the terms of, this License. For legal
+entities,``You'' includes any entity which controls, is controlled by,
+or is under common control with You. For purposes of this definition,
+``control'' means (a) the power, direct or indirect, to cause the
+direction or management of such entity, whether by contract or
+otherwise, or (b) ownership of fifty percent (50%) or more of the
+outstanding shares or beneficial ownership of such entity.
+
+2. Source Code License.
+
+2.1. The Initial Developer Grant.
+The Initial Developer hereby grants You a world-wide, royalty-free,
+non-exclusive license, subject to third party intellectual property
+claims:
+
+(a) to use, reproduce, modify, display, perform, sublicense and
+    distribute the Original Code (or portions thereof) with or without
+    Modifications, or as part of a Larger Work; and 
+
+(b) under patents now or hereafter owned or controlled by Initial
+    Developer, to make, have made, use and sell (``Utilize'') the
+    Original Code (or portions thereof), but solely to the extent that
+    any such patent is reasonably necessary to enable You to Utilize
+    the Original Code (or portions thereof) and not to any greater
+    extent that may be necessary to Utilize further Modifications or
+    combinations. 
+
+2.2. Contributor Grant.
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license, subject to third party intellectual property
+claims:
+
+(a) to use, reproduce, modify, display, perform, sublicense and
+    distribute the Modifications created by such Contributor (or
+    portions thereof) either on an unmodified basis, with other
+    Modifications, as Covered Code or as part of a Larger Work; and 
+
+(b) under patents now or hereafter owned or controlled by Contributor,
+    to Utilize the Contributor Version (or portions thereof), but
+    solely to the extent that any such patent is reasonably necessary
+    to enable You to Utilize the Contributor Version (or portions
+    thereof), and not to any greater extent that may be necessary to
+    Utilize further Modifications or combinations. 
+
+3. Distribution Obligations.
+
+3.1. Application of License.
+The Modifications which You contribute are governed by the terms of
+this License, including without limitation Section 2.2. The Source
+Code version of Covered Code may be distributed only under the terms
+of this License, and You must include a copy of this License with
+every copy of the Source Code You distribute. You may not offer or
+impose any terms on any Source Code version that alters or restricts
+the applicable version of this License or the recipients' rights
+hereunder. However, You may include an additional document offering
+the additional rights described in Section 3.5. 
+
+3.2. Availability of Source Code.
+Any Modification which You contribute must be made available in Source
+Code form under the terms of this License either on the same media as
+an Executable version or via an accepted Electronic Distribution
+Mechanism to anyone to whom you made an Executable version available;
+and if made available via Electronic Distribution Mechanism, must
+remain available for at least twelve (12) months after the date it
+initially became available, or at least six (6) months after a
+subsequent version of that particular Modification has been made
+available to such recipients. You are responsible for ensuring that
+the Source Code version remains available even if the Electronic
+Distribution Mechanism is maintained by a third party.
+
+3.3. Description of Modifications.
+You must cause all Covered Code to which you contribute to contain a
+file documenting the changes You made to create that Covered Code and
+the date of any change. You must include a prominent statement that
+the Modification is derived, directly or indirectly, from Original
+Code provided by the Initial Developer and including the name of the
+Initial Developer in (a) the Source Code, and (b) in any notice in an
+Executable version or related documentation in which You describe the
+origin or ownership of the Covered Code.
+
+3.4. Intellectual Property Matters
+
+(a) Third Party Claims.
+    If You have knowledge that a party claims an intellectual property
+    right in particular functionality or code (or its utilization
+    under this License), you must include a text file with the source
+    code distribution titled ``LEGAL'' which describes the claim and
+    the party making the claim in sufficient detail that a recipient
+    will know whom to contact. If you obtain such knowledge after You
+    make Your Modification available as described in Section 3.2, You
+    shall promptly modify the LEGAL file in all copies You make
+    available thereafter and shall take other steps (such as notifying
+    appropriate mailing lists or newsgroups) reasonably calculated to
+    inform those who received the Covered Code that new knowledge has
+    been obtained. 
+
+(b) Contributor APIs.
+    If Your Modification is an application programming interface and
+    You own or control patents which are reasonably necessary to
+    implement that API, you must also include this information in the
+    LEGAL file. 
+
+3.5. Required Notices.
+You must duplicate the notice in Exhibit A in each file of the Source
+Code, and this License in any documentation for the Source Code, where
+You describe recipients' rights relating to Covered Code. If You
+created one or more Modification(s), You may add your name as a
+Contributor to the notice described in Exhibit A. If it is not
+possible to put such notice in a particular Source Code file due to
+its structure, then you must include such notice in a location (such
+as a relevant directory file) where a user would be likely to look for
+such a notice. You may choose to offer, and to charge a fee for,
+warranty, support, indemnity or liability obligations to one or more
+recipients of Covered Code. However, You may do so only on Your own
+behalf, and not on behalf of the Initial Developer or any
+Contributor. You must make it absolutely clear than any such warranty,
+support, indemnity or liability obligation is offered by You alone,
+and You hereby agree to indemnify the Initial Developer and every
+Contributor for any liability incurred by the Initial Developer or
+such Contributor as a result of warranty, support, indemnity or
+liability terms You offer.
+
+3.6. Distribution of Executable Versions.
+You may distribute Covered Code in Executable form only if the
+requirements of Section 3.1-3.5 have been met for that Covered Code,
+and if You include a notice stating that the Source Code version of
+the Covered Code is available under the terms of this License,
+including a description of how and where You have fulfilled the
+obligations of Section 3.2. The notice must be conspicuously included
+in any notice in an Executable version, related documentation or
+collateral in which You describe recipients' rights relating to the
+Covered Code. You may distribute the Executable version of Covered
+Code under a license of Your choice, which may contain terms different
+from this License, provided that You are in compliance with the terms
+of this License and that the license for the Executable version does
+not attempt to limit or alter the recipient's rights in the Source
+Code version from the rights set forth in this License. If You
+distribute the Executable version under a different license You must
+make it absolutely clear that any terms which differ from this License
+are offered by You alone, not by the Initial Developer or any
+Contributor. You hereby agree to indemnify the Initial Developer and
+every Contributor for any liability incurred by the Initial Developer
+or such Contributor as a result of any such terms You offer.
+
+3.7. Larger Works.
+You may create a Larger Work by combining Covered Code with other code
+not governed by the terms of this License and distribute the Larger
+Work as a single product. In such a case, You must make sure the
+requirements of this License are fulfilled for the Covered Code.
+
+4. Inability to Comply Due to Statute or Regulation.
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Code due to statute
+or regulation then You must: (a) comply with the terms of this License
+to the maximum extent possible; and (b) describe the limitations and
+the code they affect. Such description must be included in the LEGAL
+file described in Section 3.4 and must be included with all
+distributions of the Source Code. Except to the extent prohibited by
+statute or regulation, such description must be sufficiently detailed
+for a recipient of ordinary skill to be able to understand it.
+
+5. Application of this License.
+
+This License applies to code to which the Initial Developer has
+attached the notice in Exhibit A, and to related Covered Code.
+
+6. CONNECTION TO MOZILLA PUBLIC LICENSE
+
+This Erlang License is a derivative work of the Mozilla Public
+License, Version 1.0. It contains terms which differ from the Mozilla
+Public License, Version 1.0.
+
+7. DISCLAIMER OF WARRANTY.
+
+COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN ``AS IS'' BASIS,
+WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
+WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF
+DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR
+NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF
+THE COVERED CODE IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE
+IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER
+CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR
+CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART
+OF THIS LICENSE. NO USE OF ANY COVERED CODE IS AUTHORIZED HEREUNDER
+EXCEPT UNDER THIS DISCLAIMER.
+
+8. TERMINATION.
+This License and the rights granted hereunder will terminate
+automatically if You fail to comply with terms herein and fail to cure
+such breach within 30 days of becoming aware of the breach. All
+sublicenses to the Covered Code which are properly granted shall
+survive any termination of this License. Provisions which, by their
+nature, must remain in effect beyond the termination of this License
+shall survive.
+
+9. DISCLAIMER OF LIABILITY
+Any utilization of Covered Code shall not cause the Initial Developer
+or any Contributor to be liable for any damages (neither direct nor
+indirect).
+
+10. MISCELLANEOUS
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision is held to be unenforceable, such
+provision shall be reformed only to the extent necessary to make it
+enforceable. This License shall be construed by and in accordance with
+the substantive laws of Sweden. Any dispute, controversy or claim
+arising out of or relating to this License, or the breach, termination
+or invalidity thereof, shall be subject to the exclusive jurisdiction
+of Swedish courts, with the Stockholm City Court as the first
+instance.
+       
+EXHIBIT A.
+
+``The contents of this file are subject to the Erlang Public License,
+Version 1.1, (the "License"); you may not use this file except in
+compliance with the License. You should have received a copy of the
+Erlang Public License along with this software. If not, it can be
+retrieved via the world wide web at http://www.erlang.org/.
+
+Software distributed under the License is distributed on an "AS IS"
+basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+the License for the specific language governing rights and limitations
+under the License.
+
+The Initial Developer of the Original Code is Ericsson AB.
+Portions created by Ericsson are Copyright 2013, Ericsson AB.
+All Rights Reserved.''
diff --git a/deps/sockjs/LICENSE-MIT-Mochiweb b/deps/sockjs/LICENSE-MIT-Mochiweb
new file mode 100644 (file)
index 0000000..7b7c506
--- /dev/null
@@ -0,0 +1,22 @@
+This is the MIT license.
+
+Copyright (c) 2007 Mochi Media, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
similarity index 96%
rename from rabbitmq-server/plugins-src/eldap-wrapper/LICENSE-MIT-eldap
rename to deps/sockjs/LICENSE-MIT-SockJS
index 1f6200918f7b8b90047ae33eac3c175507dd76b1..a89716714a4612336b39c0e3a073cc28e83be409 100644 (file)
@@ -1,6 +1,5 @@
+Copyright (C) 2011 VMware, Inc.
 
-Copyright (c) 2010, Torbjorn Tornkvist
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
@@ -18,4 +17,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
-
diff --git a/deps/sockjs/Makefile b/deps/sockjs/Makefile
new file mode 100644 (file)
index 0000000..0e10035
--- /dev/null
@@ -0,0 +1,26 @@
+IGNORE_DEPS += edown eper eunit_formatters meck node_package rebar_lock_deps_plugin rebar_vsn_plugin reltool_util
+C_SRC_DIR = /path/do/not/exist
+C_SRC_TYPE = rebar
+DRV_CFLAGS = -fPIC
+export DRV_CFLAGS
+ERLANG_ARCH = 64
+export ERLANG_ARCH
+ERLC_OPTS = +debug_info
+export ERLC_OPTS
+
+DEPS += cowboy
+dep_cowboy = git https://github.com/ninenines/cowboy.git 1.0.3
+COMPILE_FIRST +=
+
+
+rebar_dep: preprocess pre-deps deps pre-app app
+
+preprocess::
+
+pre-deps::
+
+pre-app::
+
+include ../../erlang.mk
+
+ERLC_OPTS += $(SOCKJS_ERLC_OPTS)
similarity index 69%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/README.md
rename to deps/sockjs/README.md
index 6c639175801cd7d1001e1434ea085b9fdaea7b98..96f2035549ac7ff529869dfd2c78a3ef36e82631 100644 (file)
@@ -28,25 +28,31 @@ like this:
 
 ```erlang
 main(_) ->
-    application:start(sockjs),
-    application:start(cowboy),
+    ok = application:start(xmerl),
+    ok = application:start(sockjs),
+    ok = application:start(ranch),
+    ok = application:start(crypto),
+    ok = application:start(cowlib),
+    ok = application:start(cowboy),
 
     SockjsState = sockjs_handler:init_state(
                     <<"/echo">>, fun service_echo/3, state, []),
 
-    Routes = [{'_',  [{[<<"echo">>, '...'],
+    Routes = [{'_',  [{<<"/echo/[...]">>,
                        sockjs_cowboy_handler, SockjsState}]}],
+    Dispatch = cowboy_router:compile(Routes),
 
-    cowboy:start_listener(http, 100,
-                          cowboy_tcp_transport, [{port,     8081}],
-                          cowboy_http_protocol, [{dispatch, Routes}]),
+    cowboy:start_http(cowboy_test_http_listener, 100,
+                      [{port, 8081}],
+                      [{env, [{dispatch, Dispatch}]}]),
     receive
         _ -> ok
     end.
 
-service_echo(_Conn, init, state)        -> {ok, state};
-service_echo(Conn, {recv, Data}, state) -> Conn:send(Data);
-service_echo(_Conn, closed, state)      -> {ok, state}.
+service_echo(_Conn, init, state)          -> {ok, state};
+service_echo(Conn, {recv, Data}, state)   -> Conn:send(Data); % or: sockjs:send(Data, Conn)
+service_echo(_Conn, {info, _Info}, state) -> {ok, state};
+service_echo(_Conn, closed, state)        -> {ok, state}.
 ```
 
 Dig into the `examples` directory to get working code:
@@ -77,7 +83,7 @@ SockJS-erlang API
 Except for the web framework-specific API's, SockJS-erlang is rather
 simple. It has just a couple of methods:
 
- * **sockjs_handler:init_state(prefix, callback, state, options) -> service()**
+ * **sockjs_handler:init_state(Prefix, Callback, State, Options) -> service()**
 
     Initializes the state of a SockJS service (ie: a thing you can
     access from the browser, it has an url and a code on the server
@@ -117,18 +123,22 @@ simple. It has just a couple of methods:
     For more explanation, please do take a look at
     [SockJS-node readme](https://github.com/sockjs/sockjs-node/blob/master/README.md).
 
- * **Connection:send(payload) -> ok**
+ * **Connection:send(Payload) -> ok**
+ * **sockjs:send(Payload, Connection) -> ok**
 
      Send data over an active SockJS connection. Payload should be of
      iodata() type. Messages sent after connection gets closed will be
      lost.
 
- * **Connection:close(code, reason) -> ok**
+ * **Connection:close(Code, Reason) -> ok**
+ * **sockjs:close(Code, Reason, Connection) -> ok**
+ * **sockjs:close(Connection) -> ok**
 
      Close an active SockJS connection with code and reason. If code
      and reason are skipped, the defaults are used.
 
  * **Connection:info() -> proplist()**
+ * **sockjs:info(Connection) -> proplist()**
 
      Sometimes you may want to know more about the underlying
      connection. This method returns a proplist with few attributes
@@ -150,6 +160,62 @@ to explain how to use them, please take a look at the examples.
  * **sockjs_handler:handle_ws(service(), req()) -> req()**
 
 
+What's new in this fork?
+------------------------
+
+### API for multiplexing
+
+ * **sockjs:send(Payload, Channel) -> ok**
+
+     Send data over a channel. Payload should be of iodata() type. Messages
+     sent after connection gets closed will be lost.
+
+ * **sockjs:close(Code, Reason, Channel) -> ok**
+ * **sockjs:close(Channel) -> ok**
+
+     Close a channel with code and reason. If code and reason are skipped,
+     the defaults are used. Note, however, that the code and reason are not
+     sent to the client.
+
+ * **sockjs:info(Channel) -> proplist()**
+
+     Sometimes you may want to know more about the underlying
+     connection. This method returns a proplist with a few attributes
+     extracted from the first HTTP/websocket request that came in on
+     this connection. You should see:
+
+       * peername - ip address and port of the remote host
+       * sockname - ip address and port of the local endpoint
+       * path - the path used by the request that started the connection
+       * headers - a set of headers extracted from the request that
+         may be handy (don't expect to retrieve the Cookie header).
+
+ * **sockjs:to_session(Channel) -> conn()**
+
+     Convert a channel to a connection.
+
+ * **sockjs:to_channel(Conn, Topic) -> channel()**
+
+     Convert a connection to a channel with a specific topic (channel name).
+
+ * **sockjs_multiplex:init_state(Services, {AuthenCallback, Options})**
+
+     Sometimes you don't want clients to access your channel services
+     directly and you want to do something with the client first. If you
+     use an authentication callback, you can decide when to allow a client
+     to use your services. It's quite simple, see [example](https://github.com/trubavuong/sockjs-erlang/blob/master/examples/multiplex/cowboy_multiplex_authen_callback.erl).
+     Valid authentication callback options:
+
+       * `{state, list()}` - initial state of authentication callback
+
+     If you do not use multiplexing, you can still implement this
+     mechanism; see [example](https://github.com/trubavuong/sockjs-erlang/blob/master/examples/cowboy_echo_authen_callback.erl).
+
+ * **sockjs_multiplex:init_state(Services)**
+
+     Initialize state without authentication callback.
+
+
 Stability
 ---------
 
diff --git a/deps/sockjs/examples/cowboy_echo.erl b/deps/sockjs/examples/cowboy_echo.erl
new file mode 100755 (executable)
index 0000000..44307cb
--- /dev/null
@@ -0,0 +1,56 @@
+#!/usr/bin/env escript
+%%! -smp disable +A1 +K true -pa ebin -env ERL_LIBS deps -input
+-module(cowboy_echo).
+-mode(compile).
+
+-export([main/1]).
+
+%% Cowboy callbacks
+-export([init/3, handle/2, terminate/3]).
+
+
+main(_) ->
+    Port = 8081,
+    ok = application:start(xmerl),
+    ok = application:start(sockjs),
+    ok = application:start(ranch),
+    ok = application:start(crypto),
+    ok = application:start(cowlib),
+    ok = application:start(cowboy),
+
+    SockjsState = sockjs_handler:init_state(
+                    <<"/echo">>, fun service_echo/3, state, []),
+
+    VhostRoutes = [{<<"/echo/[...]">>, sockjs_cowboy_handler, SockjsState},
+                   {'_', ?MODULE, []}],
+    Routes = [{'_',  VhostRoutes}], % any vhost
+    Dispatch = cowboy_router:compile(Routes),
+
+    io:format(" [*] Running at http://localhost:~p~n", [Port]),
+    cowboy:start_http(cowboy_echo_http_listener, 100,
+                      [{port, Port}],
+                      [{env, [{dispatch, Dispatch}]}]),
+    receive
+        _ -> ok
+    end.
+
+%% --------------------------------------------------------------------------
+
+init({_Any, http}, Req, []) ->
+    {ok, Req, []}.
+
+handle(Req, State) ->
+    {ok, Data} = file:read_file("./examples/echo.html"),
+    {ok, Req1} = cowboy_req:reply(200, [{<<"Content-Type">>, "text/html"}],
+                                       Data, Req),
+    {ok, Req1, State}.
+
+terminate(_Reason, _Req, _State) ->
+    ok.
+
+%% --------------------------------------------------------------------------
+
+service_echo(_Conn, init, state)          -> {ok, state};
+service_echo(Conn, {recv, Data}, state)   -> sockjs:send(Data, Conn);
+service_echo(_Conn, {info, _Info}, state) -> {ok, state};
+service_echo(_Conn, closed, state)        -> {ok, state}.
diff --git a/deps/sockjs/examples/cowboy_echo_authen_callback.erl b/deps/sockjs/examples/cowboy_echo_authen_callback.erl
new file mode 100755 (executable)
index 0000000..612a6f9
--- /dev/null
@@ -0,0 +1,86 @@
+#!/usr/bin/env escript
+%%! -smp disable +A1 +K true -pa ebin -env ERL_LIBS deps -input
+-module(cowboy_echo).
+-mode(compile).
+
+-export([main/1]).
+
+%% Cowboy callbacks
+-export([init/3, handle/2, terminate/3]).
+
+
+main(_) ->
+    Port = 8081,
+    ok = application:start(xmerl),
+    ok = application:start(sockjs),
+    ok = application:start(ranch),
+    ok = application:start(crypto),
+    ok = application:start(cowlib),
+    ok = application:start(cowboy),
+
+    SockjsState = sockjs_handler:init_state(
+                    <<"/echo">>, fun service_echo/3, [], []),
+
+    VhostRoutes = [{<<"/echo/[...]">>, sockjs_cowboy_handler, SockjsState},
+                   {'_', ?MODULE, []}],
+    Routes = [{'_',  VhostRoutes}], % any vhost
+    Dispatch = cowboy_router:compile(Routes),
+
+    io:format(" [*] Running at http://localhost:~p~n", [Port]),
+    cowboy:start_http(cowboy_echo_http_listener, 100,
+                      [{port, Port}],
+                      [{env, [{dispatch, Dispatch}]}]),
+    receive
+        _ -> ok
+    end.
+
+%% --------------------------------------------------------------------------
+
+init({_Any, http}, Req, []) ->
+    {ok, Req, []}.
+
+handle(Req, State) ->
+    {ok, Data} = file:read_file("./examples/echo_authen_callback.html"),
+    {ok, Req1} = cowboy_req:reply(200, [{<<"Content-Type">>, "text/html"}],
+                                       Data, Req),
+    {ok, Req1, State}.
+
+terminate(_Reason, _Req, _State) ->
+    ok.
+
+%% --------------------------------------------------------------------------
+
+authen(Conn, init, State) ->
+    {ok, TRef} = timer:apply_after(5000, sockjs, close, [Conn]),
+    {ok, [TRef | State]};
+authen(Conn, {recv, Data}, [TRef | State] = State1) ->
+    case Data of
+        <<"auth">> ->
+            sockjs:send(<<"Authenticate successfully!">>, Conn),
+            timer:cancel(TRef),
+            {success, [{user_id, element(3, erlang:now())} | State]};
+        _Else ->
+            {ok, State1}
+    end;
+authen(_Conn, closed, [TRef | State]) ->
+    timer:cancel(TRef),
+    {ok, State}.
+
+service_echo(Conn, init, State) ->
+    authen(Conn, init, State);
+service_echo(Conn, {recv, Data}, State) ->
+    case lists:keyfind(user_id, 1, State) of
+        {user_id, UserId} ->
+            sockjs:send([Data, " from ", erlang:integer_to_binary(UserId)], Conn);
+        false ->
+            case authen(Conn, {recv, Data}, State) of
+                {success, State1} ->
+                    {ok, State1};
+                Else ->
+                    Else
+            end
+    end;
+service_echo(_Conn, {info, _Info}, State) ->
+    {ok, State};
+service_echo(Conn, closed, State) ->
+    authen(Conn, closed, State).
similarity index 73%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/cowboy_test_server.erl
rename to deps/sockjs/examples/cowboy_test_server.erl
index 72a09a84e8b63555f28d13d1e0e4ac6e2473fc5c..87b7e62d0817aaebbe11d9a91bcdad3448c16141 100755 (executable)
@@ -1,5 +1,5 @@
 #!/usr/bin/env escript
-%%! -smp disable +A1 +K true -pa ebin deps/cowboy/ebin -input
+%%! -smp disable +A1 +K true -pa ebin -env ERL_LIBS deps -input
 -module(cowboy_test_server).
 -mode(compile).
 
 
 main(_) ->
     Port = 8081,
-    application:start(sockjs),
-    application:start(cowboy),
+    ok = application:start(xmerl),
+    ok = application:start(sockjs),
+    ok = application:start(ranch),
+    ok = application:start(crypto),
+    ok = application:start(cowlib),
+    ok = application:start(cowboy),
 
     StateEcho = sockjs_handler:init_state(
                   <<"/echo">>, fun service_echo/3, state,
@@ -30,21 +34,23 @@ main(_) ->
                     <<"/cookie_needed_echo">>, fun service_echo/3, state,
                     [{cookie_needed, true}]),
 
-    VRoutes = [{[<<"echo">>, '...'], sockjs_cowboy_handler, StateEcho},
-               {[<<"close">>, '...'], sockjs_cowboy_handler, StateClose},
-               {[<<"amplify">>, '...'], sockjs_cowboy_handler, StateAmplify},
-               {[<<"broadcast">>, '...'], sockjs_cowboy_handler, StateBroadcast},
-               {[<<"disabled_websocket_echo">>, '...'], sockjs_cowboy_handler,
+    VRoutes = [{<<"/echo/[...]">>, sockjs_cowboy_handler, StateEcho},
+               {<<"/close/[...]">>, sockjs_cowboy_handler, StateClose},
+               {<<"/amplify/[...]">>, sockjs_cowboy_handler, StateAmplify},
+               {<<"/broadcast/[...]">>, sockjs_cowboy_handler, StateBroadcast},
+               {<<"/disabled_websocket_echo/[...]">>, sockjs_cowboy_handler,
                 StateDWSEcho},
-               {[<<"cookie_needed_echo">>, '...'], sockjs_cowboy_handler,
+               {<<"/cookie_needed_echo/[...]">>, sockjs_cowboy_handler,
                 StateCNEcho},
                {'_', ?MODULE, []}],
     Routes = [{'_',  VRoutes}], % any vhost
+    Dispatch = cowboy_router:compile(Routes),
 
     io:format(" [*] Running at http://localhost:~p~n", [Port]),
-    cowboy:start_listener(http, 100,
-                          cowboy_tcp_transport, [{port,     Port}],
-                          cowboy_http_protocol, [{dispatch, Routes}]),
+
+    cowboy:start_http(cowboy_test_server_http_listener, 100,
+                      [{port, Port}],
+                      [{env, [{dispatch, Dispatch}]}]),
     receive
         _ -> ok
     end.
@@ -55,7 +61,7 @@ init({_Any, http}, Req, []) ->
     {ok, Req, []}.
 
 handle(Req, State) ->
-    {ok, Req2} = cowboy_http_req:reply(404, [],
+    {ok, Req2} = cowboy_req:reply(404, [],
                  <<"404 - Nothing here (via sockjs-erlang fallback)\n">>, Req),
     {ok, Req2, State}.
 
similarity index 93%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/echo.html
rename to deps/sockjs/examples/echo.html
index 180cb4a167f347f01523f2b18b8f32adf3c408a0..b5718cfba4adac4e9dd62f1e8f66d3e234ba7455 100644 (file)
@@ -1,8 +1,8 @@
-<!doctype html>
+<!DOCTYPE html>
 <html><head>
   <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery.min.js">
   </script>
-  <script src="http://cdn.sockjs.org/sockjs-0.2.min.js">
+  <script src="http://cdn.sockjs.org/sockjs-0.3.4.min.js">
   </script>
   <style>
       .box {
@@ -33,7 +33,7 @@
     <h2>SockJS-erlang Echo example</h2>
     <form id="form">
       <input id="input" autocomplete="off" class="box"
-             value="type something here" />
+             placeholder="type something here" />
     </form>
     <div id="output" class="box"></div>
     <script>
diff --git a/deps/sockjs/examples/echo_authen_callback.html b/deps/sockjs/examples/echo_authen_callback.html
new file mode 100644 (file)
index 0000000..b5718cf
--- /dev/null
@@ -0,0 +1,72 @@
+<!DOCTYPE html>
+<html><head>
+  <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery.min.js">
+  </script>
+  <script src="http://cdn.sockjs.org/sockjs-0.3.4.min.js">
+  </script>
+  <style>
+      .box {
+          border: 1px dashed black;
+          border-radius: 4px;
+          -moz-border-radius: 4px;
+          width: 400px;
+          display: block;
+          height: 300px;
+          float: left;
+      }
+      #output {
+          border-color: grey;
+          overflow:auto;
+      }
+      #input {
+          vertical-align: text-top;
+          -moz-outline-style: none;
+          outline-style: none;
+          outline-width: 0px;
+          outline-color: -moz-use-text-color;
+      }
+      body {
+          background-color: #F0F0F0;
+      }
+  </style>
+</head><body lang="en">
+    <h2>SockJS-erlang Echo example</h2>
+    <form id="form">
+      <input id="input" autocomplete="off" class="box"
+             placeholder="type something here" />
+    </form>
+    <div id="output" class="box"></div>
+    <script>
+      function log(m) {
+          $('#output').append($("<code>").text(m));
+          $('#output').append($("<br>"));
+          $('#output').scrollTop($('#output').scrollTop()+10000);
+      }
+
+      var sockjs_url = '/echo';
+      var sockjs = new SockJS(sockjs_url);
+      sockjs.onopen = function() {
+          log(' [*] Connected (using: '+sockjs.protocol+')');
+      };
+      sockjs.onclose = function(e) {
+          log(' [*] Disconnected ('+e.status + ' ' + e.reason+ ')');
+      };
+      sockjs.onmessage = function(e) {
+          log(' [ ] received: ' + JSON.stringify(e.data));
+      };
+
+      $('#input').focus();
+      $('#form').submit(function() {
+          var val = $('#input').val();
+          $('#input').val('');
+          var l = ' [ ] sending: ' + JSON.stringify(val);
+          if (sockjs.readyState !== SockJS.OPEN) {
+              l += ' (error, connection not established)';
+          } else {
+              sockjs.send(val);
+          }
+          log(l);
+          return false;
+      });
+    </script>
+</body></html>
similarity index 58%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/cowboy_multiplex.erl
rename to deps/sockjs/examples/multiplex/cowboy_multiplex.erl
index 087374b52250412d035403be81f1c7a38e37d3e6..e0b8b4265377675ead46c50cca8d52c95dd61dd0 100755 (executable)
@@ -1,17 +1,21 @@
 #!/usr/bin/env escript
-%%! -smp disable +A1 +K true -pa ebin deps/cowboy/ebin -input
+%%! -smp disable +A1 +K true -pa ebin -env ERL_LIBS deps -input
 -module(cowboy_multiplex).
 -mode(compile).
 
 -export([main/1]).
 
 %% Cowboy callbacks
--export([init/3, handle/2, terminate/2]).
+-export([init/3, handle/2, terminate/3]).
 
 main(_) ->
     Port = 8081,
-    application:start(sockjs),
-    application:start(cowboy),
+    ok = application:start(xmerl),
+    ok = application:start(sockjs),
+    ok = application:start(ranch),
+    ok = application:start(crypto),
+    ok = application:start(cowlib),
+    ok = application:start(cowboy),
 
     MultiplexState = sockjs_multiplex:init_state(
                        [{"ann",  fun service_ann/3,  []},
@@ -21,14 +25,15 @@ main(_) ->
     SockjsState = sockjs_handler:init_state(
                     <<"/multiplex">>, sockjs_multiplex, MultiplexState, []),
 
-    VhostRoutes = [{[<<"multiplex">>, '...'], sockjs_cowboy_handler, SockjsState},
+    VhostRoutes = [{<<"/multiplex/[...]">>, sockjs_cowboy_handler, SockjsState},
                    {'_', ?MODULE, []}],
     Routes = [{'_',  VhostRoutes}], % any vhost
+    Dispatch = cowboy_router:compile(Routes),
 
     io:format(" [*] Running at http://localhost:~p~n", [Port]),
-    cowboy:start_listener(http, 100,
-                          cowboy_tcp_transport, [{port,     Port}],
-                          cowboy_http_protocol, [{dispatch, Routes}]),
+    cowboy:start_http(http, 100,
+                      [{port, Port}],
+                      [{env, [{dispatch, Dispatch}]}]),
     receive
         _ -> ok
     end.
@@ -39,48 +44,44 @@ init({_Any, http}, Req, []) ->
     {ok, Req, []}.
 
 handle(Req, State) ->
-    {Path, Req1} = cowboy_http_req:path(Req),
+    {Path, Req1} = cowboy_req:path(Req),
     {ok, Req2} = case Path of
-                     [<<"multiplex.js">>] ->
-                         {ok, Data} = file:read_file("./examples/multiplex/multiplex.js"),
-                         cowboy_http_req:reply(200, [{<<"Content-Type">>, "application/javascript"}],
-                                               Data, Req1);
-                     [] ->
+                     <<"/">> ->
                          {ok, Data} = file:read_file("./examples/multiplex/index.html"),
-                         cowboy_http_req:reply(200, [{<<"Content-Type">>, "text/html"}],
+                         cowboy_req:reply(200, [{<<"Content-Type">>, "text/html"}],
                                                Data, Req1);
                      _ ->
-                         cowboy_http_req:reply(404, [],
+                         cowboy_req:reply(404, [],
                                                <<"404 - Nothing here\n">>, Req1)
                  end,
     {ok, Req2, State}.
 
-terminate(_Req, _State) ->
+terminate(_Reason, _Req, _State) ->
     ok.
 
 %% --------------------------------------------------------------------------
 
 service_ann(Conn, init, State) ->
-    Conn:send("Ann says hi!"),
+    sockjs:send("Ann says hi!", Conn),
     {ok, State};
 service_ann(Conn, {recv, Data}, State) ->
-    Conn:send(["Ann nods: ", Data]),
+    sockjs:send(["Ann nods: ", Data], Conn),
     {ok, State};
 service_ann(_Conn, closed, State) ->
     {ok, State}.
 
 service_bob(Conn, init, State) ->
-    Conn:send("Bob doesn't agree."),
+    sockjs:send("Bob doesn't agree.", Conn),
     {ok, State};
 service_bob(Conn, {recv, Data}, State) ->
-    Conn:send(["Bob says no to: ", Data]),
+    sockjs:send(["Bob says no to: ", Data], Conn),
     {ok, State};
 service_bob(_Conn, closed, State) ->
     {ok, State}.
 
 service_carl(Conn, init, State) ->
-    Conn:send("Carl says goodbye!"),
-    Conn:close(),
+    sockjs:send("Carl says goodbye!", Conn),
+    sockjs:close(Conn),
     {ok, State};
 service_carl(_Conn, _, State) ->
     {ok, State}.
diff --git a/deps/sockjs/examples/multiplex/cowboy_multiplex_authen_callback.erl b/deps/sockjs/examples/multiplex/cowboy_multiplex_authen_callback.erl
new file mode 100755 (executable)
index 0000000..625a605
--- /dev/null
@@ -0,0 +1,107 @@
+#!/usr/bin/env escript
+%%! -smp disable +A1 +K true -pa ebin -env ERL_LIBS deps -input
+-module(cowboy_multiplex).
+-mode(compile).
+
+-export([main/1]).
+
+%% Cowboy callbacks
+-export([init/3, handle/2, terminate/3]).
+
+main(_) ->
+    Port = 8081,
+    ok = application:start(xmerl),
+    ok = application:start(sockjs),
+    ok = application:start(ranch),
+    ok = application:start(crypto),
+    ok = application:start(cowlib),
+    ok = application:start(cowboy),
+
+    MultiplexState = sockjs_multiplex:init_state(
+                       [{"ann",  fun service_ann/3,  []},
+                        {"bob",  fun service_bob/3,  []},
+                        {"carl", fun service_carl/3, []}],
+                       {fun authen/3, [{state, []}]}),
+
+    SockjsState = sockjs_handler:init_state(
+                    <<"/multiplex">>, sockjs_multiplex, MultiplexState, []),
+
+    VhostRoutes = [{<<"/multiplex/[...]">>, sockjs_cowboy_handler, SockjsState},
+                   {'_', ?MODULE, []}],
+    Routes = [{'_',  VhostRoutes}], % any vhost
+    Dispatch = cowboy_router:compile(Routes),
+
+    io:format(" [*] Running at http://localhost:~p~n", [Port]),
+    cowboy:start_http(http, 100,
+                      [{port, Port}],
+                      [{env, [{dispatch, Dispatch}]}]),
+    receive
+        _ -> ok
+    end.
+
+%% --------------------------------------------------------------------------
+
+init({_Any, http}, Req, []) ->
+    {ok, Req, []}.
+
+handle(Req, State) ->
+    {Path, Req1} = cowboy_req:path(Req),
+    {ok, Req2} = case Path of
+                     <<"/">> ->
+                         {ok, Data} = file:read_file("./examples/multiplex/index_authen_callback.html"),
+                         cowboy_req:reply(200, [{<<"Content-Type">>, "text/html"}],
+                                               Data, Req1);
+                     _ ->
+                         cowboy_req:reply(404, [],
+                                               <<"404 - Nothing here\n">>, Req1)
+                 end,
+    {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+    ok.
+
+%% --------------------------------------------------------------------------
+
+authen(Conn, init, Extra) ->
+    {ok, TRef} = timer:apply_after(5000, sockjs, close, [Conn]),
+    {ok, [TRef | Extra]};
+authen(Conn, {recv, Data}, [TRef | Extra] = State) ->
+    case Data of
+        <<"auth">> ->
+            sockjs:send(<<"Authenticate successfully!">>, Conn),
+            timer:cancel(TRef),
+            {success, [{user_id, element(3, erlang:now())} | Extra]};
+        _Else ->
+            {ok, State}
+    end;
+authen(_Conn, closed, [TRef | Extra]) ->
+    timer:cancel(TRef),
+    {ok, Extra}.
+
+service_ann(Conn, init, State) ->
+    sockjs:send("Ann says hi!", Conn),
+    {ok, State};
+service_ann(Conn, {recv, Data}, State) ->
+    {user_id, UserId} = lists:keyfind(user_id, 1, State),
+    sockjs:send(["Ann nods: ", Data, " from ", erlang:integer_to_binary(UserId)], Conn),
+    {ok, State};
+service_ann(_Conn, closed, State) ->
+    {ok, State}.
+
+service_bob(Conn, init, State) ->
+    sockjs:send("Bob doesn't agree.", Conn),
+    {ok, State};
+service_bob(Conn, {recv, Data}, State) ->
+    {user_id, UserId} = lists:keyfind(user_id, 1, State),
+    sockjs:send(["Bob says no to: ", Data, " from ", erlang:integer_to_binary(UserId)],
+                Conn),
+    {ok, State};
+service_bob(_Conn, closed, State) ->
+    {ok, State}.
+
+service_carl(Conn, init, State) ->
+    sockjs:send("Carl says goodbye!", Conn),
+    sockjs:close(Conn),
+    {ok, State};
+service_carl(_Conn, _, State) ->
+    {ok, State}.
similarity index 92%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/index.html
rename to deps/sockjs/examples/multiplex/index.html
index 5efe2fc8123077d800ca773f1155d019578a5521..3353e6f9375443a090ed62c3faa089374087d359 100644 (file)
@@ -1,8 +1,8 @@
 <!doctype html>
 <html><head>
     <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.1/jquery.min.js"></script>
-    <script src="http://cdn.sockjs.org/sockjs-0.2.min.js"></script>
-    <script src="multiplex.js"></script>
+    <script src="http://cdn.sockjs.org/sockjs-0.3.min.js"></script>
+    <script src="http://d1fxtkz8shb9d2.cloudfront.net/websocket-multiplex-0.1.js"></script>
     <style>
       .box {
           width: 300px;
@@ -82,7 +82,7 @@
         var sockjs_url = '/multiplex';
         var sockjs = new SockJS(sockjs_url);
 
-        var multiplexer = new MultiplexedWebSocket(sockjs);
+        var multiplexer = new WebSocketMultiplex(sockjs);
         var ann  = multiplexer.channel('ann');
         var bob  = multiplexer.channel('bob');
         var carl = multiplexer.channel('carl');
diff --git a/deps/sockjs/examples/multiplex/index_authen_callback.html b/deps/sockjs/examples/multiplex/index_authen_callback.html
new file mode 100644 (file)
index 0000000..ee6d8aa
--- /dev/null
@@ -0,0 +1,109 @@
+<!doctype html>
+<html><head>
+    <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.1/jquery.min.js"></script>
+    <script src="http://cdn.sockjs.org/sockjs-0.3.min.js"></script>
+    <script src="http://d1fxtkz8shb9d2.cloudfront.net/websocket-multiplex-0.1.js"></script>
+    <style>
+      .box {
+          width: 300px;
+          float: left;
+          margin: 0 20px 0 20px;
+      }
+      .box div, .box input {
+          border: 1px solid;
+          -moz-border-radius: 4px;
+          border-radius: 4px;
+          width: 100%;
+          padding: 0px;
+          margin: 5px;
+      }
+      .box div {
+          border-color: grey;
+          height: 300px;
+          overflow: auto;
+      }
+      .box input {
+          height: 30px;
+      }
+      h1 {
+          margin-left: 75px;
+      }
+      body {
+          background-color: #F0F0F0;
+          font-family: "Arial";
+      }
+    </style>
+<head><body lang="en">
+    <h1>SockJS Multiplex example</h1>
+
+    <div id="main" class="box">
+      <div></div>
+      <form><input autocomplete="off"></input></form>
+    </div>
+
+    <div id="first" class="box">
+      <div></div>
+      <form><input autocomplete="off" value="Type here..."></input></form>
+    </div>
+
+    <div id="second" class="box">
+      <div></div>
+      <form><input autocomplete="off"></input></form>
+    </div>
+
+    <div id="third" class="box">
+      <div></div>
+      <form><input autocomplete="off"></input></form>
+    </div>
+
+    <script>
+        // Pipe - convenience wrapper to present data received from an
+        // object supporting WebSocket API in an html element. And the other
+        // direction: data typed into an input box shall be sent back.
+        var pipe = function(ws, el_name) {
+            var div  = $(el_name + ' div');
+            var inp  = $(el_name + ' input');
+            var form = $(el_name + ' form');
+
+            var print = function(m, p) {
+                p = (p === undefined) ? '' : JSON.stringify(p);
+                div.append($("<code>").text(m + ' ' + p));
+                div.append($("<br>"));
+                div.scrollTop(div.scrollTop() + 10000);
+            };
+
+            ws.onopen    = function()  {print('[*] open', ws.protocol);};
+            ws.onmessage = function(e) {
+              if (e.data === 'Authenticate successfully!') {
+                var multiplexer = new WebSocketMultiplex(sockjs);
+                var ann  = multiplexer.channel('ann');
+                var bob  = multiplexer.channel('bob');
+                var carl = multiplexer.channel('carl');
+
+                pipe(ann,  '#first');
+                pipe(bob,  '#second');
+                pipe(carl, '#third');
+              }
+              else {
+                print('[.] message', e.data);
+                console.log(e.data);
+              }
+            };
+            ws.onclose   = function()  {print('[*] close');};
+
+            form.submit(function() {
+                print('[ ] sending', inp.val());
+                ws.send(inp.val());
+                inp.val('');
+                return false;
+            });
+        };
+
+        var sockjs_url = '/multiplex';
+        var sockjs = new SockJS(sockjs_url);
+
+        pipe(sockjs, '#main');
+
+        $('#first input').focus();
+    </script>
+</body></html>
diff --git a/deps/sockjs/rebar b/deps/sockjs/rebar
new file mode 100755 (executable)
index 0000000..b2bb16e
Binary files /dev/null and b/deps/sockjs/rebar differ
similarity index 53%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar.config
rename to deps/sockjs/rebar.config
index 2aa54733b15f94ae209da328d5c33eb6b31be47e..c240d9e00559638ca1446387e8348ab00140e21b 100644 (file)
@@ -4,13 +4,12 @@
 
 {erl_opts, [
             %% fail_on_warning,
-            bin_opt_info,
-            warn_missing_spec,
+            %bin_opt_info,
+            %warn_missing_spec,
             debug_info,
             warn_export_all
 ]}.
 
 {deps, [
-        {cowboy, ".*",
-         {git, "git://github.com/extend/cowboy.git", "4fb2a6face6e7d6ff1dd34a02c3bd8b63d972624"}}
+        {cowboy, "1.0.3",{git, "https://github.com/ninenines/cowboy.git", {tag, "1.0.3"}}}
        ]}.
diff --git a/deps/sockjs/src/sockjs.app.src b/deps/sockjs/src/sockjs.app.src
new file mode 100644 (file)
index 0000000..5c309cd
--- /dev/null
@@ -0,0 +1,7 @@
+{application,sockjs,
+             [{description,"SockJS"},
+              {vsn,"0.3.4"},
+              {modules,[]},
+              {registered,[]},
+              {applications,[kernel,stdlib,xmerl]},
+              {mod,{sockjs_app,[]}}]}.
diff --git a/deps/sockjs/src/sockjs.erl b/deps/sockjs/src/sockjs.erl
new file mode 100644 (file)
index 0000000..a8bf0e4
--- /dev/null
@@ -0,0 +1,38 @@
+-module(sockjs).
+
+-export([send/2, close/1, close/3, info/1]).
+-export([to_session/1, to_channel/2]).
+
+%% Send data over a connection/channel.
+-spec send(iodata(), sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok.
+send(Data, Conn = {sockjs_session, _}) ->
+    sockjs_session:send(Data, Conn);
+send(Data, Channel = {sockjs_multiplex_channel, _, _}) ->
+       sockjs_multiplex_channel:send(Data, Channel).
+
+%% Initiate a close of a connection/channel.
+-spec close(sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok.
+close(Conn) ->
+    close(1000, "Normal closure", Conn).
+
+-spec close(non_neg_integer(), string(), sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok.
+close(Code, Reason, Conn = {sockjs_session, _}) ->
+    sockjs_session:close(Code, Reason, Conn);
+close(Code, Reason, Channel = {sockjs_multiplex_channel, _, _}) ->
+    sockjs_multiplex_channel:close(Code, Reason, Channel).
+
+-spec info(sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> [{atom(), any()}].
+info(Conn = {sockjs_session, _}) ->
+    sockjs_session:info(Conn);
+info(Channel = {sockjs_multiplex_channel, _, _}) ->
+    sockjs_multiplex_channel:info(Channel).
+
+%% Get the backend connection of a channel.
+-spec to_session(sockjs_multiplex_channel:channel()) -> sockjs_session:conn().
+to_session({sockjs_multiplex_channel, Conn, _}) ->
+    Conn.
+
+%% Create a channel from a connection.
+-spec to_channel(sockjs_session:conn(), sockjs_multiplex_channel:topic()) -> sockjs_multiplex_channel:channel().
+to_channel(Conn = {sockjs_session, _}, Topic) ->
+    {sockjs_multiplex_channel, Conn, Topic}.
\ No newline at end of file
similarity index 90%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_action.erl
rename to deps/sockjs/src/sockjs_action.erl
index 43109636f20899a6a4ddeefe756245d5f768176f..c65b012142b7529730aa0287d5f4325fee68cf6b 100644 (file)
 
 %% --------------------------------------------------------------------------
 
-%% -spec welcome_screen(req(), headers(), service()) -> req().
+-spec welcome_screen(req(), headers(), service()) -> req().
 welcome_screen(Req, Headers, _Service) ->
     H = [{"Content-Type", "text/plain; charset=UTF-8"}],
     sockjs_http:reply(200, H ++ Headers,
           "Welcome to SockJS!\n", Req).
 
-%% -spec options(req(), headers(), service()) -> req().
+-spec options(req(), headers(), service()) -> req().
 options(Req, Headers, _Service) ->
     sockjs_http:reply(204, Headers, "", Req).
 
-%% -spec iframe(req(), headers(), service()) -> req().
+-spec iframe(req(), headers(), service()) -> req().
 iframe(Req, Headers, #service{sockjs_url = SockjsUrl}) ->
     IFrame = io_lib:format(?IFRAME, [SockjsUrl]),
     MD5 = "\"" ++ binary_to_list(base64:encode(erlang:md5(IFrame))) ++ "\"",
-    {H, Req2} = sockjs_http:header('If-None-Match', Req),
+    {H, Req2} = sockjs_http:header('if-none-match', Req),
     case H of
         MD5 -> sockjs_http:reply(304, Headers, "", Req2);
         _   -> sockjs_http:reply(
@@ -68,7 +68,7 @@ iframe(Req, Headers, #service{sockjs_url = SockjsUrl}) ->
     end.
 
 
-%% -spec info_test(req(), headers(), service()) -> req().
+-spec info_test(req(), headers(), service()) -> req().
 info_test(Req, Headers, #service{websocket = Websocket,
                                  cookie_needed = CookieNeeded}) ->
     I = [{websocket, Websocket},
@@ -81,12 +81,12 @@ info_test(Req, Headers, #service{websocket = Websocket,
 
 %% --------------------------------------------------------------------------
 
-%% -spec xhr_polling(req(), headers(), service(), session()) -> req().
+-spec xhr_polling(req(), headers(), service(), session()) -> req().
 xhr_polling(Req, Headers, Service, Session) ->
     Req1 = chunk_start(Req, Headers),
     reply_loop(Req1, Session, 1, fun fmt_xhr/1, Service).
 
-%% -spec xhr_streaming(req(), headers(), service(), session()) -> req().
+-spec xhr_streaming(req(), headers(), service(), session()) -> req().
 xhr_streaming(Req, Headers, Service = #service{response_limit = ResponseLimit},
               Session) ->
     Req1 = chunk_start(Req, Headers),
@@ -96,7 +96,7 @@ xhr_streaming(Req, Headers, Service = #service{response_limit = ResponseLimit},
                  fun fmt_xhr/1),
     reply_loop(Req2, Session, ResponseLimit, fun fmt_xhr/1, Service).
 
-%% -spec eventsource(req(), headers(), service(), session()) -> req().
+-spec eventsource(req(), headers(), service(), session()) -> req().
 eventsource(Req, Headers, Service = #service{response_limit = ResponseLimit},
             SessionId) ->
     Req1 = chunk_start(Req, Headers, "text/event-stream; charset=UTF-8"),
@@ -104,7 +104,7 @@ eventsource(Req, Headers, Service = #service{response_limit = ResponseLimit},
     reply_loop(Req2, SessionId, ResponseLimit, fun fmt_eventsource/1, Service).
 
 
-%% -spec htmlfile(req(), headers(), service(), session()) -> req().
+-spec htmlfile(req(), headers(), service(), session()) -> req().
 htmlfile(Req, Headers, Service = #service{response_limit = ResponseLimit},
          SessionId) ->
     S = fun (Req1, CB) ->
@@ -119,7 +119,7 @@ htmlfile(Req, Headers, Service = #service{response_limit = ResponseLimit},
         end,
     verify_callback(Req, S).
 
-%% -spec jsonp(req(), headers(), service(), session()) -> req().
+-spec jsonp(req(), headers(), service(), session()) -> req().
 jsonp(Req, Headers, Service, SessionId) ->
     S = fun (Req1, CB) ->
                 Req2 = chunk_start(Req1, Headers),
@@ -139,7 +139,7 @@ verify_callback(Req, Success) ->
 
 %% --------------------------------------------------------------------------
 
-%% -spec xhr_send(req(), headers(), service(), session()) -> req().
+-spec xhr_send(req(), headers(), service(), session()) -> req().
 xhr_send(Req, Headers, _Service, Session) ->
     {Body, Req1} = sockjs_http:body(Req),
     case handle_recv(Req1, Body, Session) of
@@ -150,7 +150,7 @@ xhr_send(Req, Headers, _Service, Session) ->
             sockjs_http:reply(204, H ++ Headers, "", Req1)
     end.
 
-%% -spec jsonp_send(req(), headers(), service(), session()) -> req().
+-spec jsonp_send(req(), headers(), service(), session()) -> req().
 jsonp_send(Req, Headers, _Service, Session) ->
     {Body, Req1} = sockjs_http:body_qs(Req),
     case handle_recv(Req1, Body, Session) of
@@ -236,21 +236,21 @@ chunk_end(Req) -> sockjs_http:chunk_end(Req).
 chunk_end(Req, Body, Fmt) -> Req1 = chunk(Req, Body, Fmt),
                              chunk_end(Req1).
 
-%% -spec fmt_xhr(iodata()) -> iodata().
+-spec fmt_xhr(iodata()) -> iodata().
 fmt_xhr(Body) -> [Body, "\n"].
 
-%% -spec fmt_eventsource(iodata()) -> iodata().
+-spec fmt_eventsource(iodata()) -> iodata().
 fmt_eventsource(Body) ->
     Escaped = sockjs_util:url_escape(binary_to_list(iolist_to_binary(Body)),
                                      "%\r\n\0"), %% $% must be first!
     [<<"data: ">>, Escaped, <<"\r\n\r\n">>].
 
-%% -spec fmt_htmlfile(iodata()) -> iodata().
+-spec fmt_htmlfile(iodata()) -> iodata().
 fmt_htmlfile(Body) ->
     Double = sockjs_json:encode(iolist_to_binary(Body)),
     [<<"<script>\np(">>, Double, <<");\n</script>\r\n">>].
 
-%% -spec fmt_jsonp(iodata(), iodata()) -> iodata().
+-spec fmt_jsonp(iodata(), iodata()) -> iodata().
 fmt_jsonp(Body, Callback) ->
     %% Yes, JSONed twice, there isn't a a better way, we must pass
     %% a string back, and the script, will be evaled() by the
@@ -259,7 +259,7 @@ fmt_jsonp(Body, Callback) ->
 
 %% --------------------------------------------------------------------------
 
-%% -spec websocket(req(), headers(), service()) -> req().
+-spec websocket(req(), headers(), service()) -> req().
 websocket(Req, Headers, Service) ->
     {_Any, Req1, {R1, R2}} = sockjs_handler:is_valid_ws(Service, Req),
     case {R1, R2} of
@@ -274,6 +274,6 @@ websocket(Req, Headers, Service) ->
                               "This WebSocket request can't be handled.", Req1)
     end.
 
-%% -spec rawwebsocket(req(), headers(), service()) -> req().
+-spec rawwebsocket(req(), headers(), service()) -> req().
 rawwebsocket(Req, Headers, Service) ->
     websocket(Req, Headers, Service).
similarity index 76%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_app.erl
rename to deps/sockjs/src/sockjs_app.erl
index 54aceb6c40139f79ee2496d9f0426d0e87521a1e..1b8e77c204d313c25c807d9baf305e32d0a49295 100644 (file)
@@ -4,11 +4,11 @@
 
 -export([start/2, stop/1]).
 
-%% -spec start(_, _) -> {ok, pid()}.
+-spec start(_, _) -> {ok, pid()}.
 start(_StartType, _StartArgs) ->
     sockjs_session:init(),
     sockjs_session_sup:start_link().
 
-%% -spec stop(_) -> ok.
+-spec stop(_) -> ok.
 stop(_State) ->
     ok.
similarity index 91%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_cowboy_handler.erl
rename to deps/sockjs/src/sockjs_cowboy_handler.erl
index d2f05ae3d0cd6ec4cbf3db6d1bbd10944c2abb8d..c66c9d4a6a285e37eeffaa366abf0dd1c4696b4d 100644 (file)
@@ -1,9 +1,9 @@
 -module(sockjs_cowboy_handler).
 -behaviour(cowboy_http_handler).
--behaviour(cowboy_http_websocket_handler).
+-behaviour(cowboy_websocket_handler).
 
 %% Cowboy http callbacks
--export([init/3, handle/2, terminate/2]).
+-export([init/3, handle/2, terminate/3]).
 
 %% Cowboy ws callbacks
 -export([websocket_init/3, websocket_handle/3,
@@ -16,7 +16,7 @@
 init({_Any, http}, Req, Service) ->
     case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) of
         {true, {cowboy, _Req1}, _Reason} ->
-            {upgrade, protocol, cowboy_http_websocket};
+            {upgrade, protocol, cowboy_websocket};
         {false, {cowboy, Req1}, _Reason} ->
             {ok, Req1, Service}
     end.
@@ -25,7 +25,7 @@ handle(Req, Service) ->
     {cowboy, Req3} = sockjs_handler:handle_req(Service, {cowboy, Req}),
     {ok, Req3, Service}.
 
-terminate(_Req, _Service) ->
+terminate(_Reason, _Req, _Service) ->
     ok.
 
 %% --------------------------------------------------------------------------
@@ -33,20 +33,20 @@ terminate(_Req, _Service) ->
 websocket_init(_TransportName, Req,
                Service = #service{logger        = Logger,
                                   subproto_pref = SubProtocolPref}) ->
-    Req3 = case cowboy_http_req:header(<<"Sec-Websocket-Protocol">>, Req) of
+    Req3 = case cowboy_req:header(<<"Sec-Websocket-Protocol">>, Req) of
                {undefined, Req1} ->
                    Req1;
                {SubProtocols, Req1} ->
                    SelectedSubProtocol =
                      choose_subprotocol_bin(SubProtocols, SubProtocolPref),
-                   {ok, Req2} = cowboy_http_req:set_resp_header(
+                   {ok, Req2} = cowboy_req:set_resp_header(
                                   <<"Sec-Websocket-Protocol">>,
                                   SelectedSubProtocol, Req1),
                    Req2
            end,
 
     Req4 = Logger(Service, {cowboy, Req3}, websocket),
-
     Service1 = Service#service{disconnect_delay = 5*60*1000},
 
     {Info, Req5} = sockjs_handler:extract_info(Req4),
@@ -59,7 +59,7 @@ websocket_init(_TransportName, Req,
         end,
     self() ! go,
     {ok, Req7, {RawWebsocket, SessionPid}}.
-
 websocket_handle({text, Data}, Req, {RawWebsocket, SessionPid} = S) ->
     case sockjs_ws_handler:received(RawWebsocket, SessionPid, Data) of
         ok       -> {ok, Req, S};
similarity index 79%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_filters.erl
rename to deps/sockjs/src/sockjs_filters.erl
index fba43cc48913180f6f62103da5ed4e8bd9778fb1..c4176bd7a08ece1aa0ddaf3a0ec3bb2336b62302 100644 (file)
@@ -1,15 +1,15 @@
 -module(sockjs_filters).
 
--include("sockjs_internal.hrl").
-
 -export([cache_for/2, h_sid/2, h_no_cache/2, xhr_cors/2,
          xhr_options_post/2, xhr_options_get/2]).
 
+-include("sockjs_internal.hrl").
+
 -define(YEAR, 365 * 24 * 60 * 60).
 
 %% --------------------------------------------------------------------------
 
-%% -spec cache_for(req(), headers()) -> {headers(), req()}.
+-spec cache_for(req(), headers()) -> {headers(), req()}.
 cache_for(Req, Headers) ->
     Expires = calendar:gregorian_seconds_to_datetime(
                 calendar:datetime_to_gregorian_seconds(
@@ -18,7 +18,7 @@ cache_for(Req, Headers) ->
          {"Expires",       httpd_util:rfc1123_date(Expires)}],
     {H ++ Headers, Req}.
 
-%% -spec h_sid(req(), headers()) -> {headers(), req()}.
+-spec h_sid(req(), headers()) -> {headers(), req()}.
 h_sid(Req, Headers) ->
     %% Some load balancers do sticky sessions, but only if there is
     %% a JSESSIONID cookie. If this cookie isn't yet set, we shall
@@ -31,21 +31,21 @@ h_sid(Req, Headers) ->
     end,
     {H ++ Headers, Req2}.
 
-%% -spec h_no_cache(req(), headers()) -> {headers(), req()}.
+-spec h_no_cache(req(), headers()) -> {headers(), req()}.
 h_no_cache(Req, Headers) ->
     H = [{"Cache-Control", "no-store, no-cache, must-revalidate, max-age=0"}],
     {H ++ Headers, Req}.
 
-%% -spec xhr_cors(req(), headers()) -> {headers(), req()}.
+-spec xhr_cors(req(), headers()) -> {headers(), req()}.
 xhr_cors(Req, Headers) ->
-    {OriginH, Req1} = sockjs_http:header('Origin', Req),
+    {OriginH, Req1} = sockjs_http:header('origin', Req),
      Origin = case OriginH of
                   "null"    -> "*";
                   undefined -> "*";
                   O         -> O
               end,
     {HeadersH, Req2} = sockjs_http:header(
-                             'Access-Control-Request-Headers', Req1),
+                             'access-control-request-headers', Req1),
     AllowHeaders = case HeadersH of
                        undefined -> [];
                        V         -> [{"Access-Control-Allow-Headers", V}]
@@ -54,15 +54,15 @@ xhr_cors(Req, Headers) ->
          {"Access-Control-Allow-Credentials", "true"}],
     {H ++ AllowHeaders ++ Headers, Req2}.
 
-%% -spec xhr_options_post(req(), headers()) -> {headers(), req()}.
+-spec xhr_options_post(req(), headers()) -> {headers(), req()}.
 xhr_options_post(Req, Headers) ->
     xhr_options(Req, Headers, ["OPTIONS", "POST"]).
 
-%% -spec xhr_options_get(req(), headers()) -> {headers(), req()}.
+-spec xhr_options_get(req(), headers()) -> {headers(), req()}.
 xhr_options_get(Req, Headers) ->
     xhr_options(Req, Headers, ["OPTIONS", "GET"]).
 
-%% -spec xhr_options(req(), headers(), list(string())) -> {headers(), req()}.
+-spec xhr_options(req(), headers(), list(string())) -> {headers(), req()}.
 xhr_options(Req, Headers, Methods) ->
     H = [{"Access-Control-Allow-Methods", string:join(Methods, ", ")},
          {"Access-Control-Max-Age", integer_to_list(?YEAR)}],
similarity index 85%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_handler.erl
rename to deps/sockjs/src/sockjs_handler.erl
index 81d4ef7ef69cbe659fde1f49588c8ace56124f2d..b6d139db603eb6467a17095b014280c1b834fc55 100644 (file)
@@ -7,11 +7,11 @@
 
 -include("sockjs_internal.hrl").
 
--define(SOCKJS_URL, "http://cdn.sockjs.org/sockjs-0.2.js").
+-define(SOCKJS_URL, "//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js").
 
 %% --------------------------------------------------------------------------
 
-%% -spec init_state(binary(), callback(), any(), list(tuple())) -> service().
+-spec init_state(binary(), callback(), any(), list(tuple())) -> service().
 init_state(Prefix, Callback, State, Options) ->
     #service{prefix = binary_to_list(Prefix),
              callback = Callback,
@@ -36,7 +36,7 @@ init_state(Prefix, Callback, State, Options) ->
 
 %% --------------------------------------------------------------------------
 
-%% -spec is_valid_ws(service(), req()) -> {boolean(), req(), tuple()}.
+-spec is_valid_ws(service(), req()) -> {boolean(), req(), tuple()}.
 is_valid_ws(Service, Req) ->
     case get_action(Service, Req) of
         {{match, WS}, Req1} when WS =:= websocket orelse
@@ -46,14 +46,14 @@ is_valid_ws(Service, Req) ->
             {false, Req1, {}}
     end.
 
-%% -spec valid_ws_request(service(), req()) -> {boolean(), req(), tuple()}.
+-spec valid_ws_request(service(), req()) -> {boolean(), req(), tuple()}.
 valid_ws_request(_Service, Req) ->
     {R1, Req1} = valid_ws_upgrade(Req),
     {R2, Req2} = valid_ws_connection(Req1),
     {R1 and R2, Req2, {R1, R2}}.
 
 valid_ws_upgrade(Req) ->
-    case sockjs_http:header('Upgrade', Req) of
+    case sockjs_http:header('upgrade', Req) of
         {undefined, Req2} ->
             {false, Req2};
         {V, Req2} ->
@@ -66,7 +66,7 @@ valid_ws_upgrade(Req) ->
     end.
 
 valid_ws_connection(Req) ->
-    case sockjs_http:header('Connection', Req) of
+    case sockjs_http:header('connection', Req) of
         {undefined, Req2} ->
             {false, Req2};
         {V, Req2} ->
@@ -75,7 +75,7 @@ valid_ws_connection(Req) ->
             {lists:member("upgrade", Vs), Req2}
     end.
 
-%% -spec get_action(service(), req()) -> {nomatch | {match, atom()}, req()}.
+-spec get_action(service(), req()) -> {nomatch | {match, atom()}, req()}.
 get_action(Service, Req) ->
     {Dispatch, Req1} = dispatch_req(Service, Req),
     case Dispatch of
@@ -95,20 +95,20 @@ strip_prefix(LongPath, Prefix) ->
     end.
 
 
-%% -type(dispatch_result() ::
-%%        nomatch |
-%%        {match, {send | recv | none , atom(),
-%%                 server(), session(), list(atom())}} |
-%%        {bad_method, list(atom())}).
+-type(dispatch_result() ::
+        nomatch |
+        {match, {send | recv | none , atom(),
+                 server(), session(), list(atom())}} |
+        {bad_method, list(atom())}).
 
-%% -spec dispatch_req(service(), req()) -> {dispatch_result(), req()}.
+-spec dispatch_req(service(), req()) -> {dispatch_result(), req()}.
 dispatch_req(#service{prefix = Prefix}, Req) ->
     {Method, Req1} = sockjs_http:method(Req),
     {LongPath, Req2} = sockjs_http:path(Req1),
     {ok, PathRemainder} = strip_prefix(LongPath, Prefix),
     {dispatch(Method, PathRemainder), Req2}.
 
-%% -spec dispatch(atom(), nonempty_string()) -> dispatch_result().
+-spec dispatch(atom(), nonempty_string()) -> dispatch_result().
 dispatch(Method, Path) ->
     lists:foldl(
       fun ({Match, MethodFilters}, nomatch) ->
@@ -165,7 +165,7 @@ re(Path, S) ->
 
 %% --------------------------------------------------------------------------
 
-%% -spec handle_req(service(), req()) -> req().
+-spec handle_req(service(), req()) -> req().
 handle_req(Service = #service{logger = Logger}, Req) ->
     Req0 = Logger(Service, Req, http),
 
@@ -204,14 +204,14 @@ handle({match, {Type, Action, _Server, Session, Filters}}, Service, Req) ->
 
 %% --------------------------------------------------------------------------
 
-%% -spec default_logger(service(), req(), websocket | http) -> req().
+-spec default_logger(service(), req(), websocket | http) -> req().
 default_logger(_Service, Req, _Type) ->
     {LongPath, Req1} = sockjs_http:path(Req),
     {Method, Req2}   = sockjs_http:method(Req1),
     io:format("~s ~s~n", [Method, LongPath]),
     Req2.
 
-%% -spec extract_info(req()) -> {info(), req()}.
+-spec extract_info(req()) -> {info(), req()}.
 extract_info(Req) ->
     {Peer, Req0}    = sockjs_http:peername(Req),
     {Sock, Req1}    = sockjs_http:sockname(Req0),
@@ -222,9 +222,14 @@ extract_info(Req) ->
                                               {V, R1}         -> {[{H, V} | Acc], R1}
                                           end
                                   end, {[], Req2},
-                                  ['Referer', 'X-Client-Ip', 'X-Forwarded-For',
-                                   'X-Cluster-Client-Ip', 'Via', 'X-Real-Ip']),
+                                  ['referer', 'x-client-ip', 'x-forwarded-for',
+                                   'x-cluster-client-ip', 'via', 'x-real-ip',
+    %% RabbitMQ-Web-STOMP needs this header for HTTP Basic Auth.
+                                   'authorization']),
+    %% RabbitMQ-Management needs the socket to figure out if it is SSL/TLS.
+    Socket  = cowboy_req:get(socket, element(2, Req3)),
     {[{peername, Peer},
       {sockname, Sock},
       {path, Path},
-      {headers, Headers}], Req3}.
+      {headers, Headers},
+      {socket, Socket}], Req3}.
diff --git a/deps/sockjs/src/sockjs_http.erl b/deps/sockjs/src/sockjs_http.erl
new file mode 100644 (file)
index 0000000..828247d
--- /dev/null
@@ -0,0 +1,144 @@
+-module(sockjs_http).
+
+-export([path/1, method/1, body/1, body_qs/1, header/2, jsessionid/1,
+         callback/1, peername/1, sockname/1]).
+-export([reply/4, chunk_start/3, chunk/2, chunk_end/1]).
+-export([hook_tcp_close/1, unhook_tcp_close/1, abruptly_kill/1]).
+-include("sockjs_internal.hrl").
+
+%% --------------------------------------------------------------------------
+
+-spec path(req()) -> {string(), req()}.
+path({cowboy, Req})       -> {Path, Req1} = cowboy_req:path(Req),
+                             {binary_to_list(Path), {cowboy, Req1}}.
+
+-spec method(req()) -> {atom(), req()}.
+method({cowboy, Req})       -> {Method, Req1} = cowboy_req:method(Req),
+                               {method_atom(Method), {cowboy, Req1}}.
+
+-spec method_atom(binary() | atom()) -> atom().
+method_atom(<<"GET">>) -> 'GET';
+method_atom(<<"PUT">>) -> 'PUT';
+method_atom(<<"POST">>) -> 'POST';
+method_atom(<<"DELETE">>) -> 'DELETE';
+method_atom(<<"OPTIONS">>) -> 'OPTIONS';
+method_atom(<<"PATCH">>) -> 'PATCH';
+method_atom(<<"HEAD">>) -> 'HEAD';
+method_atom('GET') -> 'GET';
+method_atom('PUT') -> 'PUT';
+method_atom('POST') -> 'POST';
+method_atom('DELETE') -> 'DELETE';
+method_atom('OPTIONS') -> 'OPTIONS';
+method_atom('PATCH') -> 'PATCH';
+method_atom('HEAD') -> 'HEAD'.
+
+-spec body(req()) -> {binary(), req()}.
+body({cowboy, Req})       -> {ok, Body, Req1} = cowboy_req:body(Req),
+                             {Body, {cowboy, Req1}}.
+
+-spec body_qs(req()) -> {binary(), req()}.
+body_qs(Req) ->
+    {H, Req1} =  header('content-type', Req),
+    case H of
+        H when H =:= "text/plain" orelse H =:= "" ->
+            body(Req1);
+        _ ->
+            %% By default assume application/x-www-form-urlencoded
+            body_qs2(Req1)
+    end.
+body_qs2({cowboy, Req}) ->
+    {ok, BodyQS, Req1} = cowboy_req:body_qs(Req),
+    case proplists:get_value(<<"d">>, BodyQS) of
+        undefined ->
+            {<<>>, {cowboy, Req1}};
+        V ->
+            {V, {cowboy, Req1}}
+    end.
+
+-spec header(atom(), req()) -> {nonempty_string() | undefined, req()}.
+header(K, {cowboy, Req})->
+    {H, Req2} = cowboy_req:header(K, Req),
+    {V, Req3} = case H of
+                    undefined ->
+                        cowboy_req:header(atom_to_binary(K, utf8), Req2);
+                    _ -> {H, Req2}
+                end,
+    case V of
+        undefined -> {undefined, {cowboy, Req3}};
+        _         -> {binary_to_list(V), {cowboy, Req3}}
+    end.
+
+-spec jsessionid(req()) -> {nonempty_string() | undefined, req()}.
+jsessionid({cowboy, Req}) ->
+    {C, Req2} = cowboy_req:cookie(<<"JSESSIONID">>, Req),
+    case C of
+        _ when is_binary(C) ->
+            {binary_to_list(C), {cowboy, Req2}};
+        undefined ->
+            {undefined, {cowboy, Req2}}
+    end.
+
+-spec callback(req()) -> {nonempty_string() | undefined, req()}.
+callback({cowboy, Req}) ->
+    {CB, Req1} = cowboy_req:qs_val(<<"c">>, Req),
+    case CB of
+        undefined -> {undefined, {cowboy, Req1}};
+        _         -> {binary_to_list(CB), {cowboy, Req1}}
+    end.
+
+-spec peername(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
+peername({cowboy, Req}) ->
+    {P, Req1} = cowboy_req:peer(Req),
+    {P, {cowboy, Req1}}.
+
+-spec sockname(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
+sockname({cowboy, Req} = R) ->
+    [Socket, Transport] = cowboy_req:get([socket, transport], Req),
+    {ok, SockName} = Transport:sockname(Socket),
+    {SockName, R}.
+
+%% --------------------------------------------------------------------------
+
+-spec reply(non_neg_integer(), headers(), iodata(), req()) -> req().
+reply(Code, Headers, Body, {cowboy, Req}) ->
+    Body1 = iolist_to_binary(Body),
+    {ok, Req1} = cowboy_req:reply(Code, enbinary(Headers), Body1, Req),
+    {cowboy, Req1}.
+
+-spec chunk_start(non_neg_integer(), headers(), req()) -> req().
+chunk_start(Code, Headers, {cowboy, Req}) ->
+    {ok, Req1} = cowboy_req:chunked_reply(Code, enbinary(Headers), Req),
+    {cowboy, Req1}.
+
+-spec chunk(iodata(), req()) -> {ok | error, req()}.
+chunk(Chunk, {cowboy, Req} = R) ->
+    case cowboy_req:chunk(Chunk, Req) of
+        ok          -> {ok, R};
+        {error, _E} -> {error, R}
+                      %% This shouldn't happen too often, usually we
+                      %% should catch tco socket closure before.
+    end.
+
+-spec chunk_end(req()) -> req().
+chunk_end({cowboy, _Req} = R)  -> R.
+
+enbinary(L) -> [{list_to_binary(K), list_to_binary(V)} || {K, V} <- L].
+
+
+-spec hook_tcp_close(req()) -> req().
+hook_tcp_close(R = {cowboy, Req}) ->
+    [T, S] = cowboy_req:get([transport, socket], Req),
+    T:setopts(S,[{active,once}]),
+    R.
+
+-spec unhook_tcp_close(req()) -> req().
+unhook_tcp_close(R = {cowboy, Req}) ->
+    [T, S] = cowboy_req:get([transport, socket], Req),
+    T:setopts(S,[{active,false}]),
+    R.
+
+-spec abruptly_kill(req()) -> req().
+abruptly_kill(R = {cowboy, Req}) ->
+    [T, S] = cowboy_req:get([transport, socket], Req),
+    ok = T:close(S),
+    R.
diff --git a/deps/sockjs/src/sockjs_internal.hrl b/deps/sockjs/src/sockjs_internal.hrl
new file mode 100644 (file)
index 0000000..916c1bd
--- /dev/null
@@ -0,0 +1,33 @@
+
+-type(req()          :: {cowboy, any()}).
+
+-type(user_session() :: nonempty_string()).
+-type(emittable()    :: init|closed|{recv, binary()}).
+-type(callback()     :: fun((user_session(), emittable(), any()) -> ok)).
+-type(logger()       :: fun((any(), req(), websocket|http) -> req())).
+
+-record(service, {prefix           :: nonempty_string(),
+                  callback         :: callback(),
+                  state            :: any(),
+                  sockjs_url       :: nonempty_string(),
+                  cookie_needed    :: boolean(),
+                  websocket        :: boolean(),
+                  disconnect_delay :: non_neg_integer(),
+                  heartbeat_delay  :: non_neg_integer(),
+                  response_limit   :: non_neg_integer(),
+                  logger           :: logger(),
+                  subproto_pref    :: [binary()]
+                  }).
+
+-type(service() :: #service{}).
+
+-type(headers() :: list({nonempty_string(), nonempty_string()})).
+-type(server()  :: nonempty_string()).
+-type(session() :: nonempty_string()).
+
+-type(frame()   :: {open, nil} |
+                   {close, {non_neg_integer(), string()}} |
+                   {data, list(iodata())} |
+                   {heartbeat, nil} ).
+
+-type(info()    :: [{atom(), any()}]).
similarity index 76%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_json.erl
rename to deps/sockjs/src/sockjs_json.erl
index d3dae20f878a6bffd3cbdf681e5ac2d5341d9e26..e61f4b9ad1373d471e360243556b434fde2f9d5d 100644 (file)
@@ -4,11 +4,11 @@
 
 %% --------------------------------------------------------------------------
 
-%% -spec encode(any()) -> iodata().
+-spec encode(any()) -> iodata().
 encode(Thing) ->
     mochijson2_fork:encode(Thing).
 
-%% -spec decode(iodata()) -> {ok, any()} | {error, any()}.
+-spec decode(iodata()) -> {ok, any()} | {error, any()}.
 decode(Encoded) ->
     try mochijson2_fork:decode(Encoded) of
         V -> {ok, V}
diff --git a/deps/sockjs/src/sockjs_multiplex.erl b/deps/sockjs/src/sockjs_multiplex.erl
new file mode 100644 (file)
index 0000000..3922e8c
--- /dev/null
@@ -0,0 +1,143 @@
+-module(sockjs_multiplex).
+
+-behaviour(sockjs_service).
+
+-export([init_state/1, init_state/2]).
+-export([sockjs_init/2, sockjs_handle/3, sockjs_terminate/2]).
+
+-record(service, {callback, state, vconn}).
+-record(authen_callback, {callback, success = false}).
+
+%% --------------------------------------------------------------------------
+
+init_state(Services, {AuthenCallback, Options}) ->
+    L = [{Topic, #service{callback = Callback, state = State}} ||
+            {Topic, Callback, State} <- Services],
+
+    Extra = case lists:keyfind(state, 1, Options) of
+                {state, ExtraValue} ->
+                    case erlang:is_list(ExtraValue) of
+                        true ->
+                            ExtraValue;
+                        false ->
+                            []
+                    end;
+                false ->
+                    []
+            end,
+
+    % Services, Channels, AuthenCallback, Extra
+    {orddict:from_list(L), orddict:new(),
+     #authen_callback{callback = AuthenCallback, success = false},
+     Extra}.
+
+init_state(Services) ->
+    init_state(Services, {undefined, []}).
+
+
+%% Get result of authentication callback if it exists.
+%% Otherwise return ``authen_callback_not_found``.
+%% Authentication callback should return {ok, State} or {success, State}.
+get_authen_callback_result(#authen_callback{callback = AuthenCallback},
+                           Handle, What, Extra) ->
+    case erlang:is_function(AuthenCallback) of
+        true ->
+            AuthenCallback(Handle, What, Extra);
+        false ->
+            authen_callback_not_found
+    end.
+
+sockjs_init(Conn, {Services, Channels, AuthenCallbackRec, Extra} = S) ->
+    case get_authen_callback_result(AuthenCallbackRec, Conn, init, Extra) of
+        authen_callback_not_found ->
+            {ok, S};
+        {ok, Extra1} ->
+            {ok, {Services, Channels, AuthenCallbackRec, Extra1}}
+    end.
+
+sockjs_handle_via_channel(Conn, Data, {Services, Channels, AuthenCallbackRec, Extra}) ->
+    [Type, Topic, Payload] = split($,, binary_to_list(Data), 3),
+    case orddict:find(Topic, Services) of
+        {ok, Service} ->
+            Channels1 = action(Conn, {Type, Topic, Payload}, Service, Channels, Extra),
+            {ok, {Services, Channels1, AuthenCallbackRec, Extra}};
+        _Else ->
+            {ok, {Services, Channels, AuthenCallbackRec, Extra}}
+    end.
+
+sockjs_handle(Conn, Data, {Services, Channels,
+                           #authen_callback{success = Success} = AuthenCallbackRec,
+                           Extra} = S) ->
+    case Success of
+        true ->
+            sockjs_handle_via_channel(Conn, Data, S);
+        false ->
+            case get_authen_callback_result(AuthenCallbackRec, Conn, {recv, Data}, Extra) of
+                authen_callback_not_found ->
+                    sockjs_handle_via_channel(Conn, Data, {Services, Channels, AuthenCallbackRec, Extra});
+                {success, Extra1} ->
+                    {ok, {Services, Channels, AuthenCallbackRec#authen_callback{success = true}, Extra1}};
+                {ok, Extra1} ->
+                    {ok, {Services, Channels, AuthenCallbackRec, Extra1}}
+            end
+    end.
+
+sockjs_terminate(Conn, {Services, Channels, AuthenCallbackRec, Extra}) ->
+    case get_authen_callback_result(AuthenCallbackRec, Conn, closed, Extra) of
+        {ok, Extra1} ->
+            ok;
+        _Else ->
+            Extra1 = Extra
+    end,
+
+    _ = [ {emit(closed, Channel)} ||
+            {_Topic, Channel} <- orddict:to_list(Channels) ],
+    {ok, {Services, orddict:new(), AuthenCallbackRec, Extra1}}.
+
+
+action(Conn, {Type, Topic, Payload}, Service, Channels, Extra) ->
+    case {Type, orddict:is_key(Topic, Channels)} of
+        {"sub", false} ->
+            Channel = Service#service{
+                            state = Service#service.state ++ Extra,
+                            vconn = {sockjs_multiplex_channel, Conn, Topic}
+                            },
+            orddict:store(Topic, emit(init, Channel), Channels);
+        {"uns", true} ->
+            Channel = orddict:fetch(Topic, Channels),
+            emit(closed, Channel),
+            orddict:erase(Topic, Channels);
+        {"msg", true} ->
+            Channel = orddict:fetch(Topic, Channels),
+            orddict:store(Topic, emit({recv, Payload}, Channel), Channels);
+        _Else ->
+            %% Ignore
+            Channels
+    end.
+
+
+emit(What, Channel = #service{callback = Callback,
+                              state    = State,
+                              vconn    = VConn}) ->
+    case Callback(VConn, What, State) of
+        {ok, State1} -> Channel#service{state = State1};
+        ok           -> Channel
+    end.
+
+
+%% --------------------------------------------------------------------------
+
+split(Char, Str, Limit) when Limit > 0 ->
+    Acc = split(Char, Str, Limit, []),
+    lists:reverse(Acc);
+split(_Char, Str, 0) ->
+    [Str].
+
+split(_Char, Str, 1, Acc) ->
+    [Str | Acc];
+split(Char, Str, Limit, Acc) ->
+    {L, R} = case string:chr(Str, Char) of
+                 0 -> {Str, ""};
+                 I -> {string:substr(Str, 1, I-1), string:substr(Str, I+1)}
+             end,
+    split(Char, R, Limit-1, [L | Acc]).
diff --git a/deps/sockjs/src/sockjs_multiplex_channel.erl b/deps/sockjs/src/sockjs_multiplex_channel.erl
new file mode 100644 (file)
index 0000000..4cc48fa
--- /dev/null
@@ -0,0 +1,25 @@
+-module(sockjs_multiplex_channel).
+
+-export([send/2, close/1, close/3, info/1]).
+
+-type(channel() :: {?MODULE, sockjs_session:conn(), topic()}).
+-type(topic()    :: string()).
+
+-export_type([channel/0, topic/0]).
+
+
+-spec send(iodata(), channel()) -> ok.
+send(Data, {?MODULE, Conn = {sockjs_session, _}, Topic}) ->
+       sockjs_session:send(iolist_to_binary(["msg", ",", Topic, ",", Data]), Conn).
+
+-spec close(channel()) -> ok.
+close(Channel) ->
+    close(1000, "Normal closure", Channel).
+
+-spec close(non_neg_integer(), string(), channel()) -> ok.
+close(_Code, _Reason, {?MODULE, Conn, Topic}) ->
+       sockjs_session:send(iolist_to_binary(["uns", ",", Topic]), Conn).
+
+-spec info(channel()) -> [{atom(), any()}].
+info({?MODULE, Conn = {sockjs_session, _}, Topic}) ->
+    sockjs_session:info(Conn) ++ [{topic, Topic}].
\ No newline at end of file
similarity index 86%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session.erl
rename to deps/sockjs/src/sockjs_session.erl
index 7e4ae00ddcdfe8c745445c24bd0e425ea7c9741a..d2d5d8b6d7efb04981cc29b8115d9df6631f3814 100644 (file)
          handle_cast/2]).
 
 -include("sockjs_internal.hrl").
-%% -type(handle() :: {?MODULE, {pid(), info()}}).
-
--record(session, {id                           , %%  session(),
-                  outbound_queue = queue:new() , %%  queue()
-                  response_pid                 , %%  pid()
-                  disconnect_tref              , %%  reference()
-                  disconnect_delay = 5000      , %%  non_neg_integer()
-                  heartbeat_tref               , %%  reference() | triggered
-                  heartbeat_delay = 25000      , %%  non_neg_integer()
-                  ready_state = connecting     , %%  connecting | open | closed
-                  close_msg                    , %%  {non_neg_integer(), string()}
+
+-type(handle() :: {?MODULE, {pid(), info()}}).
+-type(conn()   :: {?MODULE, any()}).
+
+-export_type([conn/0]).
+
+-ifdef(pre17_type_specs).
+-define(QUEUE_TYPE, queue()).
+-else.
+-define(QUEUE_TYPE, queue:queue()).
+-endif.
+
+-record(session, {id                           :: session(),
+                  outbound_queue = queue:new() :: ?QUEUE_TYPE,
+                  response_pid                 :: pid(),
+                  disconnect_tref              :: reference(),
+                  disconnect_delay = 5000      :: non_neg_integer(),
+                  heartbeat_tref               :: reference() | triggered,
+                  heartbeat_delay = 25000      :: non_neg_integer(),
+                  ready_state = connecting     :: connecting | open | closed,
+                  close_msg                    :: {non_neg_integer(), string()},
                   callback,
                   state,
-                  handle                        %%  handle()
+                  handle                       :: handle()
                  }).
 -define(ETS, sockjs_table).
 
 
-%% -type(session_or_undefined() :: session() | undefined).
-%% -type(session_or_pid() :: session() | pid()).
+-type(session_or_undefined() :: session() | undefined).
+-type(session_or_pid() :: session() | pid()).
 
 %% --------------------------------------------------------------------------
 
-%% -spec init() -> ok.
+-spec init() -> ok.
 init() ->
     _ = ets:new(?ETS, [public, named_table]),
     ok.
 
-%% -spec start_link(session_or_undefined(), service(), info()) -> {ok, pid()}.
+-spec start_link(session_or_undefined(), service(), info()) -> {ok, pid()}.
 start_link(SessionId, Service, Info) ->
     gen_server:start_link(?MODULE, {SessionId, Service, Info}, []).
 
-%% -spec maybe_create(session_or_undefined(), service(), info()) -> pid().
+-spec maybe_create(session_or_undefined(), service(), info()) -> pid().
 maybe_create(SessionId, Service, Info) ->
     case ets:lookup(?ETS, SessionId) of
         []          -> {ok, SPid} = sockjs_session_sup:start_child(
@@ -53,7 +63,7 @@ maybe_create(SessionId, Service, Info) ->
     end.
 
 
-%% -spec received(list(iodata()), session_or_pid()) -> ok.
+-spec received(list(iodata()), session_or_pid()) -> ok.
 received(Messages, SessionPid) when is_pid(SessionPid) ->
     case gen_server:call(SessionPid, {received, Messages}, infinity) of
         ok    -> ok;
@@ -63,27 +73,27 @@ received(Messages, SessionPid) when is_pid(SessionPid) ->
 received(Messages, SessionId) ->
     received(Messages, spid(SessionId)).
 
-%% -spec send(iodata(), handle()) -> ok.
+-spec send(iodata(), handle()) -> ok.
 send(Data, {?MODULE, {SPid, _}}) ->
     gen_server:cast(SPid, {send, Data}),
     ok.
 
-%% -spec close(non_neg_integer(), string(), handle()) -> ok.
+-spec close(non_neg_integer(), string(), handle()) -> ok.
 close(Code, Reason, {?MODULE, {SPid, _}}) ->
     gen_server:cast(SPid, {close, Code, Reason}),
     ok.
 
-%% -spec info(handle()) -> info().
+-spec info(handle()) -> info().
 info({?MODULE, {_SPid, Info}}) ->
     Info.
 
-%% -spec reply(session_or_pid()) ->
-%%                   wait | session_in_use | {ok | close, frame()}.
+-spec reply(session_or_pid()) ->
+                   wait | session_in_use | {ok | close, frame()}.
 reply(Session) ->
     reply(Session, true).
 
-%% -spec reply(session_or_pid(), boolean()) ->
-%%                   wait | session_in_use | {ok | close, frame()}.
+-spec reply(session_or_pid(), boolean()) ->
+                   wait | session_in_use | {ok | close, frame()}.
 reply(SessionPid, Multiple) when is_pid(SessionPid) ->
     gen_server:call(SessionPid, {reply, self(), Multiple}, infinity);
 reply(SessionId, Multiple) ->
@@ -154,7 +164,7 @@ unmark_waiting(RPid, State = #session{response_pid    = Pid,
   when Pid =/= undefined andalso Pid =/= RPid ->
     State.
 
-%% -spec emit(emittable(), #session{}) -> #session{}.
+-spec emit(emittable(), #session{}) -> #session{}.
 emit(What, State = #session{callback = Callback,
                             state    = UserState,
                             handle   = Handle}) ->
@@ -175,7 +185,7 @@ emit(What, State = #session{callback = Callback,
 
 %% --------------------------------------------------------------------------
 
-%% -spec init({session_or_undefined(), service(), info()}) -> {ok, #session{}}.
+-spec init({session_or_undefined(), service(), info()}) -> {ok, #session{}}.
 init({SessionId, #service{callback         = Callback,
                           state            = UserState,
                           disconnect_delay = DisconnectDelay,
similarity index 88%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session_sup.erl
rename to deps/sockjs/src/sockjs_session_sup.erl
index 71c7ff45c567cec20b92d569d5ca8f9b72abc169..4197ce321f2f523593c31eecdc62a7943c42cb05 100644 (file)
@@ -7,7 +7,7 @@
 
 %% --------------------------------------------------------------------------
 
-%% -spec start_link() -> ignore | {'ok', pid()} | {'error', any()}.
+-spec start_link() -> ignore | {'ok', pid()} | {'error', any()}.
 start_link() ->
      supervisor:start_link({local, ?MODULE}, ?MODULE, []).
 
similarity index 88%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_util.erl
rename to deps/sockjs/src/sockjs_util.erl
index 9b9969dc7fac39959f93bf26ec2405c421816f5e..be3f972f7c26da61a46d7614bc743f144d0af329 100644 (file)
@@ -8,7 +8,7 @@
 
 %% --------------------------------------------------------------------------
 
-%% -spec rand32() -> non_neg_integer().
+-spec rand32() -> non_neg_integer().
 rand32() ->
     case get(random_seeded) of
         undefined ->
@@ -21,7 +21,7 @@ rand32() ->
     random:uniform(erlang:trunc(math:pow(2,32)))-1.
 
 
-%% -spec encode_frame(frame()) -> iodata().
+-spec encode_frame(frame()) -> iodata().
 encode_frame({open, nil}) ->
     <<"o">>;
 encode_frame({close, {Code, Reason}}) ->
@@ -34,7 +34,7 @@ encode_frame({heartbeat, nil}) ->
     <<"h">>.
 
 
-%% -spec url_escape(string(), string()) -> iolist().
+-spec url_escape(string(), string()) -> iolist().
 url_escape(Str, Chars) ->
     [case lists:member(Char, Chars) of
          true  -> hex(Char);
similarity index 88%
rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_ws_handler.erl
rename to deps/sockjs/src/sockjs_ws_handler.erl
index c011c898ac4b10f14be38bb2d7f467ab7e285fac..bcf463d81d49710c95d427b22352fdcf8da89b30 100644 (file)
@@ -6,7 +6,7 @@
 
 %% --------------------------------------------------------------------------
 
-%% -spec received(websocket|rawwebsocket, pid(), binary()) -> ok | shutdown.
+-spec received(websocket|rawwebsocket, pid(), binary()) -> ok | shutdown.
 %% Ignore empty
 received(_RawWebsocket, _SessionPid, <<>>) ->
     ok;
@@ -30,7 +30,7 @@ session_received(Messages, SessionPid) ->
         no_session -> shutdown
     end.
 
-%% -spec reply(websocket|rawwebsocket, pid()) -> {close|open, binary()} | wait.
+-spec reply(websocket|rawwebsocket, pid()) -> {close|open, binary()} | wait.
 reply(websocket, SessionPid) ->
     case sockjs_session:reply(SessionPid) of
         {W, Frame} when W =:= ok orelse W =:= close->
@@ -52,7 +52,7 @@ reply(rawwebsocket, SessionPid) ->
             wait
     end.
 
-%% -spec close(websocket|rawwebsocket, pid()) -> ok.
+-spec close(websocket|rawwebsocket, pid()) -> ok.
 close(_RawWebsocket, SessionPid) ->
     SessionPid ! force_shutdown,
     ok.
diff --git a/deps/webmachine/Makefile b/deps/webmachine/Makefile
new file mode 100644 (file)
index 0000000..a3ed568
--- /dev/null
@@ -0,0 +1,25 @@
+IGNORE_DEPS += edown eper eunit_formatters meck node_package rebar_lock_deps_plugin rebar_vsn_plugin reltool_util
+C_SRC_DIR = /path/do/not/exist
+C_SRC_TYPE = rebar
+DRV_CFLAGS = -fPIC
+export DRV_CFLAGS
+ERLANG_ARCH = 64
+export ERLANG_ARCH
+ERLC_OPTS = +debug_info
+export ERLC_OPTS
+ERLC_OPTS += -Dold_hash=1
+
+DEPS += mochiweb
+dep_mochiweb = git git://github.com/rabbitmq/mochiweb 845428379ed8b58eadc49aba26838d86ea809663
+COMPILE_FIRST +=
+
+
+rebar_dep: preprocess pre-deps deps pre-app app
+
+preprocess::
+
+pre-deps::
+
+pre-app::
+
+include ../../erlang.mk
\ No newline at end of file
diff --git a/deps/webmachine/rebar.config b/deps/webmachine/rebar.config
new file mode 100644 (file)
index 0000000..b7c5143
--- /dev/null
@@ -0,0 +1,9 @@
+%%-*- mode: erlang -*-
+{erl_opts, [warnings_as_errors]}.
+{cover_enabled, true}.
+{edoc_opts, [{preprocess, true}]}.
+
+{deps, [
+        {mochiweb, ".*", {git, "git://github.com/rabbitmq/mochiweb",
+                            "845428379ed8b58eadc49aba26838d86ea809663"}}
+        ]}.
diff --git a/deps/webmachine/src/webmachine.app.src b/deps/webmachine/src/webmachine.app.src
new file mode 100644 (file)
index 0000000..51454fa
--- /dev/null
@@ -0,0 +1,8 @@
+{application,webmachine,
+             [{description,"webmachine"},
+              {vsn,"1.10.3"},
+              {modules,[]},
+              {registered,[]},
+              {applications,[kernel,stdlib,mochiweb]},
+              {mod,{webmachine_app,[]}},
+              {env,[]}]}.
diff --git a/erlang.mk b/erlang.mk
new file mode 100644 (file)
index 0000000..6d2a31c
--- /dev/null
+++ b/erlang.mk
@@ -0,0 +1,6738 @@
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+
+ERLANG_MK_VERSION = 2.0.0-pre.2-144-g647ffd1
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN   " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A0 -noinput -boot start_clean
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+       $(verbose) :
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+       $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+distclean-tmp:
+       $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+       $(verbose) printf "%s\n" \
+               "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+               "Copyright (c) 2013-2015 Loïc Hoguin <essen@ninenines.eu>" \
+               "" \
+               "Usage: [V=1] $(MAKE) [target]..." \
+               "" \
+               "Core targets:" \
+               "  all           Run deps, app and rel targets in that order" \
+               "  app           Compile the project" \
+               "  deps          Fetch dependencies (if needed) and compile them" \
+               "  fetch-deps    Fetch dependencies recursively (if needed) without compiling them" \
+               "  list-deps     List dependencies recursively on stdout" \
+               "  search q=...  Search for a package in the built-in index" \
+               "  rel           Build a release for this project, if applicable" \
+               "  docs          Build the documentation for this project" \
+               "  install-docs  Install the man pages for this project" \
+               "  check         Compile and run all tests and analysis for this project" \
+               "  tests         Run the tests for this project" \
+               "  clean         Delete temporary and output files from most targets" \
+               "  distclean     Delete all temporary and output files" \
+               "  help          Display this help and exit" \
+               "  erlang-mk     Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty)        $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(subst \,\\\\,$(shell cygpath -w $1))
+else
+core_native_path = $1
+endif
+
+ifeq ($(shell which wget 2>/dev/null | wc -l), 1)
+define core_http_get
+       wget --no-check-certificate -O $(1) $(2)|| rm $(1)
+endef
+else
+define core_http_get.erl
+       ssl:start(),
+       inets:start(),
+       case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of
+               {ok, {{_, 200, _}, _, Body}} ->
+                       case file:write_file("$(1)", Body) of
+                               ok -> ok;
+                               {error, R1} -> halt(R1)
+                       end;
+               {error, R2} ->
+                       halt(R2)
+       end,
+       halt(0).
+endef
+
+define core_http_get
+       $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2))
+endef
+endif
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2)))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk:
+       git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ifdef ERLANG_MK_COMMIT
+       cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+endif
+       if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+       $(MAKE) -C $(ERLANG_MK_BUILD_DIR)
+       cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+       rm -rf $(ERLANG_MK_BUILD_DIR)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another a key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY.  It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += classifier
+pkg_classifier_name = classifier
+pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier
+pkg_classifier_homepage = https://github.com/inaka/classifier
+pkg_classifier_fetch = git
+pkg_classifier_repo = https://github.com/inaka/classifier
+pkg_classifier_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl
+pkg_dtl_fetch = git
+pkg_dtl_repo = https://github.com/oinksoft/dtl
+pkg_dtl_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/knutin/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/knutin/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple non intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = exec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - a Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards  and  for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gossiperl
+pkg_gossiperl_name = gossiperl
+pkg_gossiperl_description = Gossip middleware in Erlang
+pkg_gossiperl_homepage = http://gossiperl.com/
+pkg_gossiperl_fetch = git
+pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl
+pkg_gossiperl_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/basho/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/basho/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/basho/lager
+pkg_lager_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = Erlang MySQL Driver (from code.google.com)
+pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver
+pkg_mysql_commit = master
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an id generator for message service.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://github.com/krestenkrab/triq
+pkg_triq_fetch = git
+pkg_triq_repo = https://github.com/krestenkrab/triq
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = zab protocol implemented in erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+       $(verbose) printf "%s\n" \
+               $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name:    $(1)") \
+               "App name:    $(pkg_$(1)_name)" \
+               "Description: $(pkg_$(1)_description)" \
+               "Home page:   $(pkg_$(1)_homepage)" \
+               "Fetch with:  $(pkg_$(1)_fetch)" \
+               "Repository:  $(pkg_$(1)_repo)" \
+               "Commit:      $(pkg_$(1)_commit)" \
+               ""
+
+endef
+
+search:
+ifdef q
+       $(foreach p,$(PACKAGES), \
+               $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+                       $(call pkg_print,$(p))))
+else
+       $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+dep_repo = $(patsubst git://github.com/%,https://github.com/%, \
+       $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+       ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+       ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP   " $(1);
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Core targets.
+
+ifdef IS_APP
+apps::
+else
+apps:: $(ALL_APPS_DIRS)
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix, a proper fix would be to compile apps/* in the right order.
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               mkdir -p $$dep/ebin || exit $$?; \
+       done
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+                       $(MAKE) -C $$dep IS_APP=1 || exit $$?; \
+               fi \
+       done
+endif
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log
+endif
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \
+               if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+                       :; \
+               else \
+                       echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+                       if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+                               $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \
+                       else \
+                               echo "Error: No Makefile to build dependency $$dep."; \
+                               exit 2; \
+                       fi \
+               fi \
+       done
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While Makefile file could be GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+       if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+               $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+               $(call dep_autopatch_erlang_mk,$(1)); \
+       elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+               if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \
+                       $(call dep_autopatch2,$(1)); \
+               else \
+                       $(call erlang,$(call dep_autopatch_app.erl,$(1))); \
+               fi \
+       else \
+               if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+                       $(call dep_autopatch_noop,$(1)); \
+               else \
+                       $(call dep_autopatch2,$(1)); \
+               fi \
+       fi
+endef
+
+# dep_autopatch2($1): full autopatch for a rebar-style dependency.
+# Regenerates the .app.src (first materializing it from .app.src.script when
+# present), then either converts the dep's rebar build to erlang.mk (after
+# fetching a local rebar — see dep_autopatch_fetch_rebar) or, when no rebar
+# files exist, generates a minimal erlang.mk Makefile for it.
+define dep_autopatch2
+	if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+		$(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+	fi; \
+	$(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+	if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \
+		$(call dep_autopatch_fetch_rebar); \
+		$(call dep_autopatch_rebar,$(1)); \
+	else \
+		$(call dep_autopatch_gen,$(1)); \
+	fi
+endef
+
+# dep_autopatch_noop($1): replace the dep's Makefile with a single
+# do-nothing target so recursive builds succeed without doing anything.
+define dep_autopatch_noop
+	printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Overwrite erlang.mk with the current file by default.
+# The dep's own erlang.mk is replaced by an include of the top-level copy
+# (relative path computed via core_relpath). Setting NO_AUTOPATCH_ERLANG_MK
+# turns this into a no-op, keeping the dep's bundled erlang.mk.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+	echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \
+		> $(DEPS_DIR)/$1/erlang.mk
+endef
+else
+define dep_autopatch_erlang_mk
+	:
+endef
+endif
+
+# dep_autopatch_gen($1): write a minimal erlang.mk-based Makefile for a dep
+# that has no usable build system of its own.
+define dep_autopatch_gen
+	printf "%s\n" \
+		"ERLC_OPTS = +debug_info" \
+		"include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Clone and build a pinned rebar (fixed commit) once into
+# $(ERLANG_MK_TMP)/rebar; dep_autopatch_rebar.erl loads its modules from
+# there. Skipped if the directory already exists.
+define dep_autopatch_fetch_rebar
+	mkdir -p $(ERLANG_MK_TMP); \
+	if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+		git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \
+		cd $(ERLANG_MK_TMP)/rebar; \
+		git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \
+		$(MAKE); \
+		cd -; \
+	fi
+endef
+
+# dep_autopatch_rebar($1): keep the dep's original Makefile as
+# Makefile.orig.mk (hooks may still invoke it), run the rebar-to-erlang.mk
+# converter below, and drop any stale pre-built .app from ebin/.
+define dep_autopatch_rebar
+	if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+		mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+	fi; \
+	$(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+	rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+# dep_autopatch_rebar.erl: Erlang script (expanded via $(call erlang,...))
+# that translates a dependency's rebar configuration into an erlang.mk
+# Makefile. It evaluates rebar.config and (if present) rebar.config.script,
+# then appends to the dep's Makefile: ERLC_OPTS derived from erl_opts,
+# DEPS/dep_* lines from deps, COMPILE_FIRST from erl_first_files, pre-*
+# hook targets from pre_hooks, and — when port_specs or c_src/ exist — a
+# generated c_src/Makefile.erlang.mk with compile/link rules. Finally it
+# builds, loads and runs any rebar plugins (preprocess, pre_compile,
+# compile steps) before halting.
+# NOTE(review): '$$' and '\(' sequences are make-level escapes that become
+# '$' and '(' after expansion; do not edit them without expanding the macro
+# to check the result. No comments are added inside the body because the
+# expansion context is escape-sensitive.
+define dep_autopatch_rebar.erl
+	application:load(rebar),
+	application:set_env(rebar, log_level, debug),
+	Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+		{ok, Conf0} -> Conf0;
+		_ -> []
+	end,
+	{Conf, OsEnv} = fun() ->
+		case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+			false -> {Conf1, []};
+			true ->
+				Bindings0 = erl_eval:new_bindings(),
+				Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+				Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+				Before = os:getenv(),
+				{ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+				{Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+		end
+	end(),
+	Write = fun (Text) ->
+		file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+	end,
+	Escape = fun (Text) ->
+		re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+	end,
+	Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+		"rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+	Write("C_SRC_DIR = /path/do/not/exist\n"),
+	Write("C_SRC_TYPE = rebar\n"),
+	Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+	Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+	fun() ->
+		Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+		case lists:keyfind(erl_opts, 1, Conf) of
+			false -> ok;
+			{_, ErlOpts} ->
+				lists:foreach(fun
+					({d, D}) ->
+						Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+					({i, I}) ->
+						Write(["ERLC_OPTS += -I ", I, "\n"]);
+					({platform_define, Regex, D}) ->
+						case rebar_utils:is_arch(Regex) of
+							true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n");
+							false -> ok
+						end;
+					({parse_transform, PT}) ->
+						Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n");
+					(_) -> ok
+				end, ErlOpts)
+		end,
+		Write("\n")
+	end(),
+	fun() ->
+		File = case lists:keyfind(deps, 1, Conf) of
+			false -> [];
+			{_, Deps} ->
+				[begin case case Dep of
+							{N, S} when is_atom(N), is_list(S) -> {N, {hex, S}};
+							{N, S} when is_tuple(S) -> {N, S};
+							{N, _, S} -> {N, S};
+							{N, _, S, _} -> {N, S};
+							_ -> false
+						end of
+					false -> ok;
+					{Name, Source} ->
+						{Method, Repo, Commit} = case Source of
+							{hex, V} -> {hex, V, undefined};
+							{git, R} -> {git, R, master};
+							{M, R, {branch, C}} -> {M, R, C};
+							{M, R, {ref, C}} -> {M, R, C};
+							{M, R, {tag, C}} -> {M, R, C};
+							{M, R, C} -> {M, R, C}
+						end,
+						Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+				end end || Dep <- Deps]
+		end
+	end(),
+	fun() ->
+		case lists:keyfind(erl_first_files, 1, Conf) of
+			false -> ok;
+			{_, Files} ->
+				Names = [[" ", case lists:reverse(F) of
+					"lre." ++ Elif -> lists:reverse(Elif);
+					Elif -> lists:reverse(Elif)
+				end] || "src/" ++ F <- Files],
+				Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+		end
+	end(),
+	Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+	Write("\npreprocess::\n"),
+	Write("\npre-deps::\n"),
+	Write("\npre-app::\n"),
+	PatchHook = fun(Cmd) ->
+		case Cmd of
+			"make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+			"gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+			"make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+			"gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+			_ -> Escape(Cmd)
+		end
+	end,
+	fun() ->
+		case lists:keyfind(pre_hooks, 1, Conf) of
+			false -> ok;
+			{_, Hooks} ->
+				[case H of
+					{'get-deps', Cmd} ->
+						Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+					{compile, Cmd} ->
+						Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+					{Regex, compile, Cmd} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+							false -> ok
+						end;
+					_ -> ok
+				end || H <- Hooks]
+		end
+	end(),
+	ShellToMk = fun(V) ->
+		re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]),
+			"-Werror\\\\b", "", [{return, list}, global])
+	end,
+	PortSpecs = fun() ->
+		case lists:keyfind(port_specs, 1, Conf) of
+			false ->
+				case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+					false -> [];
+					true ->
+						[{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+							proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+				end;
+			{_, Specs} ->
+				lists:flatten([case S of
+					{Output, Input} -> {ShellToMk(Output), Input, []};
+					{Regex, Output, Input} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {ShellToMk(Output), Input, []};
+							false -> []
+						end;
+					{Regex, Output, Input, [{env, Env}]} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {ShellToMk(Output), Input, Env};
+							false -> []
+						end
+				end || S <- Specs])
+		end
+	end(),
+	PortSpecWrite = fun (Text) ->
+		file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+	end,
+	case PortSpecs of
+		[] -> ok;
+		_ ->
+			Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"),
+			PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+				[code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+			PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L \\"~s\\" -lerl_interface -lei\n",
+				[code:lib_dir(erl_interface, lib)])),
+			[PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+			FilterEnv = fun(Env) ->
+				lists:flatten([case E of
+					{_, _} -> E;
+					{Regex, K, V} ->
+						case rebar_utils:is_arch(Regex) of
+							true -> {K, V};
+							false -> []
+						end
+				end || E <- Env])
+			end,
+			MergeEnv = fun(Env) ->
+				lists:foldl(fun ({K, V}, Acc) ->
+					case lists:keyfind(K, 1, Acc) of
+						false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+						{_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+					end
+				end, [], Env)
+			end,
+			PortEnv = case lists:keyfind(port_env, 1, Conf) of
+				false -> [];
+				{_, PortEnv0} -> FilterEnv(PortEnv0)
+			end,
+			PortSpec = fun ({Output, Input0, Env}) ->
+				filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+				Input = [[" ", I] || I <- Input0],
+				PortSpecWrite([
+					[["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+					case $(PLATFORM) of
+						darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+						_ -> ""
+					end,
+					"\n\nall:: ", Output, "\n\n",
+					"%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					"%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+					[[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+					Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+						"$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+					"\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+					case {filename:extension(Output), $(PLATFORM)} of
+					    {[], _} -> "\n";
+					    {_, darwin} -> "\n";
+					    _ -> " -shared\n"
+					end])
+			end,
+			[PortSpec(S) || S <- PortSpecs]
+	end,
+	Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"),
+	RunPlugin = fun(Plugin, Step) ->
+		case erlang:function_exported(Plugin, Step, 2) of
+			false -> ok;
+			true ->
+				c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+				Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+					dict:store(base_dir, "", dict:new())}, undefined),
+				io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+		end
+	end,
+	fun() ->
+		case lists:keyfind(plugins, 1, Conf) of
+			false -> ok;
+			{_, Plugins} ->
+				[begin
+					case lists:keyfind(deps, 1, Conf) of
+						false -> ok;
+						{_, Deps} ->
+							case lists:keyfind(P, 1, Deps) of
+								false -> ok;
+								_ ->
+									Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+									io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+									io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+									code:add_patha(Path ++ "/ebin")
+							end
+					end
+				end || P <- Plugins],
+				[case code:load_file(P) of
+					{module, P} -> ok;
+					_ ->
+						case lists:keyfind(plugin_dir, 1, Conf) of
+							false -> ok;
+							{_, PluginsDir} ->
+								ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+								{ok, P, Bin} = compile:file(ErlFile, [binary]),
+								{module, P} = code:load_binary(P, ErlFile, Bin)
+						end
+				end || P <- Plugins],
+				[RunPlugin(P, preprocess) || P <- Plugins],
+				[RunPlugin(P, pre_compile) || P <- Plugins],
+				[RunPlugin(P, compile) || P <- Plugins]
+		end
+	end(),
+	halt()
+endef
+
+# dep_autopatch_app.erl: rewrite the dep's ebin/$1.app so its 'modules'
+# entry lists exactly the .erl files found (recursively) under src/.
+# A no-op when the .app file does not exist.
+define dep_autopatch_app.erl
+	UpdateModules = fun(App) ->
+		case filelib:is_regular(App) of
+			false -> ok;
+			true ->
+				{ok, [{application, '$(1)', L0}]} = file:consult(App),
+				Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true,
+					fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []),
+				L = lists:keystore(modules, 1, L0, {modules, Mods}),
+				ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}]))
+		end
+	end,
+	UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"),
+	halt()
+endef
+
+# dep_autopatch_appsrc_script.erl: evaluate src/$1.app.src.script and write
+# the resulting term out as a plain src/$1.app.src.
+define dep_autopatch_appsrc_script.erl
+	AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcScript = AppSrc ++ ".script",
+	Bindings = erl_eval:new_bindings(),
+	{ok, Conf} = file:script(AppSrcScript, Bindings),
+	ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+	halt()
+endef
+
+# dep_autopatch_appsrc.erl: normalize the dep's .app.src. Uses src/$1.app.src
+# as input when it exists, otherwise falls back to ebin/$1.app (deleting it
+# afterwards). Normalizations: empty the modules list (rebuilt at compile
+# time), turn {vsn, git} into {vsn, "git"}, and ensure a 'registered' entry.
+define dep_autopatch_appsrc.erl
+	AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+	AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+	case filelib:is_regular(AppSrcIn) of
+		false -> ok;
+		true ->
+			{ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+			L1 = lists:keystore(modules, 1, L0, {modules, []}),
+			L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end,
+			L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+			ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+			case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+	end,
+	halt()
+endef
+
+# dep_fetch_* family: one macro per fetch method, each expanding to shell
+# commands that place the dependency under $(DEPS_DIR)/<name>. The method
+# name is selected by dep_fetch below.
+
+# git: clone without checkout, then check out the pinned commit/branch/tag.
+define dep_fetch_git
+	git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+# git-submodule: the dep is a submodule of the current repository.
+define dep_fetch_git-submodule
+	git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+# hg: clone without update, then update to the pinned revision.
+define dep_fetch_hg
+	hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+	cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+# svn: plain checkout of the repository URL.
+define dep_fetch_svn
+	svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# cp: copy a local directory tree (dep_repo is a filesystem path here).
+define dep_fetch_cp
+	cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# dep_fetch_hex.erl: download $(1)-$(2).tar from the hex.pm S3 mirror over
+# HTTPS (httpc), then unpack the inner contents.tar.gz into the dep dir.
+define dep_fetch_hex.erl
+	ssl:start(),
+	inets:start(),
+	{ok, {{_, 200, _}, _, Body}} = httpc:request(get,
+		{"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []},
+		[], [{body_format, binary}]),
+	{ok, Files} = erl_tar:extract({binary, Body}, [memory]),
+	{_, Source} = lists:keyfind("contents.tar.gz", 1, Files),
+	ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]),
+	halt()
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+	$(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1))))));
+endef
+
+# fail: terminal fetch method used when the dependency cannot be resolved.
+define dep_fetch_fail
+	echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+	exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+# Interprets dep_$(1) as "<repo> [<commit>]", defaulting the commit to master.
+define dep_fetch_legacy
+	$(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+	git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+	cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
+
+# dep_fetch($1): resolve the fetch-method NAME for dependency $(1).
+# If a dep_$(1) variable exists, its first word is the method (when a
+# matching dep_fetch_<method> macro exists), otherwise 'legacy' inside a
+# dep build or 'fail'. Without dep_$(1), fall back to the packages index
+# ($(pkg_$(1)_fetch)) or 'fail'. The caller expands dep_fetch_<result>.
+define dep_fetch
+	$(if $(dep_$(1)), \
+		$(if $(dep_fetch_$(word 1,$(dep_$(1)))), \
+			$(word 1,$(dep_$(1))), \
+			$(if $(IS_DEP),legacy,fail)), \
+		$(if $(filter $(1),$(PACKAGES)), \
+			$(pkg_$(1)_fetch), \
+			fail))
+endef
+
+# dep_target($1): rule template, $(eval)'d once per dependency below.
+# Creates the $(DEPS_DIR)/<name> target: refuse deps that shadow a local
+# application in $(APPS_DIR), fetch via the method chosen by dep_fetch,
+# run autoreconf/configure when autotools files are present (configure
+# failures are tolerated via the leading '-'), then autopatch. The
+# amqp_client/rabbit branches are Fuel/RabbitMQ-specific shims that fetch
+# rabbitmq-codegen (and rabbitmq-server) instead of regular autopatching;
+# deps listed in NO_AUTOPATCH skip this last step entirely.
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1):
+	$(eval DEP_NAME := $(call dep_name,$1))
+	$(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+	$(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+		echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \
+		exit 17; \
+	fi
+	$(verbose) mkdir -p $(DEPS_DIR)
+	$(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+	$(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+			&& [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+		echo " AUTO  " $(1); \
+		cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+	fi
+	- $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+		echo " CONF  " $(DEP_STR); \
+		cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+	fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+	$(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi; \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+			echo " PATCH  Downloading rabbitmq-server"; \
+			git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+		fi; \
+		ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+	elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+		if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+			echo " PATCH  Downloading rabbitmq-codegen"; \
+			git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+		fi \
+	else \
+		$$(call dep_autopatch,$(DEP_NAME)) \
+	fi
+endif
+endef
+
+# Instantiate the rule above for every build-time and runtime dependency.
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+# Recurse clean/distclean into every application in ALL_APPS_DIRS, but only
+# from the top-level invocation (IS_APP=1 marks a recursive sub-make).
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \
+	done
+
+distclean:: distclean-apps
+
+distclean-apps:
+	$(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+		$(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \
+	done
+endif
+
+# distclean also wipes the whole deps directory unless SKIP_DEPS is set.
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+	$(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+# core_dep_plugin($1,$2): -include makefile $1 from the deps dir, with an
+# empty rule making it depend on dep $2 so the dep is fetched first.
+define core_dep_plugin
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endef
+
+# Each DEP_PLUGINS entry is either "dep/path/to/file.mk" or just "dep"
+# (shorthand for "dep/plugins.mk").
+$(foreach p,$(DEP_PLUGINS),\
+	$(eval $(if $(findstring /,$p),\
+		$(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+		$(call core_dep_plugin,$p/plugins.mk,$p))))
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# ErlyDTL template compilation: turns $(DTL_PATH)/*.dtl templates into
+# ebin/*_dtl.beam modules via the erlydtl application.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL   " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+# With DTL_FULL_PATH, module names encode the template's subdirectory
+# (slashes become underscores); otherwise only the basename is used.
+ifdef DTL_FULL_PATH
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%))))
+else
+BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES))))
+endif
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST)
+	@mkdir -p $(ERLANG_MK_TMP)
+	@if test -f $@; then \
+		touch $(DTL_FILES); \
+	fi
+	@touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+# erlydtl_compile.erl($1): compile each template in the space-separated
+# list $(1) to ebin/, deriving the module name per DTL_FULL_PATH above.
+define erlydtl_compile.erl
+	[begin
+		Module0 = case "$(strip $(DTL_FULL_PATH))" of
+			"" ->
+				filename:basename(F, ".dtl");
+			_ ->
+				"$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"),
+				re:replace(F2, "/",  "_",  [{return, list}, global])
+		end,
+		Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+		case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of
+			ok -> ok;
+			{ok, _} -> ok
+		end
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+# Compile only templates newer than the .app file ($? = changed prereqs).
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+	$(if $(strip $?),\
+		$(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?),-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))
+
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Protocol Buffers support: compiles src/*.proto via the protobuffs
+# application, then compiles the generated .erl files into ebin/.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+# compile_proto($1): generate .erl/.hrl from the listed .proto files, build
+# the results with erlc, then remove the intermediate .erl files from ebin/.
+define compile_proto
+	$(verbose) mkdir -p ebin/ include/
+	$(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1)))
+	$(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl
+	$(verbose) rm ebin/*.erl
+endef
+
+# compile_proto.erl($1): per-file protobuffs_compile invocation; headers go
+# next to the source tree's include/, sources to ebin/.
+define compile_proto.erl
+	[begin
+		Dir = filename:dirname(filename:dirname(F)),
+		protobuffs_compile:generate_source(F,
+			[{output_include_dir, Dir ++ "/include"},
+				{output_src_dir, Dir ++ "/ebin"}])
+	end || F <- string:tokens("$(1)", " ")],
+	halt().
+endef
+
+ifneq ($(wildcard src/),)
+ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto))
+	$(if $(strip $?),$(call compile_proto,$?))
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+# Default compiler flags; COMPILE_FIRST lists modules (without extension)
+# that must be built before the rest, ERLC_EXCLUDE modules to skip.
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+	+warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
+
+# Verbosity.
+# Convention: <tool>_verbose_0 prints a short tag (V=0), <tool>_verbose_2
+# traces commands with set -x (V=2); V=1 leaves commands echoed by make.
+
+app_verbose_0 = @echo " APP   " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP   " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC  " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+	$(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL  " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1  " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB   " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+# 'app' builds deps and the dependency file first; a leftover ebin/test
+# marker (from a previous test build) forces a clean rebuild.
+ifeq ($(wildcard ebin/test),)
+app:: deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+else
+app:: clean deps $(PROJECT).d
+	$(verbose) $(MAKE) --no-print-directory app-build
+endif
+
+# app_file($1=git-describe string,$2=module list): template for the
+# generated ebin/$(PROJECT).app. The library variant (no PROJECT_MOD
+# module in src/) omits 'mod' and registers no processes.
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, []},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}
+]}.
+endef
+else
+define app_file
+{application, $(PROJECT), [
+	{description, "$(PROJECT_DESCRIPTION)"},
+	{vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+	{id$(comma)$(space)"$(1)"}$(comma))
+	{modules, [$(call comma_list,$(2))]},
+	{registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+	{applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]},
+	{mod, {$(PROJECT_MOD), []}}
+]}.
+endef
+endif
+
+app-build: ebin/$(PROJECT).app
+	$(verbose) :
+
+# Source files.
+
+ERL_FILES = $(sort $(call core_find,src/,*.erl))
+CORE_FILES = $(sort $(call core_find,src/,*.core))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+# compile_asn1($1): run erlc on the listed .asn1 files, then move the
+# generated .erl into src/ and the .hrl/.asn1db into include/.
+define compile_asn1
+	$(verbose) mkdir -p include/
+	$(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1)
+	$(verbose) mv asn1/*.erl src/
+	$(verbose) mv asn1/*.hrl include/
+	$(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+	$(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+# Compile changed MIBs to priv/mibs/, then generate include/ headers from
+# the resulting .bin files.
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+	$(verbose) mkdir -p include/ priv/mibs/
+	$(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+	$(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES = $(sort $(call core_find,src/,*.xrl))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES = $(sort $(call core_find,src/,*.yrl))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+# Changed .xrl/.yrl scanners and parsers are regenerated into src/.
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+	$(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+       E = ets:new(makedep, [bag]),
+       G = digraph:new([acyclic]),
+       ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+       Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+       Add = fun (Mod, Dep) ->
+               case lists:keyfind(Dep, 1, Modules) of
+                       false -> ok;
+                       {_, DepFile} ->
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile}),
+                               digraph:add_vertex(G, Mod),
+                               digraph:add_vertex(G, Dep),
+                               digraph:add_edge(G, Mod, Dep)
+               end
+       end,
+       AddHd = fun (F, Mod, DepFile) ->
+               case file:open(DepFile, [read]) of
+                       {error, enoent} -> ok;
+                       {ok, Fd} ->
+                               F(F, Fd, Mod),
+                               {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+                               ets:insert(E, {ModFile, DepFile})
+               end
+       end,
+       Attr = fun
+               (F, Mod, behavior, Dep) -> Add(Mod, Dep);
+               (F, Mod, behaviour, Dep) -> Add(Mod, Dep);
+               (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep);
+               (F, Mod, compile, Opts) when is_list(Opts) ->
+                       case proplists:get_value(parse_transform, Opts) of
+                               undefined -> ok;
+                               Dep -> Add(Mod, Dep)
+                       end;
+               (F, Mod, include, Hrl) ->
+                       case filelib:is_file("include/" ++ Hrl) of
+                               true -> AddHd(F, Mod, "include/" ++ Hrl);
+                               false ->
+                                       case filelib:is_file("src/" ++ Hrl) of
+                                               true -> AddHd(F, Mod, "src/" ++ Hrl);
+                                               false -> false
+                                       end
+                       end;
+               (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl);
+               (F, Mod, import, {Imp, _}) ->
+                       case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of
+                               false -> ok;
+                               true -> Add(Mod, Imp)
+                       end;
+               (_, _, _, _) -> ok
+       end,
+       MakeDepend = fun(F, Fd, Mod) ->
+               case io:parse_erl_form(Fd, undefined) of
+                       {ok, {attribute, _, Key, Value}, _} ->
+                               Attr(F, Mod, Key, Value),
+                               F(F, Fd, Mod);
+                       {eof, _} ->
+                               file:close(Fd);
+                       _ ->
+                               F(F, Fd, Mod)
+               end
+       end,
+       [begin
+               Mod = list_to_atom(filename:basename(F, ".erl")),
+               {ok, Fd} = file:open(F, [read]),
+               MakeDepend(MakeDepend, Fd, Mod)
+       end || F <- ErlFiles],
+       Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+       CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+       ok = file:write_file("$(1)", [
+               [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+               "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n"
+       ]),
+       halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+       $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST)
+       @mkdir -p $(ERLANG_MK_TMP)
+       @if test -f $@; then \
+               touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+               touch -c $(PROJECT).d; \
+       fi
+       @touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+
+-include $(PROJECT).d
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+       $(verbose) mkdir -p ebin/
+
+define compile_erl
+       $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+               -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+       $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+       $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+       $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true))
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+               $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+       $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \
+               > ebin/$(PROJECT).app
+else
+       $(verbose) if [ -z "$$(grep -e '^[^%]*{[[:space:]]*modules[[:space:]]*,' src/$(PROJECT).app.src)" ]; then \
+               echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \
+               exit 1; \
+       fi
+       $(appsrc_verbose) cat src/$(PROJECT).app.src \
+               | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+               | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+               > ebin/$(PROJECT).app
+endif
+
+clean:: clean-app
+
+clean-app:
+       $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+               $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+               $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+               $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir:
+       $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \
+               $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/
+endif
+
+ifeq ($(wildcard src),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps
+       $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+else
+ifeq ($(wildcard ebin/test),)
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: clean deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+       $(gen_verbose) touch ebin/test
+else
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: deps test-deps $(PROJECT).d
+       $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)"
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+       $(gen_verbose) rm -f $(TEST_DIR)/*.beam
+endif
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+       $(if $(findstring +,$1),\
+               $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+       $(if $(filter hex,$(call dep_fetch,$d)),\
+               {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+               {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+$(eval _compat_rebar_config = $$(compat_rebar_config))
+$(eval export _compat_rebar_config)
+
+rebar.config:
+       $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+
+docs:: asciidoc
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc doc-deps
+       a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+       a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+endif
+
+ifeq ($(wildcard doc/src/manual/*.asciidoc),)
+asciidoc-manual:
+else
+asciidoc-manual: distclean-asciidoc doc-deps
+       for f in doc/src/manual/*.asciidoc ; do \
+               a2x -v -f manpage $$f ; \
+       done
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p doc/man$$s/ ; \
+               mv doc/src/manual/*.$$s doc/man$$s/ ; \
+               gzip doc/man$$s/*.$$s ; \
+       done
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+       for s in $(MAN_SECTIONS); do \
+               mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \
+               install -g `id -g` -o `id -u` -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \
+       done
+endif
+
+distclean:: distclean-asciidoc
+
+distclean-asciidoc:
+       $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Bootstrap targets:" \
+               "  bootstrap          Generate a skeleton of an OTP application" \
+               "  bootstrap-lib      Generate a skeleton of an OTP library" \
+               "  bootstrap-rel      Generate the files needed to build a release" \
+               "  new-app in=NAME    Create a new local OTP application NAME" \
+               "  new-lib in=NAME    Create a new local OTP library NAME" \
+               "  new t=TPL n=NAME   Generate a module NAME based on the template TPL" \
+               "  new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+               "  list-templates     List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]},
+       {mod, {$p_app, []}},
+       {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+       {description, ""},
+       {vsn, "0.1.0"},
+       {id, "git"},
+       {modules, []},
+       {registered, []},
+       {applications, [
+               kernel,
+               stdlib
+       ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+ifdef SP
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+
+endef
+else
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+endef
+endif
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.0.1
+
+include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+       $p_sup:start_link().
+
+stop(_State) ->
+       ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p]}.
+{extended_start_script, true}.
+{sys_config, "rel/sys.config"}.
+{vm_args, "rel/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+       supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+       Procs = [],
+       {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+       {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+       {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+       {noreply, State}.
+
+handle_info(_Info, State) ->
+       {noreply, State}.
+
+terminate(_Reason, _State) ->
+       ok.
+
+code_change(_OldVsn, State, _Extra) ->
+       {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+       {ok, Req2} = cowboy_req:reply(200, Req),
+       {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+       gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+       {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+       {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+       {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+       {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+       {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+       ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+       {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+       {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+       {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+       {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+       {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+       {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+       {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+       Req2 = cowboy_req:compact(Req),
+       {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+       {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+       {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+       {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+       {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+       ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+       socket :: inet:socket(),
+       transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+       Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+       {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+       ok = ranch:accept_ack(Ref),
+       loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+       loop(State).
+endef
+
+# Plugin-specific targets.
+
+define render_template
+       $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(eval n := $(PROJECT)_sup)
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc,src/$(PROJECT).app.src)
+endif
+       $(call render_template,bs_app,src/$(PROJECT)_app.erl)
+       $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+       $(error Error: src/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_Makefile,Makefile)
+       $(verbose) echo "include erlang.mk" >> Makefile
+       $(verbose) mkdir src/
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+       $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard rel/),)
+       $(error Error: rel/ directory already exists)
+endif
+       $(eval p := $(PROJECT))
+       $(call render_template,bs_relx_config,relx.config)
+       $(verbose) mkdir rel/
+       $(call render_template,bs_sys_config,rel/sys.config)
+       $(call render_template,bs_vm_args,rel/vm.args)
+
+new-app:
+ifndef in
+       $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(eval n := $(in)_sup)
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+       $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+       $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+       $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+       $(error Error: Application $in already exists)
+endif
+       $(eval p := $(in))
+       $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+       $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+       $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+       $(error Error: src/ directory does not exist)
+endif
+ifndef t
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef tpl_$(t)
+       $(error Unknown template)
+endif
+ifndef n
+       $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in=
+else
+       $(call render_template,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+       $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
+
+# Copyright (c) 2014-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+       C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+       C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+       C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+       CC = /mingw64/bin/gcc
+       export CC
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall
+       LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+       CC ?= cc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+       CC ?= gcc
+       CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+       CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+       CFLAGS += -fPIC
+       CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C     " $(?F);
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP   " $(?F);
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD    " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+       $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+       $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+       $(verbose) mkdir -p priv/
+       $(link_verbose) $(CC) $(OBJECTS) \
+               $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+               -o $(C_SRC_OUTPUT_FILE)
+
+%.o: %.c
+       $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+       $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+       $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+$(C_SRC_ENV):
+       $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+               io_lib:format( \
+                       \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+                       \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+                       \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \
+                       [code:root_dir(), erlang:system_info(version), \
+                       code:lib_dir(erl_interface, include), \
+                       code:lib_dir(erl_interface, lib)])), \
+               halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+       $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+       /* Initialize private data. */
+       *priv_data = NULL;
+
+       loads++;
+
+       return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+       /* Convert the private data to the new version. */
+       *priv_data = *old_priv_data;
+
+       loads++;
+
+       return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+       if (loads == 1) {
+               /* Destroy the private data. */
+       }
+
+       loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+       if (enif_is_atom(env, argv[0])) {
+               return enif_make_tuple2(env,
+                       enif_make_atom(env, "hello"),
+                       argv[0]);
+       }
+
+       return enif_make_tuple2(env,
+               enif_make_atom(env, "error"),
+               enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+       {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+       PrivDir = case code:priv_dir(?MODULE) of
+               {error, _} ->
+                       AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+                       filename:join(AppPath, "priv");
+               Path ->
+                       Path
+       end,
+       erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+       erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+       $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+       $(error Error: src/$n.erl already exists)
+endif
+ifdef in
+       $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+       $(verbose) mkdir -p $(C_SRC_DIR) src/
+       $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c)
+       $(call render_template,bs_erl_nif,src/$n.erl)
+endif
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-setup distclean-kerl
+
+KERL ?= $(CURDIR)/kerl
+export KERL
+
+KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+CI_INSTALL_DIR ?= $(HOME)/erlang
+CI_OTP ?=
+
+ifeq ($(strip $(CI_OTP)),)
+ci::
+else
+ci:: $(addprefix ci-,$(CI_OTP))
+
+ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP))
+
+ci-setup::
+
+ci_verbose_0 = @echo " CI    " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$(1): $(CI_INSTALL_DIR)/$(1)
+       $(ci_verbose) \
+               PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \
+               CI_OTP_RELEASE="$(1)" \
+               CT_OPTS="-label $(1)" \
+               $(MAKE) clean ci-setup tests
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp))))
+
+define ci_otp_target
+ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),)
+$(CI_INSTALL_DIR)/$(1): $(KERL)
+       $(KERL) build git $(OTP_GIT) $(1) $(1)
+       $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1)
+endif
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp))))
+
+$(KERL):
+       $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL))
+       $(verbose) chmod +x $(KERL)
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Continuous Integration targets:" \
+               "  ci          Run '$(MAKE) tests' on all configured Erlang versions." \
+               "" \
+               "The CI_OTP variable must be defined with the Erlang versions" \
+               "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+       $(gen_verbose) rm -rf $(KERL)
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+ifneq ($(wildcard $(TEST_DIR)),)
+       CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+else
+       CT_SUITES ?=
+endif
+
+# Core targets.
+
+tests:: ct
+
+distclean:: distclean-ct
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Common_test targets:" \
+               "  ct          Run all the common_test suites for this project" \
+               "" \
+               "All your common_test suites have their associated targets." \
+               "A suite named http_SUITE can be ran using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+       -no_auto_compile \
+       -noinput \
+       -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \
+       -dir $(TEST_DIR) \
+       -logdir $(CURDIR)/logs
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP),,apps-ct)
+else
+ct: test-build $(if $(IS_APP),,apps-ct)
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1:
+       $(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: test-build $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifndef t
+CT_EXTRA =
+else
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+endif
+
+define ct_suite_target
+ct-$(1): test-build
+       $(verbose) mkdir -p $(CURDIR)/logs/
+       $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+       $(gen_verbose) rm -rf $(CURDIR)/logs/
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Dialyzer targets:" \
+               "  plt         Build a PLT file for this project" \
+               "  dialyze     Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+       Opts = init:get_plain_arguments(),
+       {Filtered, _} = lists:foldl(fun
+               (O,                         {Os, true}) -> {[O|Os], false};
+               (O = "-D",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$D, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-I",                  {Os, _})    -> {[O|Os], true};
+               (O = [\\$$-, \\$$I, _ | _], {Os, _})    -> {[O|Os], false};
+               (O = "-pa",                 {Os, _})    -> {[O|Os], true};
+               (_,                         Acc)        -> Acc
+       end, {[], false}, Opts),
+       io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+       halt().
+endef
+
+$(DIALYZER_PLT): deps app
+       $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS)
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+       $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze:
+else
+dialyze: $(DIALYZER_PLT)
+endif
+       $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+
+# Core targets.
+
+ifneq ($(wildcard doc/overview.edoc),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+       $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().'
+
+distclean-edoc:
+       $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info
+
+# Copyright (c) 2014 Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+
+ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*"
+ESCRIPT_SYS_CONFIG ?= "rel/sys.config"
+ESCRIPT_EMU_ARGS ?= -pa . \
+       -sasl errlog_type error \
+       -escript main $(ESCRIPT_NAME)
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**"
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Escript targets:" \
+               "  escript     Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl
+# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center
+# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE :
+# Software may only be used for the great good and the true happiness of all
+# sentient beings.
+
+define ESCRIPT_RAW
+'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\
+'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\
+'  [F || F <- A, not filelib:is_dir(F) ] end,'\
+'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\
+'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\
+'Ez = fun(Escript) ->'\
+'  Static = Files([$(ESCRIPT_STATIC)]),'\
+'  Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\
+'  Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\
+'  escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\
+'    {archive, Archive, [memory]},'\
+'    {shebang, "$(ESCRIPT_SHEBANG)"},'\
+'    {comment, "$(ESCRIPT_COMMENT)"},'\
+'    {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\
+'  ]),'\
+'  file:change_mode(Escript, 8#755)'\
+'end,'\
+'Ez("$(ESCRIPT_FILE)"),'\
+'halt().'
+endef
+
+ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW))
+
+escript:: distclean-escript deps app
+       $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND)
+
+distclean-escript:
+       $(gen_verbose) rm -f $(ESCRIPT_NAME)
+
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "EUnit targets:" \
+               "  eunit       Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       case cover:compile_beam_directory("ebin") of
+                               {error, _} -> halt(1);
+                               _ -> ok
+                       end
+       end,
+       case eunit:test($1, [$(EUNIT_OPTS)]) of
+               ok -> ok;
+               error -> halt(2)
+       end,
+       case "$(COVER)" of
+               "" -> ok;
+               _ ->
+                       cover:export("eunit.coverdata")
+       end,
+       halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build
+       $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+       $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP),,apps-eunit)
+       $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit:
+       $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done
+endif
+endif
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel distclean-relx-rel distclean-relx run
+
+# Configuration.
+
+RELX ?= $(CURDIR)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+       RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+       RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+endif
+endif
+
+distclean:: distclean-relx-rel distclean-relx
+
+# Plugin-specific targets.
+
+$(RELX):
+       $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+       $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+       $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS)
+
+distclean-relx-rel:
+       $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+distclean-relx:
+       $(gen_verbose) rm -rf $(RELX)
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run:
+else
+
+define get_relx_release.erl
+       {ok, Config} = file:consult("$(RELX_CONFIG)"),
+       {release, {Name, _}, _} = lists:keyfind(release, 1, Config),
+       io:format("~s", [Name]),
+       halt(0).
+endef
+
+RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))`
+
+run: all
+       $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Relx targets:" \
+               "  run         Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Shell targets:" \
+               "  shell       Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+       $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done
+
+shell: build-shell-deps
+       $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright (c) 2015, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+       code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]),
+       try
+               case $(1) of
+                       all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+                       module -> triq:check($(2));
+                       function -> triq:check($(2))
+               end
+       of
+               true -> halt(0);
+               _ -> halt(1)
+       catch error:undef ->
+               io:format("Undefined property or module~n"),
+               halt(0)
+       end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build
+       $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build
+       $(verbose) echo Testing $(t)/0
+       $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build
+       $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam))))))
+       $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
+endif
+
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+       XREFR_ARGS :=
+else
+       XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr
+
+# Core targets.
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Xref targets:" \
+               "  xref        Run Xrefr using $XREF_CONFIG as config file if defined"
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+       $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+       $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+       $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+       $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR = cover
+
+# Hook in coverage to ct
+
+ifdef COVER
+ifdef CT_RUN
+# All modules in 'ebin'
+COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam)))
+
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec:
+       $(verbose) echo Cover mods: $(COVER_MODS)
+       $(gen_verbose) printf "%s\n" \
+               '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \
+               '{export,"$(CURDIR)/ct.coverdata"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+       $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+       $(verbose) printf "%s\n" "" \
+               "Cover targets:" \
+               "  cover-report  Generate a HTML coverage report from previously collected" \
+               "                cover data." \
+               "  all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \
+               "" \
+               "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+               "target tests additionally generates a HTML coverage report from the combined" \
+               "coverdata files from each of these testing tools. HTML reports can be disabled" \
+               "by setting COVER_REPORT_DIR to empty."
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+       $(gen_verbose) rm -f *.coverdata ct.cover.spec
+
+# Merge all coverdata files into one.
+all.coverdata: $(COVERDATA)
+       $(gen_verbose) $(ERL) -eval ' \
+               $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \
+               cover:export("$@"), halt(0).'
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+       $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+       grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+       | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+       $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+       Ms = cover:imported_modules(),
+       [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+               ++ ".COVER.html", [html])  || M <- Ms],
+       Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+       EunitHrlMods = [$(EUNIT_HRL_MODS)],
+       Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+               true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+       TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+       TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+       Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+       TotalPerc = Perc(TotalY, TotalN),
+       {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+       io:format(F, "<!DOCTYPE html><html>~n"
+               "<head><meta charset=\"UTF-8\">~n"
+               "<title>Coverage report</title></head>~n"
+               "<body>~n", []),
+       io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+       io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+       [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+               "<td>~p%</td></tr>~n",
+               [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+       How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+       Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+       io:format(F, "</table>~n"
+               "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+               "</body></html>", [How, Date]),
+       halt().
+endef
+
+cover-report:
+       $(gen_verbose) mkdir -p $(COVER_REPORT_DIR)
+       $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+       fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+       $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+       $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included no
+# matter the type of requested dependencies.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps.log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) mkdir -p $(ERLANG_MK_TMP)
+       $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ifndef IS_APP
+       $(verbose) for dep in $(ALL_APPS_DIRS) ; do \
+               $(MAKE) -C $$dep $@ \
+                IS_APP=1 \
+                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                || exit $$?; \
+       done
+endif
+       $(verbose) for dep in $^ ; do \
+               if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+                       echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+                       if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \
+                        $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+                               $(MAKE) -C $$dep fetch-deps \
+                                IS_DEP=1 \
+                                ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \
+                                || exit $$?; \
+                       fi \
+               fi \
+       done
+ifeq ($(IS_APP)$(IS_DEP),)
+       $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | uniq > $@
+       $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+       list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+       $(verbose) cat $^
diff --git a/git-revisions.txt b/git-revisions.txt
new file mode 100644 (file)
index 0000000..6766bc9
--- /dev/null
@@ -0,0 +1,34 @@
+rabbitmq_server_release ad4565fa57e98c9183db8e9f7acc9ce6ce1ee48f rabbitmq_v3_6_6
+amqp_client ca59f72a6f7c151f1eaeb0131163909791ae6284 rabbitmq_v3_6_6
+cowboy b8e4115eb13488c517d8d8ef33c47d0eaa7838c6 1.0.3
+cowlib 7d8a571b1e50602d701ca203fbf28036b2cf80f5 1.0.1
+mochiweb a1ed381e1c4f56c7d1eaee2c2cb725719905a84a master
+rabbit 3be6cd4bb31f4a7a99fe1e5cd4652766a08c3c40 rabbitmq_v3_6_6
+rabbit_common 97e014fc01136bbd6da31fb3490f6e0209a386e2 rabbitmq_v3_6_6
+rabbitmq_amqp1_0 102609e93e84d2935698bf9d07ad5ba7ccae1133 rabbitmq_v3_6_6
+rabbitmq_auth_backend_ldap 1ee91f8022d1c2e4b825144faf9d7efe65146a37 rabbitmq_v3_6_6
+rabbitmq_auth_mechanism_ssl 4538833b754e581a1c4b5000b8ab3a3752b0551e rabbitmq_v3_6_6
+rabbitmq_codegen 4e725d8cafeaca969082beb0b5fa7d48f7f738fe rabbitmq_v3_6_6
+rabbitmq_consistent_hash_exchange a0a239bbfb3ae17ac9b01e20ea43175acd990e65 rabbitmq_v3_6_6
+rabbitmq_event_exchange b4b6331b060ab7a6db37298227bbef4905df641a rabbitmq_v3_6_6
+rabbitmq_federation f80e793331ef80fb96949f0b927cd609e0fdd952 rabbitmq_v3_6_6
+rabbitmq_federation_management 354f9bf661cc19f1f53bc277be25676e272a584b rabbitmq_v3_6_6
+rabbitmq_jms_topic_exchange a549a8a1a434a52d904cee92566442f025c0059b rabbitmq_v3_6_6
+rabbitmq_management a5f9d42785a251383bfe1e673df9ab260e1a778d rabbitmq_v3_6_6
+rabbitmq_management_agent 82c31a6d4a3c740d65b50dadfc04b46c4ca3fc02 rabbitmq_v3_6_6
+rabbitmq_management_visualiser 4211cbb1108ac3562f02c899ee217444d3a86bfd rabbitmq_v3_6_6
+rabbitmq_mqtt 0cee514e400c57061b4c2663e9170059ea1dfadf rabbitmq_v3_6_6
+rabbitmq_recent_history_exchange 9881652e4af40964bbe804a4dcd70fd745d4d171 rabbitmq_v3_6_6
+rabbitmq_sharding 7beaab51586015445edac2e31553289925fc252a rabbitmq_v3_6_6
+rabbitmq_shovel 6749519c6833881886f354092d49ab9036d8f90d rabbitmq_v3_6_6
+rabbitmq_shovel_management 715cd498cbd24e288ef501214714baf57af17941 rabbitmq_v3_6_6
+rabbitmq_stomp 215dde6d30d2b4a302930528e2a1b0debcc15446 rabbitmq_v3_6_6
+rabbitmq_top d12a3bf21ebd0a6aa5d2bc76474c1f5d2ef2040a rabbitmq_v3_6_6
+rabbitmq_tracing b9900be98f2929a8c472393b40ac7ec74e5cf7ff rabbitmq_v3_6_6
+rabbitmq_trust_store 5a1ce3e2354e421423d7ea2048daa9d6d8216cbe rabbitmq_v3_6_6
+rabbitmq_web_dispatch 98b24e030c043456645fbbaa9df9f2268c4d460f rabbitmq_v3_6_6
+rabbitmq_web_stomp 64d3b9c829f92c087b295f0347efce2d23d14e15 rabbitmq_v3_6_6
+rabbitmq_web_stomp_examples 85f33ca3ccce7bba094a657300c43054af8a0af6 rabbitmq_v3_6_6
+ranch a5d2efcde9a34ad38ab89a26d98ea5335e88625a 1.2.1
+sockjs 7e7112a4935a9aaa89e97954eb612534fa0f6229 master
+webmachine 6b5210c0ed07159f43222255e05a90bbef6c8cbe 
diff --git a/rabbitmq-components.mk b/rabbitmq-components.mk
new file mode 100644 (file)
index 0000000..05986d8
--- /dev/null
@@ -0,0 +1,284 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Define default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk leading to the wrong target becoming
+# the default.
+.DEFAULT_GOAL = all
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to checkout branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch or fallback to `stable` or `master` whichever was the
+# base of the topic branch.
+
+dep_amqp_client                       = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit                            = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common                     = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0                  = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp        = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http        = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap        = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl       = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser    = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_clusterer                = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen                  = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers               = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client            = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange           = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation               = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management    = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client              = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client               = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange       = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc                      = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management               = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent         = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange      = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes        = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_visualiser    = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp        = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome                = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt                     = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client              = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange  = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp       = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange          = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release           = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding                 = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel                   = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management        = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp                    = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke                     = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top                      = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing                  = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store              = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test                     = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch             = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp                = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples       = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt                 = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples        = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website                  = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_sockjs                            = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master
+dep_toke                              = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella          = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk
+# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch
+# needs to add "ranch" as a BUILD_DEPS. The list of projects needing
+# this workaround are:
+#     o  rabbitmq-web-stomp
+dep_ranch = git https://github.com/ninenines/ranch 1.2.1
+
+RABBITMQ_COMPONENTS = amqp_client \
+                     rabbit \
+                     rabbit_common \
+                     rabbitmq_amqp1_0 \
+                     rabbitmq_auth_backend_amqp \
+                     rabbitmq_auth_backend_http \
+                     rabbitmq_auth_backend_ldap \
+                     rabbitmq_auth_mechanism_ssl \
+                     rabbitmq_boot_steps_visualiser \
+                     rabbitmq_clusterer \
+                     rabbitmq_codegen \
+                     rabbitmq_consistent_hash_exchange \
+                     rabbitmq_ct_helpers \
+                     rabbitmq_delayed_message_exchange \
+                     rabbitmq_dotnet_client \
+                     rabbitmq_event_exchange \
+                     rabbitmq_federation \
+                     rabbitmq_federation_management \
+                     rabbitmq_java_client \
+                     rabbitmq_jms_client \
+                     rabbitmq_jms_topic_exchange \
+                     rabbitmq_lvc \
+                     rabbitmq_management \
+                     rabbitmq_management_agent \
+                     rabbitmq_management_exchange \
+                     rabbitmq_management_themes \
+                     rabbitmq_management_visualiser \
+                     rabbitmq_message_timestamp \
+                     rabbitmq_metronome \
+                     rabbitmq_mqtt \
+                     rabbitmq_objc_client \
+                     rabbitmq_recent_history_exchange \
+                     rabbitmq_routing_node_stamp \
+                     rabbitmq_rtopic_exchange \
+                     rabbitmq_server_release \
+                     rabbitmq_sharding \
+                     rabbitmq_shovel \
+                     rabbitmq_shovel_management \
+                     rabbitmq_stomp \
+                     rabbitmq_toke \
+                     rabbitmq_top \
+                     rabbitmq_tracing \
+                     rabbitmq_trust_store \
+                     rabbitmq_web_dispatch \
+                     rabbitmq_web_mqtt \
+                     rabbitmq_web_mqtt_examples \
+                     rabbitmq_web_stomp \
+                     rabbitmq_web_stomp_examples \
+                     rabbitmq_website
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+       ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+       if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+base_rmq_ref := $(shell \
+       (git rev-parse --verify -q stable >/dev/null && \
+         git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \
+         echo stable) || \
+       echo master)
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' location from the current project
+# repository URL, if it's a Git repository:
+#   - We take the "origin" remote URL as the base
+# - The current project name and repository name is replaced by the
+#   target's properties:
+#       eg. rabbitmq-common is replaced by rabbitmq-codegen
+#       eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fallback to RabbitMQ
+# upstream which is GitHub.
+
+# Maccro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+#   1. /foo.git -> /bar.git
+#   2. /foo     -> /bar
+#   3. /foo/    -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)),                                    \
+                 $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))),       \
+                 $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+       fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+       fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+       if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+        git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url1"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+       elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+           fetch_url="$$$$fetch_url2"; \
+           push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+       fi; \
+       cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+       $(foreach ref,$(call dep_rmq_commits,$(1)), \
+         git checkout -q $(ref) >/dev/null 2>&1 || \
+         ) \
+       (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+         1>&2 && false) ) && \
+       (test "$$$$fetch_url" = "$$$$push_url" || \
+        git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+       @:
+
+prepare-dist::
+       @:
+
+# --------------------------------------------------------------------
+# rabbitmq-components.mk checks.
+# --------------------------------------------------------------------
+
+# If this project is under the Umbrella project, we override $(DEPS_DIR)
+# to point to the Umbrella's one. We also disable `make distclean` so
+# $(DEPS_DIR) is not accidentally removed.
+
+ifneq ($(wildcard ../../UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+else ifneq ($(wildcard UMBRELLA.md),)
+UNDER_UMBRELLA = 1
+endif
+
+ifeq ($(UNDER_UMBRELLA),1)
+ifneq ($(PROJECT),rabbitmq_public_umbrella)
+DEPS_DIR ?= $(abspath ..)
+endif
+
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
+
+UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
+
+check-rabbitmq-components.mk:
+       $(verbose) cmp -s rabbitmq-components.mk \
+               $(UPSTREAM_RMQ_COMPONENTS_MK) || \
+               (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
+                 false)
+
+ifeq ($(PROJECT),rabbit_common)
+rabbitmq-components-mk:
+       @:
+else
+rabbitmq-components-mk:
+       $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
+ifeq ($(DO_COMMIT),yes)
+       $(verbose) git diff --quiet rabbitmq-components.mk \
+       || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
+endif
+endif
diff --git a/rabbitmq-server/Makefile b/rabbitmq-server/Makefile
deleted file mode 100644 (file)
index c2cae4a..0000000
+++ /dev/null
@@ -1,437 +0,0 @@
-TMPDIR ?= /tmp
-
-RABBITMQ_NODENAME ?= rabbit
-RABBITMQ_SERVER_START_ARGS ?=
-RABBITMQ_MNESIA_DIR ?= $(TMPDIR)/rabbitmq-$(RABBITMQ_NODENAME)-mnesia
-RABBITMQ_PLUGINS_EXPAND_DIR ?= $(TMPDIR)/rabbitmq-$(RABBITMQ_NODENAME)-plugins-scratch
-RABBITMQ_LOG_BASE ?= $(TMPDIR)
-
-DEPS_FILE=deps.mk
-SOURCE_DIR=src
-TEST_DIR=test/src
-EBIN_DIR=ebin
-TEST_EBIN_DIR=test/ebin
-INCLUDE_DIR=include
-DOCS_DIR=docs
-INCLUDES=$(wildcard $(INCLUDE_DIR)/*.hrl) $(INCLUDE_DIR)/rabbit_framing.hrl
-SOURCES=$(wildcard $(SOURCE_DIR)/*.erl) $(SOURCE_DIR)/rabbit_framing_amqp_0_9_1.erl $(SOURCE_DIR)/rabbit_framing_amqp_0_8.erl $(USAGES_ERL)
-TEST_SOURCES=$(wildcard $(TEST_DIR)/*.erl)
-BEAM_TARGETS=$(patsubst $(SOURCE_DIR)/%.erl, $(EBIN_DIR)/%.beam, $(SOURCES))
-TEST_BEAM_TARGETS=$(patsubst $(TEST_DIR)/%.erl, $(TEST_EBIN_DIR)/%.beam, $(TEST_SOURCES))
-TARGETS=$(EBIN_DIR)/rabbit.app $(INCLUDE_DIR)/rabbit_framing.hrl $(BEAM_TARGETS) plugins
-TEST_TARGETS=$(TEST_BEAM_TARGETS)
-WEB_URL=http://www.rabbitmq.com/
-MANPAGES=$(patsubst %.xml, %.gz, $(wildcard $(DOCS_DIR)/*.[0-9].xml))
-WEB_MANPAGES=$(patsubst %.xml, %.man.xml, $(wildcard $(DOCS_DIR)/*.[0-9].xml) $(DOCS_DIR)/rabbitmq-service.xml $(DOCS_DIR)/rabbitmq-echopid.xml)
-USAGES_XML=$(DOCS_DIR)/rabbitmqctl.1.xml $(DOCS_DIR)/rabbitmq-plugins.1.xml
-USAGES_ERL=$(foreach XML, $(USAGES_XML), $(call usage_xml_to_erl, $(XML)))
-
-ifeq ($(shell python -c 'import simplejson' 2>/dev/null && echo yes),yes)
-PYTHON=python
-else
-ifeq ($(shell python2.7 -c 'import json' 2>/dev/null && echo yes),yes)
-PYTHON=python2.7
-else
-ifeq ($(shell python2.6 -c 'import simplejson' 2>/dev/null && echo yes),yes)
-PYTHON=python2.6
-else
-ifeq ($(shell python2.5 -c 'import simplejson' 2>/dev/null && echo yes),yes)
-PYTHON=python2.5
-else
-# Hmm. Missing simplejson?
-PYTHON=python
-endif
-endif
-endif
-endif
-
-BASIC_PLT=basic.plt
-RABBIT_PLT=rabbit.plt
-
-ifndef USE_PROPER_QC
-# PropEr needs to be installed for property checking
-# http://proper.softlab.ntua.gr/
-USE_PROPER_QC=$(shell erl -noshell -eval 'io:format({module, proper} =:= code:ensure_loaded(proper)), halt().')
-endif
-
-#other args: +native +"{hipe,[o3,verbose]}" -Ddebug=true +debug_info +no_strict_record_tests
-ERLC_OPTS=-I $(INCLUDE_DIR) -Wall +warn_export_vars -v +debug_info $(call boolean_macro,$(USE_SPECS),use_specs) $(call boolean_macro,$(USE_PROPER_QC),use_proper_qc)
-
-# Our type specs rely on dict:dict/0 etc, which are only available in
-# 17.0 upwards.
-define compare_version
-$(shell awk 'BEGIN {
-       split("$(1)", v1, "\.");
-       version1 = v1[1] * 1000000 + v1[2] * 10000 + v1[3] * 100 + v1[4];
-
-       split("$(2)", v2, "\.");
-       version2 = v2[1] * 1000000 + v2[2] * 10000 + v2[3] * 100 + v2[4];
-
-       if (version1 $(3) version2) {
-               print "true";
-       } else {
-               print "false";
-       }
-}')
-endef
-
-ERTS_VER = $(shell erl -version 2>&1 | sed -E 's/.* version //')
-USE_SPECS_MIN_ERTS_VER = 5.11
-ifeq ($(call compare_version,$(ERTS_VER),$(USE_SPECS_MIN_ERTS_VER),>=),true)
-ERLC_OPTS += -Duse_specs
-endif
-
-ifdef INSTRUMENT_FOR_QC
-ERLC_OPTS += -DINSTR_MOD=gm_qc
-else
-ERLC_OPTS += -DINSTR_MOD=gm
-endif
-
-include version.mk
-
-PLUGINS_SRC_DIR?=$(shell [ -d "plugins-src" ] && echo "plugins-src" || echo )
-PLUGINS_DIR=plugins
-TARBALL_NAME=rabbitmq-server-$(VERSION)
-TARGET_SRC_DIR=dist/$(TARBALL_NAME)
-
-SIBLING_CODEGEN_DIR=../rabbitmq-codegen/
-AMQP_CODEGEN_DIR=$(shell [ -d $(SIBLING_CODEGEN_DIR) ] && echo $(SIBLING_CODEGEN_DIR) || echo codegen)
-AMQP_SPEC_JSON_FILES_0_9_1=$(AMQP_CODEGEN_DIR)/amqp-rabbitmq-0.9.1.json $(AMQP_CODEGEN_DIR)/credit_extension.json
-AMQP_SPEC_JSON_FILES_0_8=$(AMQP_CODEGEN_DIR)/amqp-rabbitmq-0.8.json
-
-ERL_CALL=erl_call -sname $(RABBITMQ_NODENAME) -e
-
-ERL_EBIN=erl -noinput -pa $(EBIN_DIR)
-
-define usage_xml_to_erl
-  $(subst __,_,$(patsubst $(DOCS_DIR)/rabbitmq%.1.xml, $(SOURCE_DIR)/rabbit_%_usage.erl, $(subst -,_,$(1))))
-endef
-
-define usage_dep
-  $(call usage_xml_to_erl, $(1)): $(1) $(DOCS_DIR)/usage.xsl
-endef
-
-define boolean_macro
-$(if $(filter true,$(1)),-D$(2))
-endef
-
-ifneq "$(SBIN_DIR)" ""
-ifneq "$(TARGET_DIR)" ""
-SCRIPTS_REL_PATH=$(shell ./calculate-relative $(TARGET_DIR)/sbin $(SBIN_DIR))
-endif
-endif
-
-# Versions prior to this are not supported
-NEED_MAKE := 3.80
-ifneq "$(NEED_MAKE)" "$(firstword $(sort $(NEED_MAKE) $(MAKE_VERSION)))"
-$(error Versions of make prior to $(NEED_MAKE) are not supported)
-endif
-
-# .DEFAULT_GOAL introduced in 3.81
-DEFAULT_GOAL_MAKE := 3.81
-ifneq "$(DEFAULT_GOAL_MAKE)" "$(firstword $(sort $(DEFAULT_GOAL_MAKE) $(MAKE_VERSION)))"
-.DEFAULT_GOAL=all
-endif
-
-all: $(TARGETS) $(TEST_TARGETS)
-
-.PHONY: plugins check-xref
-ifneq "$(PLUGINS_SRC_DIR)" ""
-plugins:
-       [ -d "$(PLUGINS_SRC_DIR)/rabbitmq-server" ] || ln -s "$(CURDIR)" "$(PLUGINS_SRC_DIR)/rabbitmq-server"
-       mkdir -p $(PLUGINS_DIR)
-       PLUGINS_SRC_DIR="" $(MAKE) -C "$(PLUGINS_SRC_DIR)" plugins-dist PLUGINS_DIST_DIR="$(CURDIR)/$(PLUGINS_DIR)" VERSION=$(VERSION)
-       echo "Put your EZs here and use rabbitmq-plugins to enable them." > $(PLUGINS_DIR)/README
-       rm -f $(PLUGINS_DIR)/rabbit_common*.ez
-
-# add -q to remove printout of warnings....
-check-xref: $(BEAM_TARGETS) $(PLUGINS_DIR)
-       rm -rf lib
-       ./check_xref $(PLUGINS_DIR) -q
-
-else
-plugins:
-# Not building plugins
-
-check-xref:
-       $(info xref checks are disabled as there is no plugins-src directory)
-
-endif
-
-$(DEPS_FILE): $(SOURCES) $(INCLUDES)
-       rm -f $@
-       echo $(subst : ,:,$(foreach FILE,$^,$(FILE):)) | escript generate_deps $@ $(EBIN_DIR)
-
-$(EBIN_DIR)/rabbit.app: $(EBIN_DIR)/rabbit_app.in $(SOURCES) generate_app
-       escript generate_app $< $@ $(SOURCE_DIR)
-
-$(EBIN_DIR)/%.beam: $(SOURCE_DIR)/%.erl | $(DEPS_FILE)
-       erlc -o $(EBIN_DIR) $(ERLC_OPTS) -pa $(EBIN_DIR) $<
-
-$(TEST_EBIN_DIR)/%.beam: $(TEST_DIR)/%.erl | $(TEST_EBIN_DIR)
-       erlc -o $(TEST_EBIN_DIR) $(ERLC_OPTS) -pa $(EBIN_DIR) -pa $(TEST_EBIN_DIR) $<
-
-$(TEST_EBIN_DIR):
-       mkdir -p $(TEST_EBIN_DIR)
-
-$(INCLUDE_DIR)/rabbit_framing.hrl: codegen.py $(AMQP_CODEGEN_DIR)/amqp_codegen.py $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8)
-       $(PYTHON) codegen.py --ignore-conflicts header $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8) $@
-
-$(SOURCE_DIR)/rabbit_framing_amqp_0_9_1.erl: codegen.py $(AMQP_CODEGEN_DIR)/amqp_codegen.py $(AMQP_SPEC_JSON_FILES_0_9_1)
-       $(PYTHON) codegen.py body $(AMQP_SPEC_JSON_FILES_0_9_1) $@
-
-$(SOURCE_DIR)/rabbit_framing_amqp_0_8.erl: codegen.py $(AMQP_CODEGEN_DIR)/amqp_codegen.py $(AMQP_SPEC_JSON_FILES_0_8)
-       $(PYTHON) codegen.py body $(AMQP_SPEC_JSON_FILES_0_8) $@
-
-dialyze: $(BEAM_TARGETS) $(BASIC_PLT)
-       dialyzer --plt $(BASIC_PLT) --no_native --fullpath \
-          $(BEAM_TARGETS)
-
-# rabbit.plt is used by rabbitmq-erlang-client's dialyze make target
-create-plt: $(RABBIT_PLT)
-
-$(RABBIT_PLT): $(BEAM_TARGETS) $(BASIC_PLT)
-       dialyzer --plt $(BASIC_PLT) --output_plt $@ --no_native \
-         --add_to_plt $(BEAM_TARGETS)
-
-$(BASIC_PLT): $(BEAM_TARGETS)
-       if [ -f $@ ]; then \
-           touch $@; \
-       else \
-           dialyzer --output_plt $@ --build_plt \
-               --apps erts kernel stdlib compiler sasl os_mon mnesia tools \
-                 public_key crypto ssl xmerl; \
-       fi
-
-clean:
-       rm -f $(EBIN_DIR)/*.beam
-       rm -f $(EBIN_DIR)/rabbit.app $(EBIN_DIR)/rabbit.boot $(EBIN_DIR)/rabbit.script $(EBIN_DIR)/rabbit.rel
-       rm -rf $(TEST_EBIN_DIR)
-       rm -f $(PLUGINS_DIR)/*.ez
-       [ -d "$(PLUGINS_SRC_DIR)" ] && PLUGINS_SRC_DIR="" PRESERVE_CLONE_DIR=1 make -C $(PLUGINS_SRC_DIR) clean || true
-       rm -f $(INCLUDE_DIR)/rabbit_framing.hrl $(SOURCE_DIR)/rabbit_framing_amqp_*.erl codegen.pyc
-       rm -f $(DOCS_DIR)/*.[0-9].gz $(DOCS_DIR)/*.man.xml $(DOCS_DIR)/*.erl $(USAGES_ERL)
-       rm -f $(RABBIT_PLT)
-       rm -f $(DEPS_FILE)
-
-cleandb:
-       rm -rf $(RABBITMQ_MNESIA_DIR)/*
-
-############ various tasks to interact with RabbitMQ ###################
-
-BASIC_SCRIPT_ENVIRONMENT_SETTINGS=\
-       RABBITMQ_NODE_IP_ADDRESS="$(RABBITMQ_NODE_IP_ADDRESS)" \
-       RABBITMQ_NODE_PORT="$(RABBITMQ_NODE_PORT)" \
-       RABBITMQ_LOG_BASE="$(RABBITMQ_LOG_BASE)" \
-       RABBITMQ_MNESIA_DIR="$(RABBITMQ_MNESIA_DIR)" \
-       RABBITMQ_PLUGINS_EXPAND_DIR="$(RABBITMQ_PLUGINS_EXPAND_DIR)"
-
-run: all
-       $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \
-               RABBITMQ_ALLOW_INPUT=true \
-               RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \
-               ./scripts/rabbitmq-server
-
-run-background: all
-       $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \
-               RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \
-               ./scripts/rabbitmq-server -detached
-
-run-node: all
-       $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \
-               RABBITMQ_NODE_ONLY=true \
-               RABBITMQ_ALLOW_INPUT=true \
-               RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \
-               ./scripts/rabbitmq-server
-
-run-background-node: all
-       $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \
-               RABBITMQ_NODE_ONLY=true \
-               RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \
-               ./scripts/rabbitmq-server -detached
-
-run-tests: all
-       echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL)
-       echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) -n hare || true
-       OUT=$$(echo "rabbit_tests:all_tests()." | $(ERL_CALL)) ; \
-         echo $$OUT ; echo $$OUT | grep '^{ok, passed}$$' > /dev/null
-
-run-qc: all
-       echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL)
-       ./quickcheck $(RABBITMQ_NODENAME) rabbit_backing_queue_qc 100 40
-       ./quickcheck $(RABBITMQ_NODENAME) gm_qc 1000 200
-
-start-background-node: all
-       -rm -f $(RABBITMQ_MNESIA_DIR).pid
-       mkdir -p $(RABBITMQ_MNESIA_DIR)
-       $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \
-               RABBITMQ_NODE_ONLY=true \
-               RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \
-               ./scripts/rabbitmq-server \
-               > $(RABBITMQ_MNESIA_DIR)/startup_log \
-               2> $(RABBITMQ_MNESIA_DIR)/startup_err &
-       ./scripts/rabbitmqctl -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_MNESIA_DIR).pid kernel
-
-start-rabbit-on-node: all
-       echo "rabbit:start()." | $(ERL_CALL)
-       ./scripts/rabbitmqctl -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_MNESIA_DIR).pid
-
-stop-rabbit-on-node: all
-       echo "rabbit:stop()." | $(ERL_CALL)
-
-set-resource-alarm: all
-       echo "rabbit_alarm:set_alarm({{resource_limit, $(SOURCE), node()}, []})." | \
-       $(ERL_CALL)
-
-clear-resource-alarm: all
-       echo "rabbit_alarm:clear_alarm({resource_limit, $(SOURCE), node()})." | \
-       $(ERL_CALL)
-
-stop-node:
-       -( \
-       pid=$$(./scripts/rabbitmqctl -n $(RABBITMQ_NODENAME) eval 'os:getpid().') && \
-       $(ERL_CALL) -q && \
-       while ps -p $$pid >/dev/null 2>&1; do sleep 1; done \
-       )
-
-# code coverage will be created for subdirectory "ebin" of COVER_DIR
-COVER_DIR=.
-
-start-cover: all
-       echo "rabbit_misc:start_cover([\"rabbit\", \"hare\"])." | $(ERL_CALL)
-       echo "rabbit_misc:enable_cover([\"$(COVER_DIR)\"])." | $(ERL_CALL)
-
-stop-cover: all
-       echo "rabbit_misc:report_cover(), cover:stop()." | $(ERL_CALL)
-       cat cover/summary.txt
-
-########################################################################
-
-srcdist: distclean
-       mkdir -p $(TARGET_SRC_DIR)/codegen
-       cp -r ebin src include LICENSE LICENSE-MPL-RabbitMQ INSTALL README $(TARGET_SRC_DIR)
-       sed 's/%%VSN%%/$(VERSION)/' $(TARGET_SRC_DIR)/ebin/rabbit_app.in > $(TARGET_SRC_DIR)/ebin/rabbit_app.in.tmp && \
-               mv $(TARGET_SRC_DIR)/ebin/rabbit_app.in.tmp $(TARGET_SRC_DIR)/ebin/rabbit_app.in
-
-       cp -r $(AMQP_CODEGEN_DIR)/* $(TARGET_SRC_DIR)/codegen/
-       cp codegen.py Makefile generate_app generate_deps calculate-relative $(TARGET_SRC_DIR)
-
-       echo "VERSION?=${VERSION}" > $(TARGET_SRC_DIR)/version.mk
-
-       cp -r scripts $(TARGET_SRC_DIR)
-       cp -r $(DOCS_DIR) $(TARGET_SRC_DIR)
-       chmod 0755 $(TARGET_SRC_DIR)/scripts/*
-
-ifneq "$(PLUGINS_SRC_DIR)" ""
-       cp -r $(PLUGINS_SRC_DIR) $(TARGET_SRC_DIR)/plugins-src
-       rm $(TARGET_SRC_DIR)/LICENSE
-       cat packaging/common/LICENSE.head >> $(TARGET_SRC_DIR)/LICENSE
-       cat $(AMQP_CODEGEN_DIR)/license_info >> $(TARGET_SRC_DIR)/LICENSE
-       find $(PLUGINS_SRC_DIR)/licensing -name "license_info_*" -exec cat '{}' >> $(TARGET_SRC_DIR)/LICENSE \;
-       cat packaging/common/LICENSE.tail >> $(TARGET_SRC_DIR)/LICENSE
-       find $(PLUGINS_SRC_DIR)/licensing -name "LICENSE-*" -exec cp '{}' $(TARGET_SRC_DIR) \;
-       rm -rf $(TARGET_SRC_DIR)/licensing
-else
-       @echo No plugins source distribution found
-endif
-
-       (cd dist; tar -zchf $(TARBALL_NAME).tar.gz $(TARBALL_NAME))
-       (cd dist; zip -q -r $(TARBALL_NAME).zip $(TARBALL_NAME))
-       rm -rf $(TARGET_SRC_DIR)
-
-distclean: clean
-       $(MAKE) -C $(AMQP_CODEGEN_DIR) distclean
-       rm -rf dist
-       find . -regex '.*\(~\|#\|\.swp\|\.dump\)' -exec rm {} \;
-
-# xmlto can not read from standard input, so we mess with a tmp file.
-%.gz: %.xml $(DOCS_DIR)/examples-to-end.xsl
-       xmlto --version | grep -E '^xmlto version 0\.0\.([0-9]|1[1-8])$$' >/dev/null || opt='--stringparam man.indent.verbatims=0' ; \
-           xsltproc --novalid $(DOCS_DIR)/examples-to-end.xsl $< > $<.tmp && \
-           xmlto -o $(DOCS_DIR) $$opt man $<.tmp && \
-           gzip -f $(DOCS_DIR)/`basename $< .xml`
-       rm -f $<.tmp
-
-# Use tmp files rather than a pipeline so that we get meaningful errors
-# Do not fold the cp into previous line, it's there to stop the file being
-# generated but empty if we fail
-$(SOURCE_DIR)/%_usage.erl:
-       xsltproc --novalid --stringparam modulename "`basename $@ .erl`" \
-               $(DOCS_DIR)/usage.xsl $< > $@.tmp
-       sed -e 's/"/\\"/g' -e 's/%QUOTE%/"/g' $@.tmp > $@.tmp2
-       fold -s $@.tmp2 > $@.tmp3
-       mv $@.tmp3 $@
-       rm $@.tmp $@.tmp2
-
-# We rename the file before xmlto sees it since xmlto will use the name of
-# the file to make internal links.
-%.man.xml: %.xml $(DOCS_DIR)/html-to-website-xml.xsl
-       cp $< `basename $< .xml`.xml && \
-               xmlto xhtml-nochunks `basename $< .xml`.xml ; rm `basename $< .xml`.xml
-       cat `basename $< .xml`.html | \
-           xsltproc --novalid $(DOCS_DIR)/remove-namespaces.xsl - | \
-               xsltproc --novalid --stringparam original `basename $<` $(DOCS_DIR)/html-to-website-xml.xsl - | \
-               xmllint --format - > $@
-       rm `basename $< .xml`.html
-
-docs_all: $(MANPAGES) $(WEB_MANPAGES)
-
-install: install_bin install_docs
-
-install_bin: all install_dirs
-       cp -r ebin include LICENSE* INSTALL $(TARGET_DIR)
-
-       chmod 0755 scripts/*
-       for script in rabbitmq-env rabbitmq-server rabbitmqctl rabbitmq-plugins rabbitmq-defaults; do \
-               cp scripts/$$script $(TARGET_DIR)/sbin; \
-               [ -e $(SBIN_DIR)/$$script ] || ln -s $(SCRIPTS_REL_PATH)/$$script $(SBIN_DIR)/$$script; \
-       done
-
-       mkdir -p $(TARGET_DIR)/$(PLUGINS_DIR)
-       [ -d "$(PLUGINS_DIR)" ] && cp $(PLUGINS_DIR)/*.ez $(PLUGINS_DIR)/README $(TARGET_DIR)/$(PLUGINS_DIR) || true
-
-install_docs: docs_all install_dirs
-       for section in 1 5; do \
-               mkdir -p $(MAN_DIR)/man$$section; \
-               for manpage in $(DOCS_DIR)/*.$$section.gz; do \
-                       cp $$manpage $(MAN_DIR)/man$$section; \
-               done; \
-       done
-       if test "$(DOC_INSTALL_DIR)"; then \
-               cp $(DOCS_DIR)/rabbitmq.config.example $(DOC_INSTALL_DIR)/rabbitmq.config.example; \
-       fi
-
-install_dirs:
-       @ OK=true && \
-         { [ -n "$(TARGET_DIR)" ] || { echo "Please set TARGET_DIR."; OK=false; }; } && \
-         { [ -n "$(SBIN_DIR)" ] || { echo "Please set SBIN_DIR."; OK=false; }; } && \
-         { [ -n "$(MAN_DIR)" ] || { echo "Please set MAN_DIR."; OK=false; }; } && $$OK
-
-       mkdir -p $(TARGET_DIR)/sbin
-       mkdir -p $(SBIN_DIR)
-       mkdir -p $(MAN_DIR)
-       if test "$(DOC_INSTALL_DIR)"; then \
-               mkdir -p $(DOC_INSTALL_DIR); \
-       fi
-
-$(foreach XML,$(USAGES_XML),$(eval $(call usage_dep, $(XML))))
-
-# Note that all targets which depend on clean must have clean in their
-# name.  Also any target that doesn't depend on clean should not have
-# clean in its name, unless you know that you don't need any of the
-# automatic dependency generation for that target (e.g. cleandb).
-
-# We want to load the dep file if *any* target *doesn't* contain
-# "clean" - i.e. if removing all clean-like targets leaves something.
-
-ifeq "$(MAKECMDGOALS)" ""
-TESTABLEGOALS:=$(.DEFAULT_GOAL)
-else
-TESTABLEGOALS:=$(MAKECMDGOALS)
-endif
-
-ifneq "$(strip $(patsubst clean%,,$(patsubst %clean,,$(TESTABLEGOALS))))" ""
-include $(DEPS_FILE)
-endif
-
-.PHONY: run-qc
diff --git a/rabbitmq-server/README b/rabbitmq-server/README
deleted file mode 100644 (file)
index 67e3a66..0000000
+++ /dev/null
@@ -1 +0,0 @@
-Please see http://www.rabbitmq.com/build-server.html for build instructions.
diff --git a/rabbitmq-server/calculate-relative b/rabbitmq-server/calculate-relative
deleted file mode 100755 (executable)
index 3af18e8..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-#
-# relpath.py
-# R.Barran 30/08/2004
-# Retrieved from http://code.activestate.com/recipes/302594/
-
-import os
-import sys
-
-def relpath(target, base=os.curdir):
-    """
-    Return a relative path to the target from either the current dir or an optional base dir.
-    Base can be a directory specified either as absolute or relative to current dir.
-    """
-
-    if not os.path.exists(target):
-        raise OSError, 'Target does not exist: '+target
-
-    if not os.path.isdir(base):
-        raise OSError, 'Base is not a directory or does not exist: '+base
-
-    base_list = (os.path.abspath(base)).split(os.sep)
-    target_list = (os.path.abspath(target)).split(os.sep)
-
-    # On the windows platform the target may be on a completely different drive from the base.
-    if os.name in ['nt','dos','os2'] and base_list[0] <> target_list[0]:
-        raise OSError, 'Target is on a different drive to base. Target: '+target_list[0].upper()+', base: '+base_list[0].upper()
-
-    # Starting from the filepath root, work out how much of the filepath is
-    # shared by base and target.
-    for i in range(min(len(base_list), len(target_list))):
-        if base_list[i] <> target_list[i]: break
-    else:
-        # If we broke out of the loop, i is pointing to the first differing path elements.
-        # If we didn't break out of the loop, i is pointing to identical path elements.
-        # Increment i so that in all cases it points to the first differing path elements.
-        i+=1
-
-    rel_list = [os.pardir] * (len(base_list)-i) + target_list[i:]
-    if (len(rel_list) == 0):
-        return "."
-    return os.path.join(*rel_list)
-
-if __name__ == "__main__":
-    print(relpath(sys.argv[1], sys.argv[2]))
diff --git a/rabbitmq-server/codegen/license_info b/rabbitmq-server/codegen/license_info
deleted file mode 100644 (file)
index 1cebe90..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-The files amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are
-"Copyright (C) 2008-2013 GoPivotal", Inc. and are covered by the MIT
-license.
-
diff --git a/rabbitmq-server/generate_app b/rabbitmq-server/generate_app
deleted file mode 100644 (file)
index fb0eb1e..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-main([InFile, OutFile | SrcDirs]) ->
-    Modules = [list_to_atom(filename:basename(F, ".erl")) ||
-                  SrcDir <- SrcDirs,
-                  F <- filelib:wildcard("*.erl", SrcDir)],
-    {ok, [{application, Application, Properties}]} = file:consult(InFile),
-    NewProperties =
-        case proplists:get_value(modules, Properties) of
-            [] -> lists:keyreplace(modules, 1, Properties, {modules, Modules});
-            _  -> Properties
-        end,
-    file:write_file(
-      OutFile,
-      io_lib:format("~p.~n", [{application, Application, NewProperties}])).
diff --git a/rabbitmq-server/generate_deps b/rabbitmq-server/generate_deps
deleted file mode 100644 (file)
index ddfca81..0000000
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
--mode(compile).
-
-%% We expect the list of Erlang source and header files to arrive on
-%% stdin, with the entries colon-separated.
-main([TargetFile, EbinDir]) ->
-    ErlsAndHrls = [ string:strip(S,left) ||
-                      S <- string:tokens(io:get_line(""), ":\n")],
-    ErlFiles = [F || F <- ErlsAndHrls, lists:suffix(".erl", F)],
-    Modules = sets:from_list(
-                [list_to_atom(filename:basename(FileName, ".erl")) ||
-                    FileName <- ErlFiles]),
-    HrlFiles = [F || F <- ErlsAndHrls, lists:suffix(".hrl", F)],
-    IncludeDirs = lists:usort([filename:dirname(Path) || Path <- HrlFiles]),
-    Headers = sets:from_list(HrlFiles),
-    Deps = lists:foldl(
-             fun (Path, Deps1) ->
-                     dict:store(Path, detect_deps(IncludeDirs, EbinDir,
-                                                  Modules, Headers, Path),
-                                Deps1)
-             end, dict:new(), ErlFiles),
-    {ok, Hdl} = file:open(TargetFile, [write, delayed_write]),
-    dict:fold(
-      fun (_Path, [], ok) ->
-              ok;
-          (Path, Dep, ok) ->
-              Module = filename:basename(Path, ".erl"),
-              ok = file:write(Hdl, [EbinDir, "/", Module, ".beam: ",
-                                   Path]),
-              ok = sets:fold(fun (E, ok) -> file:write(Hdl, [" ", E]) end,
-                             ok, Dep),
-              file:write(Hdl, ["\n"])
-      end, ok, Deps),
-    ok = file:write(Hdl, [TargetFile, ": ", escript:script_name(), "\n"]),
-    ok = file:sync(Hdl),
-    ok = file:close(Hdl).
-
-detect_deps(IncludeDirs, EbinDir, Modules, Headers, Path) ->
-    {ok, Forms} = epp:parse_file(Path, IncludeDirs, [{use_specs, true}]),
-    lists:foldl(
-      fun ({attribute, _LineNumber, Attribute, Behaviour}, Deps)
-          when Attribute =:= behaviour orelse Attribute =:= behavior ->
-              case sets:is_element(Behaviour, Modules) of
-                  true  -> sets:add_element(
-                             [EbinDir, "/", atom_to_list(Behaviour), ".beam"],
-                             Deps);
-                  false -> Deps
-              end;
-          ({attribute, _LineNumber, file, {FileName, _LineNumber1}}, Deps) ->
-              case sets:is_element(FileName, Headers) of
-                  true  -> sets:add_element(FileName, Deps);
-                  false -> Deps
-              end;
-          (_Form, Deps) ->
-              Deps
-      end, sets:new(), Forms).
diff --git a/rabbitmq-server/plugins-src/Makefile b/rabbitmq-server/plugins-src/Makefile
deleted file mode 100644 (file)
index 4ab8c86..0000000
+++ /dev/null
@@ -1,240 +0,0 @@
-.PHONY: default
-default:
-       @echo No default target && false
-
-REPOS:= \
-    rabbitmq-server \
-    rabbitmq-codegen \
-    rabbitmq-java-client \
-    rabbitmq-dotnet-client \
-    rabbitmq-test \
-    cowboy-wrapper \
-    eldap-wrapper \
-    mochiweb-wrapper \
-    rabbitmq-amqp1.0 \
-    rabbitmq-auth-backend-ldap \
-    rabbitmq-auth-mechanism-ssl \
-    rabbitmq-consistent-hash-exchange \
-    rabbitmq-erlang-client \
-    rabbitmq-federation \
-    rabbitmq-federation-management \
-    rabbitmq-management \
-    rabbitmq-management-agent \
-    rabbitmq-management-visualiser \
-    rabbitmq-metronome \
-    rabbitmq-web-dispatch \
-    rabbitmq-mqtt \
-    rabbitmq-shovel \
-    rabbitmq-shovel-management \
-    rabbitmq-stomp \
-    rabbitmq-toke \
-    rabbitmq-tracing \
-    rabbitmq-web-stomp \
-    rabbitmq-web-stomp-examples \
-    sockjs-erlang-wrapper \
-    toke \
-    webmachine-wrapper
-
-BRANCH:=master
-
-UMBRELLA_REPO_FETCH:=$(shell git remote -v 2>/dev/null | awk '/^origin\t.+ \(fetch\)$$/ { print $$2; }')
-ifdef UMBRELLA_REPO_FETCH
-GIT_CORE_REPOBASE_FETCH:=$(shell dirname $(UMBRELLA_REPO_FETCH))
-GIT_CORE_SUFFIX_FETCH:=$(suffix $(UMBRELLA_REPO_FETCH))
-else
-GIT_CORE_REPOBASE_FETCH:=https://github.com/rabbitmq
-GIT_CORE_SUFFIX_FETCH:=.git
-endif
-
-UMBRELLA_REPO_PUSH:=$(shell git remote -v 2>/dev/null | awk '/^origin\t.+ \(push\)$$/ { print $$2; }')
-ifdef UMBRELLA_REPO_PUSH
-GIT_CORE_REPOBASE_PUSH:=$(shell dirname $(UMBRELLA_REPO_PUSH))
-GIT_CORE_SUFFIX_PUSH:=$(suffix $(UMBRELLA_REPO_PUSH))
-else
-GIT_CORE_REPOBASE_PUSH:=git@github.com:rabbitmq
-GIT_CORE_SUFFIX_PUSH:=.git
-endif
-
-VERSION:=0.0.0
-
-ifndef VERBOSE
-QUIET:=@
-endif
-
-#----------------------------------
-
-all:
-       $(MAKE) -f all-packages.mk all-packages VERSION=$(VERSION)
-
-test:
-       $(MAKE) -f all-packages.mk test-all-packages VERSION=$(VERSION)
-
-release:
-       $(MAKE) -f all-packages.mk all-releasable VERSION=$(VERSION)
-
-clean:
-       $(MAKE) -f all-packages.mk clean-all-packages
-
-check-xref:
-       $(MAKE) -f all-packages.mk check-xref-packages
-
-plugins-dist: release
-       rm -rf $(PLUGINS_DIST_DIR)
-       mkdir -p $(PLUGINS_DIST_DIR)
-       $(MAKE) -f all-packages.mk copy-releasable VERSION=$(VERSION) PLUGINS_DIST_DIR=$(PLUGINS_DIST_DIR)
-
-plugins-srcdist:
-       rm -rf $(PLUGINS_SRC_DIST_DIR)
-       mkdir -p $(PLUGINS_SRC_DIST_DIR)/licensing
-
-       rsync -a --exclude '.git*' rabbitmq-erlang-client $(PLUGINS_SRC_DIST_DIR)/
-       touch $(PLUGINS_SRC_DIST_DIR)/rabbitmq-erlang-client/.srcdist_done
-
-       rsync -a --exclude '.git*' rabbitmq-server $(PLUGINS_SRC_DIST_DIR)/
-       touch $(PLUGINS_SRC_DIST_DIR)/rabbitmq-server/.srcdist_done
-
-       $(MAKE) -f all-packages.mk copy-srcdist VERSION=$(VERSION) PLUGINS_SRC_DIST_DIR=$(PLUGINS_SRC_DIST_DIR)
-       cp Makefile *.mk generate* $(PLUGINS_SRC_DIST_DIR)/
-       echo "This is the released version of rabbitmq-public-umbrella. \
-You can clone the full version with: git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git" > $(PLUGINS_SRC_DIST_DIR)/README
-
-       PRESERVE_CLONE_DIR=1 $(MAKE) -C $(PLUGINS_SRC_DIST_DIR) clean
-       rm -rf $(PLUGINS_SRC_DIST_DIR)/rabbitmq-server
-
-#----------------------------------
-# Convenience aliases
-
-.PHONY: co
-co: checkout
-
-.PHONY: ci
-ci: checkin
-
-.PHONY: up
-up: update
-
-.PHONY: st
-st: status
-
-.PHONY: up_c
-up_c: named_update
-
-#----------------------------------
-
-$(REPOS):
-       $(QUIET)retries=5; \
-       umbrella_branch="$$(git branch | awk '/^\* / { print $$2; }')"; \
-       if test "$$umbrella_branch" = "stable"; then \
-         branch_arg="-b $$umbrella_branch"; \
-       fi; \
-       while ! git clone $$branch_arg $(GIT_CORE_REPOBASE_FETCH)/$@$(GIT_CORE_SUFFIX_FETCH); do \
-         retries=$$((retries - 1)); \
-         if test "$$retries" = 0; then break; fi; \
-         sleep 1; \
-       done
-       $(QUIET)test -d $@
-       $(QUIET)global_user_name="$$(git config --global user.name)"; \
-       global_user_email="$$(git config --global user.email)"; \
-       user_name="$$(git config user.name)"; \
-       user_email="$$(git config user.email)"; \
-       cd $@ && \
-       git remote set-url --push origin $(GIT_CORE_REPOBASE_PUSH)/$@$(GIT_CORE_SUFFIX_PUSH) && \
-       if test "$$global_user_name" != "$$user_name"; then git config user.name "$$user_name"; fi && \
-       if test "$$global_user_email" != "$$user_email"; then git config user.email "$$user_email"; fi
-
-
-.PHONY: checkout
-checkout: $(REPOS)
-
-.PHONY: list-repos
-list-repos:
-       @for repo in $(REPOS); do echo $$repo; done
-
-.PHONY: sync-gituser
-sync-gituser:
-       @global_user_name="$$(git config --global user.name)"; \
-       global_user_email="$$(git config --global user.email)"; \
-       user_name="$$(git config user.name)"; \
-       user_email="$$(git config user.email)"; \
-       for repo in $(REPOS); do \
-       cd $$repo && \
-       git config --unset user.name && \
-       git config --unset user.email && \
-       if test "$$global_user_name" != "$$user_name"; then git config user.name "$$user_name"; fi && \
-       if test "$$global_user_email" != "$$user_email"; then git config user.email "$$user_email"; fi && \
-       cd ..; done
-
-.PHONY: sync-gitremote
-sync-gitremote:
-       @for repo in $(REPOS); do \
-       cd $$repo && \
-       git remote set-url origin $(GIT_CORE_REPOBASE_FETCH)/$$repo$(GIT_CORE_SUFFIX_FETCH) && \
-       git remote set-url --push origin $(GIT_CORE_REPOBASE_PUSH)/$$repo$(GIT_CORE_SUFFIX_PUSH) && \
-       cd ..; done
-
-#----------------------------------
-# Subrepository management
-
-
-# $(1) is the target
-# $(2) is the target dependency. Can use % to get current REPO
-# $(3) is the target body. Can use % to get current REPO
-define repo_target
-
-.PHONY: $(1)
-$(1): $(2)
-       $(3)
-
-endef
-
-# $(1) is the list of repos
-# $(2) is the suffix
-# $(3) is the target dependency. Can use % to get current REPO
-# $(4) is the target body. Can use % to get current REPO
-define repo_targets
-$(foreach REPO,$(1),$(call repo_target,$(REPO)+$(2),\
-       $(patsubst %,$(3),$(REPO)),$(patsubst %,$(4),$(REPO))))
-endef
-
-# Do not allow status to fork with -j otherwise output will be garbled
-.PHONY: status
-status: checkout
-       @for repo in . $(REPOS); do \
-               echo "$$repo:"; \
-               cd "$$repo" && git status -s && cd - >/dev/null; \
-       done
-
-.PHONY: pull
-pull: $(foreach DIR,. $(REPOS),$(DIR)+pull)
-
-$(eval $(call repo_targets,. $(REPOS),pull,| %,\
-       (cd % && git fetch -p && \
-        (! git symbolic-ref -q HEAD || git pull --ff-only))))
-
-.PHONY: update
-update: pull
-
-.PHONY: named_update
-named_update: $(foreach DIR,. $(REPOS),$(DIR)+named_update)
-
-$(eval $(call repo_targets,. $(REPOS),named_update,| %,\
-       (cd % && git fetch -p && git checkout $(BRANCH) && \
-        (! git symbolic-ref -q HEAD || git pull --ff-only))))
-
-.PHONY: tag
-tag: $(foreach DIR,. $(REPOS),$(DIR)+tag)
-
-$(eval $(call repo_targets,. $(REPOS),tag,| %,\
-       (cd % && git tag $(TAG))))
-
-.PHONY: push
-push: $(foreach DIR,. $(REPOS),$(DIR)+push)
-
-$(eval $(call repo_targets,. $(REPOS),push,| %,\
-       (cd % && git push && git push --tags)))
-
-.PHONY: checkin
-checkin: $(foreach DIR,. $(REPOS),$(DIR)+checkin)
-
-$(eval $(call repo_targets,. $(REPOS),checkin,| %,\
-       (cd % && (test -z "$$$$(git status -s -uno)" || git commit -a))))
diff --git a/rabbitmq-server/plugins-src/README b/rabbitmq-server/plugins-src/README
deleted file mode 100644 (file)
index 58177d4..0000000
+++ /dev/null
@@ -1 +0,0 @@
-This is the released version of rabbitmq-public-umbrella. You can clone the full version with: git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git
diff --git a/rabbitmq-server/plugins-src/all-packages.mk b/rabbitmq-server/plugins-src/all-packages.mk
deleted file mode 100644 (file)
index 1d02a3d..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-UMBRELLA_BASE_DIR:=.
-
-include common.mk
-
-CHAIN_TESTS:=true
-
-# Pull in all the packages
-$(foreach PACKAGE_MK,$(wildcard */package.mk),$(eval $(call do_package,$(call canonical_path,$(patsubst %/,%,$(dir $(PACKAGE_MK)))))))
-
-# ...and the non-integrated ones
-$(foreach V,$(.VARIABLES),$(if $(filter NON_INTEGRATED_%,$(filter-out NON_INTEGRATED_DEPS_%,$V)),$(eval $(call do_package,$(subst NON_INTEGRATED_,,$V)))))
-
-test-all-packages: $(CHAINED_TESTS)
diff --git a/rabbitmq-server/plugins-src/common.mk b/rabbitmq-server/plugins-src/common.mk
deleted file mode 100644 (file)
index d8ed4f8..0000000
+++ /dev/null
@@ -1,143 +0,0 @@
-# Various global definitions
-
-# UMBRELLA_BASE_DIR should be set to the path of the
-# rabbitmq-public-umbrella directory before this file is included.
-
-# Make version check
-REQUIRED_MAKE_VERSION:=3.81
-ifneq ($(shell ( echo "$(MAKE_VERSION)" ; echo "$(REQUIRED_MAKE_VERSION)" ) | sort -t. -n | head -1),$(REQUIRED_MAKE_VERSION))
-$(error GNU make version $(REQUIRED_MAKE_VERSION) required)
-endif
-
-# This is the standard trick for making pattern substitution work
-# (amongst others) when the replacement needs to include a comma.
-COMMA:=,
-
-# Global settings that can be overridden on the command line
-
-# These ones are expected to be passed down to the sub-makes invoked
-# for non-integrated packages
-VERSION ?= 0.0.0
-ERL ?= erl
-ERL_OPTS ?=
-ERLC ?= erlc
-ERLC_OPTS ?= -Wall +debug_info
-TMPDIR ?= /tmp
-
-NODENAME ?= rabbit-test
-ERL_CALL ?= erl_call
-ERL_CALL_OPTS ?= -sname $(NODENAME) -e
-
-# Where we put all the files produced when running tests.
-TEST_TMPDIR=$(TMPDIR)/rabbitmq-test
-
-# Callable functions
-
-# Convert a package name to the corresponding erlang app name
-define package_to_app_name
-$(subst -,_,$(1))
-endef
-
-# If the variable named $(1) holds a non-empty value, return it.
-# Otherwise, set the variable to $(2) and return that value.
-define memoize
-$(if $($(1)),$($(1)),$(eval $(1):=$(2))$(2))
-endef
-
-# Return a canonical form for the path in $(1)
-#
-# Absolute path names can be a bit verbose.  This provides a way to
-# canonicalize path names with more concise results.
-define canonical_path
-$(call memoize,SHORT_$(realpath $(1)),$(1))
-endef
-
-# Convert a package name to a path name
-define package_to_path
-$(call canonical_path,$(UMBRELLA_BASE_DIR)/$(1))
-endef
-
-# Produce a cp command to copy from $(1) to $(2), unless $(1) is
-# empty, in which case do nothing.
-#
-# The optional $(3) gives a suffix to append to the command, if a
-# command is produced.
-define copy
-$(if $(1),cp -r $(1) $(2)$(if $(3), $(3)))
-endef
-
-# Produce the makefile fragment for the package with path in $(1), if
-# it hasn't already been visited.  The path should have been
-# canonicalized via canonical_path.
-define do_package
-# Have we already visited this package?  If so, skip it
-ifndef DONE_$(1)
-PACKAGE_DIR:=$(1)
-include $(UMBRELLA_BASE_DIR)/do-package.mk
-endif
-endef
-
-# This is used to chain test rules, so that test-all-packages works in
-# the presence of 'make -j'
-define chain_test
-$(if $(CHAIN_TESTS),$(CHAINED_TESTS)$(eval CHAINED_TESTS+=$(1)))
-endef
-
-# Mark the non-integrated repos
-NON_INTEGRATED_$(call package_to_path,rabbitmq-server):=true
-NON_INTEGRATED_$(call package_to_path,rabbitmq-erlang-client):=true
-NON_INTEGRATED_$(call package_to_path,rabbitmq-java-client):=true
-NON_INTEGRATED_$(call package_to_path,rabbitmq-dotnet-client):=true
-NON_INTEGRATED_DEPS_$(call package_to_path,rabbitmq-erlang-client):=rabbitmq-server
-
-# Where the coverage package lives
-COVERAGE_PATH:=$(call package_to_path,coverage)
-
-# Where the rabbitmq-server package lives
-RABBITMQ_SERVER_PATH=$(call package_to_path,rabbitmq-server)
-
-# Cleaning support
-ifndef MAKECMDGOALS
-TESTABLEGOALS:=$(.DEFAULT_GOAL)
-else
-TESTABLEGOALS:=$(MAKECMDGOALS)
-endif
-
-# The CLEANING variable can be used to determine whether the top-level
-# goal is cleaning related.  In particular, it can be used to prevent
-# including generated files when cleaning, which might otherwise
-# trigger undesirable activity.
-ifeq "$(strip $(patsubst clean%,,$(patsubst %clean,,$(TESTABLEGOALS))))" ""
-CLEANING:=true
-endif
-
-# Include a generated makefile fragment
-#
-# Note that this includes using "-include", and thus make will proceed
-# even if an error occurs while the fragment is being re-made (we
-# don't use "include" becuase it will produce a superfluous error
-# message when the fragment is re-made because it doesn't exist).
-# Thus you should also list the fragment as a dependency of any rules
-# that will refer to the contents of the fragment.
-define safe_include
-ifndef CLEANING
--include $(1)
-
-# If we fail to make the fragment, make will just loop trying to
-# create it.  So we have to explicitly catch that case.
-$$(if $$(MAKE_RESTARTS),$$(if $$(wildcard $(1)),,$$(error Failed to produce $(1))))
-
-endif
-endef
-
-# This is not the make default, but it is a good idea
-.DELETE_ON_ERROR:
-
-# Declarations for global targets
-.PHONY: all-releasable copy-releasable copy-srcdist all-packages clean-all-packages
-all-releasable::
-copy-releasable::
-copy-srcdist::
-all-packages::
-clean-all-packages::
-check-xref-packages::
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/cowboy-wrapper/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0001-R12-fake-iodata-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0001-R12-fake-iodata-type.patch
deleted file mode 100644 (file)
index f1d8e6a..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-From c2303fb756eeb8bd92dc04764970a43f59940208 Mon Sep 17 00:00:00 2001
-From: Marek Majkowski <majek04@gmail.com>
-Date: Thu, 26 Jan 2012 12:48:41 +0000
-Subject: [PATCH 1/7] R12 - Fake iodata() type
-
----
- include/http.hrl    |    2 +-
- src/cowboy_http.erl |    3 ++-
- 2 files changed, 3 insertions(+), 2 deletions(-)
-
-diff --git a/include/http.hrl b/include/http.hrl
-index c66f2b0..c98f873 100644
---- a/include/http.hrl
-+++ b/include/http.hrl
-@@ -47,7 +47,7 @@
-       %% Response.
-       resp_state = waiting   :: locked | waiting | chunks | done,
-       resp_headers = []      :: cowboy_http:headers(),
--      resp_body  = <<>>      :: iodata() | {non_neg_integer(),
-+      resp_body  = <<>>      :: cowboy_http:fake_iodata() | {non_neg_integer(),
-                                                               fun(() -> {sent, non_neg_integer()})},
-       %% Functions.
-diff --git a/src/cowboy_http.erl b/src/cowboy_http.erl
-index 32b0ca9..95a7334 100644
---- a/src/cowboy_http.erl
-+++ b/src/cowboy_http.erl
-@@ -46,7 +46,8 @@
-       | 'Expires' | 'Last-Modified' | 'Accept-Ranges' | 'Set-Cookie'
-       | 'Set-Cookie2' | 'X-Forwarded-For' | 'Cookie' | 'Keep-Alive'
-       | 'Proxy-Connection' | binary().
---type headers() :: [{header(), iodata()}].
-+-type fake_iodata() :: iolist() | binary().
-+-type headers() :: [{header(), fake_iodata()}].
- -type status() :: non_neg_integer() | binary().
- -export_type([method/0, uri/0, version/0, header/0, headers/0, status/0]).
--- 
-1.7.0.4
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0002-R12-drop-all-references-to-boolean-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0002-R12-drop-all-references-to-boolean-type.patch
deleted file mode 100644 (file)
index aaeedd6..0000000
+++ /dev/null
@@ -1,165 +0,0 @@
-From 257e64326ad786d19328d343da0ff7d29adbae4e Mon Sep 17 00:00:00 2001
-From: Marek Majkowski <majek04@gmail.com>
-Date: Thu, 26 Jan 2012 12:51:30 +0000
-Subject: [PATCH 2/7] R12 - drop all references to boolean() type
-
----
- src/cowboy_cookies.erl        |    8 --------
- src/cowboy_http.erl           |    1 -
- src/cowboy_http_protocol.erl  |    3 +--
- src/cowboy_http_req.erl       |    2 --
- src/cowboy_http_static.erl    |    5 -----
- src/cowboy_http_websocket.erl |    2 +-
- 6 files changed, 2 insertions(+), 19 deletions(-)
-
-diff --git a/src/cowboy_cookies.erl b/src/cowboy_cookies.erl
-index 6818a86..7f5ab60 100644
---- a/src/cowboy_cookies.erl
-+++ b/src/cowboy_cookies.erl
-@@ -112,7 +112,6 @@ cookie(Key, Value, Options) when is_binary(Key)
- %% Internal.
- %% @doc Check if a character is a white space character.
---spec is_whitespace(char()) -> boolean().
- is_whitespace($\s) -> true;
- is_whitespace($\t) -> true;
- is_whitespace($\r) -> true;
-@@ -120,7 +119,6 @@ is_whitespace($\n) -> true;
- is_whitespace(_) -> false.
- %% @doc Check if a character is a seperator.
---spec is_separator(char()) -> boolean().
- is_separator(C) when C < 32 -> true;
- is_separator($\s) -> true;
- is_separator($\t) -> true;
-@@ -144,7 +142,6 @@ is_separator($}) -> true;
- is_separator(_) -> false.
- %% @doc Check if a binary has an ASCII seperator character.
---spec has_seperator(binary()) -> boolean().
- has_seperator(<<>>) ->
-       false;
- has_seperator(<<$/, Rest/binary>>) ->
-@@ -228,7 +225,6 @@ read_quoted(<<C, Rest/binary>>, Acc) ->
-       read_quoted(Rest, <<Acc/binary, C>>).
- %% @doc Drop characters while a function returns true.
---spec binary_dropwhile(fun((char()) -> boolean()), binary()) -> binary().
- binary_dropwhile(_F, <<"">>) ->
-       <<"">>;
- binary_dropwhile(F, String) ->
-@@ -246,8 +242,6 @@ skip_whitespace(String) ->
-       binary_dropwhile(fun is_whitespace/1, String).
- %% @doc Split a binary when the current character causes F to return true.
---spec binary_splitwith(fun((char()) -> boolean()), binary(), binary())
--      -> {binary(), binary()}.
- binary_splitwith(_F, Head, <<>>) ->
-       {Head, <<>>};
- binary_splitwith(F, Head, Tail) ->
-@@ -260,8 +254,6 @@ binary_splitwith(F, Head, Tail) ->
-       end.
- %% @doc Split a binary with a function returning true or false on each char.
---spec binary_splitwith(fun((char()) -> boolean()), binary())
--      -> {binary(), binary()}.
- binary_splitwith(F, String) ->
-       binary_splitwith(F, <<>>, String).
-diff --git a/src/cowboy_http.erl b/src/cowboy_http.erl
-index 95a7334..d7261c8 100644
---- a/src/cowboy_http.erl
-+++ b/src/cowboy_http.erl
-@@ -755,7 +755,6 @@ urlencode(Bin, Opts) ->
-       Upper = proplists:get_value(upper, Opts, false),
-       urlencode(Bin, <<>>, Plus, Upper).
---spec urlencode(binary(), binary(), boolean(), boolean()) -> binary().
- urlencode(<<C, Rest/binary>>, Acc, P=Plus, U=Upper) ->
-       if      C >= $0, C =< $9 -> urlencode(Rest, <<Acc/binary, C>>, P, U);
-               C >= $A, C =< $Z -> urlencode(Rest, <<Acc/binary, C>>, P, U);
-diff --git a/src/cowboy_http_protocol.erl b/src/cowboy_http_protocol.erl
-index baee081..b80745f 100644
---- a/src/cowboy_http_protocol.erl
-+++ b/src/cowboy_http_protocol.erl
-@@ -55,7 +55,7 @@
-       max_line_length :: integer(),
-       timeout :: timeout(),
-       buffer = <<>> :: binary(),
--      hibernate = false :: boolean(),
-+      hibernate = false,
-       loop_timeout = infinity :: timeout(),
-       loop_timeout_ref :: undefined | reference()
- }).
-@@ -440,7 +440,6 @@ format_header(Field) when byte_size(Field) =< 20; byte_size(Field) > 32 ->
- format_header(Field) ->
-       format_header(Field, true, <<>>).
---spec format_header(binary(), boolean(), binary()) -> binary().
- format_header(<<>>, _Any, Acc) ->
-       Acc;
- %% Replicate a bug in OTP for compatibility reasons when there's a - right
-diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl
-index 92d96ad..d729d6c 100644
---- a/src/cowboy_http_req.erl
-+++ b/src/cowboy_http_req.erl
-@@ -515,13 +515,11 @@ set_resp_body_fun(StreamLen, StreamFun, Req) ->
- %% @doc Return whether the given header has been set for the response.
---spec has_resp_header(cowboy_http:header(), #http_req{}) -> boolean().
- has_resp_header(Name, #http_req{resp_headers=RespHeaders}) ->
-       NameBin = header_to_binary(Name),
-       lists:keymember(NameBin, 1, RespHeaders).
- %% @doc Return whether a body has been set for the response.
---spec has_resp_body(#http_req{}) -> boolean().
- has_resp_body(#http_req{resp_body={Length, _}}) ->
-       Length > 0;
- has_resp_body(#http_req{resp_body=RespBody}) ->
-diff --git a/src/cowboy_http_static.erl b/src/cowboy_http_static.erl
-index 0ee996a..d370046 100644
---- a/src/cowboy_http_static.erl
-+++ b/src/cowboy_http_static.erl
-@@ -207,8 +207,6 @@ allowed_methods(Req, State) ->
-       {['GET', 'HEAD'], Req, State}.
- %% @private
---spec malformed_request(#http_req{}, #state{}) ->
--              {boolean(), #http_req{}, #state{}}.
- malformed_request(Req, #state{filepath=error}=State) ->
-       {true, Req, State};
- malformed_request(Req, State) ->
-@@ -216,8 +214,6 @@ malformed_request(Req, State) ->
- %% @private Check if the resource exists under the document root.
---spec resource_exists(#http_req{}, #state{}) ->
--              {boolean(), #http_req{}, #state{}}.
- resource_exists(Req, #state{fileinfo={error, _}}=State) ->
-       {false, Req, State};
- resource_exists(Req, #state{fileinfo={ok, Fileinfo}}=State) ->
-@@ -227,7 +223,6 @@ resource_exists(Req, #state{fileinfo={ok, Fileinfo}}=State) ->
- %% @private
- %% Access to a file resource is forbidden if it exists and the local node does
- %% not have permission to read it. Directory listings are always forbidden.
---spec forbidden(#http_req{}, #state{}) -> {boolean(), #http_req{}, #state{}}.
- forbidden(Req, #state{fileinfo={_, #file_info{type=directory}}}=State) ->
-       {true, Req, State};
- forbidden(Req, #state{fileinfo={error, eacces}}=State) ->
-diff --git a/src/cowboy_http_websocket.erl b/src/cowboy_http_websocket.erl
-index 0f0204c..5f59891 100644
---- a/src/cowboy_http_websocket.erl
-+++ b/src/cowboy_http_websocket.erl
-@@ -54,7 +54,7 @@
-       timeout = infinity :: timeout(),
-       timeout_ref = undefined :: undefined | reference(),
-       messages = undefined :: undefined | {atom(), atom(), atom()},
--      hibernate = false :: boolean(),
-+      hibernate = false,
-       eop :: undefined | tuple(), %% hixie-76 specific.
-       origin = undefined :: undefined | binary() %% hixie-76 specific.
- }).
--- 
-1.7.0.4
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0003-R12-drop-all-references-to-reference-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0003-R12-drop-all-references-to-reference-type.patch
deleted file mode 100644 (file)
index e0ebae9..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-From 4db80ab7bacf04502ad2d29d4760e04a6d787a83 Mon Sep 17 00:00:00 2001
-From: Marek Majkowski <majek04@gmail.com>
-Date: Thu, 26 Jan 2012 12:52:23 +0000
-Subject: [PATCH 3/7] R12: drop all references to reference() type
-
----
- src/cowboy_http_protocol.erl  |    2 +-
- src/cowboy_http_websocket.erl |    2 +-
- src/cowboy_listener.erl       |    2 +-
- 3 files changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/src/cowboy_http_protocol.erl b/src/cowboy_http_protocol.erl
-index b80745f..0183785 100644
---- a/src/cowboy_http_protocol.erl
-+++ b/src/cowboy_http_protocol.erl
-@@ -57,7 +57,7 @@
-       buffer = <<>> :: binary(),
-       hibernate = false,
-       loop_timeout = infinity :: timeout(),
--      loop_timeout_ref :: undefined | reference()
-+      loop_timeout_ref
- }).
- %% API.
-diff --git a/src/cowboy_http_websocket.erl b/src/cowboy_http_websocket.erl
-index 5f59891..5100213 100644
---- a/src/cowboy_http_websocket.erl
-+++ b/src/cowboy_http_websocket.erl
-@@ -52,7 +52,7 @@
-       opts :: any(),
-       challenge = undefined :: undefined | binary() | {binary(), binary()},
-       timeout = infinity :: timeout(),
--      timeout_ref = undefined :: undefined | reference(),
-+      timeout_ref = undefined,
-       messages = undefined :: undefined | {atom(), atom(), atom()},
-       hibernate = false,
-       eop :: undefined | tuple(), %% hixie-76 specific.
-diff --git a/src/cowboy_listener.erl b/src/cowboy_listener.erl
-index c19d079..86e87f1 100644
---- a/src/cowboy_listener.erl
-+++ b/src/cowboy_listener.erl
-@@ -23,8 +23,8 @@
- -record(state, {
-       req_pools = [] :: [{atom(), non_neg_integer()}],
--      reqs_table :: ets:tid(),
--      queue = [] :: [{pid(), reference()}]
-+      reqs_table,
-+      queue = []
- }).
- %% API.
--- 
-1.7.0.4
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0004-R12-drop-references-to-iodata-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0004-R12-drop-references-to-iodata-type.patch
deleted file mode 100644 (file)
index d6f097c..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-From dfb750f491208a8e30cab0fa701dd866d60734b8 Mon Sep 17 00:00:00 2001
-From: Marek Majkowski <majek04@gmail.com>
-Date: Thu, 26 Jan 2012 12:53:08 +0000
-Subject: [PATCH 4/7] R12: drop references to iodata() type
-
----
- src/cowboy_http_req.erl |    6 ------
- 1 files changed, 0 insertions(+), 6 deletions(-)
-
-diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl
-index d729d6c..64e757c 100644
---- a/src/cowboy_http_req.erl
-+++ b/src/cowboy_http_req.erl
-@@ -478,8 +478,6 @@ set_resp_cookie(Name, Value, Options, Req) ->
-       set_resp_header(HeaderName, HeaderValue, Req).
- %% @doc Add a header to the response.
---spec set_resp_header(cowboy_http:header(), iodata(), #http_req{})
--      -> {ok, #http_req{}}.
- set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) ->
-       NameBin = header_to_binary(Name),
-       {ok, Req#http_req{resp_headers=[{NameBin, Value}|RespHeaders]}}.
-@@ -489,7 +487,6 @@ set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) ->
- %% The body set here is ignored if the response is later sent using
- %% anything other than reply/2 or reply/3. The response body is expected
- %% to be a binary or an iolist.
---spec set_resp_body(iodata(), #http_req{}) -> {ok, #http_req{}}.
- set_resp_body(Body, Req) ->
-       {ok, Req#http_req{resp_body=Body}}.
-@@ -537,8 +534,6 @@ reply(Status, Headers, Req=#http_req{resp_body=Body}) ->
-       reply(Status, Headers, Body, Req).
- %% @doc Send a reply to the client.
---spec reply(cowboy_http:status(), cowboy_http:headers(), iodata(), #http_req{})
--      -> {ok, #http_req{}}.
- reply(Status, Headers, Body, Req=#http_req{socket=Socket,
-               transport=Transport, connection=Connection, pid=ReqPid,
-               method=Method, resp_state=waiting, resp_headers=RespHeaders}) ->
-@@ -586,7 +581,6 @@ chunked_reply(Status, Headers, Req=#http_req{socket=Socket,
- %% @doc Send a chunk of data.
- %%
- %% A chunked reply must have been initiated before calling this function.
---spec chunk(iodata(), #http_req{}) -> ok | {error, atom()}.
- chunk(_Data, #http_req{socket=_Socket, transport=_Transport, method='HEAD'}) ->
-       ok;
- chunk(Data, #http_req{socket=Socket, transport=Transport, resp_state=chunks}) ->
--- 
-1.7.0.4
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0005-R12-drop-references-to-Default-any-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0005-R12-drop-references-to-Default-any-type.patch
deleted file mode 100644 (file)
index 5fc06fd..0000000
+++ /dev/null
@@ -1,52 +0,0 @@
-From c7aef1d044a1e83fcd6be7a83b2c763c0366d4f8 Mon Sep 17 00:00:00 2001
-From: Marek Majkowski <majek04@gmail.com>
-Date: Thu, 26 Jan 2012 12:53:36 +0000
-Subject: [PATCH 5/7] R12: drop references to Default:any() type
-
----
- src/cowboy_http_req.erl |    8 --------
- 1 files changed, 0 insertions(+), 8 deletions(-)
-
-diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl
-index 64e757c..c884f5a 100644
---- a/src/cowboy_http_req.erl
-+++ b/src/cowboy_http_req.erl
-@@ -147,8 +147,6 @@ qs_val(Name, Req) when is_binary(Name) ->
- %% @doc Return the query string value for the given key, or a default if
- %% missing.
---spec qs_val(binary(), #http_req{}, Default)
--      -> {binary() | true | Default, #http_req{}} when Default::any().
- qs_val(Name, Req=#http_req{raw_qs=RawQs, qs_vals=undefined,
-               urldecode={URLDecFun, URLDecArg}}, Default) when is_binary(Name) ->
-       QsVals = parse_qs(RawQs, fun(Bin) -> URLDecFun(Bin, URLDecArg) end),
-@@ -180,8 +178,6 @@ binding(Name, Req) when is_atom(Name) ->
- %% @doc Return the binding value for the given key obtained when matching
- %% the host and path against the dispatch list, or a default if missing.
---spec binding(atom(), #http_req{}, Default)
--      -> {binary() | Default, #http_req{}} when Default::any().
- binding(Name, Req, Default) when is_atom(Name) ->
-       case lists:keyfind(Name, 1, Req#http_req.bindings) of
-               {Name, Value} -> {Value, Req};
-@@ -200,8 +196,6 @@ header(Name, Req) when is_atom(Name) orelse is_binary(Name) ->
-       header(Name, Req, undefined).
- %% @doc Return the header value for the given key, or a default if missing.
---spec header(atom() | binary(), #http_req{}, Default)
--      -> {binary() | Default, #http_req{}} when Default::any().
- header(Name, Req, Default) when is_atom(Name) orelse is_binary(Name) ->
-       case lists:keyfind(Name, 1, Req#http_req.headers) of
-               {Name, Value} -> {Value, Req};
-@@ -313,8 +307,6 @@ cookie(Name, Req) when is_binary(Name) ->
- %% @doc Return the cookie value for the given key, or a default if
- %% missing.
---spec cookie(binary(), #http_req{}, Default)
--      -> {binary() | true | Default, #http_req{}} when Default::any().
- cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) ->
-       case header('Cookie', Req) of
-               {undefined, Req2} ->
--- 
-1.7.0.4
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch
deleted file mode 100644 (file)
index 183ebd2..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-From 81106c53b80f5d0fa441b893048bbdc6c9e2c4f0 Mon Sep 17 00:00:00 2001
-From: Marek Majkowski <majek04@gmail.com>
-Date: Thu, 26 Jan 2012 12:54:31 +0000
-Subject: [PATCH 6/7] Use erlang:integer_to_list and lists:max instead of bifs
-
----
- src/cowboy_http_req.erl    |    2 +-
- src/cowboy_http_static.erl |    2 +-
- src/cowboy_multipart.erl   |    4 ++--
- 3 files changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl
-index c884f5a..bf4ac7a 100644
---- a/src/cowboy_http_req.erl
-+++ b/src/cowboy_http_req.erl
-@@ -576,7 +576,7 @@ chunked_reply(Status, Headers, Req=#http_req{socket=Socket,
- chunk(_Data, #http_req{socket=_Socket, transport=_Transport, method='HEAD'}) ->
-       ok;
- chunk(Data, #http_req{socket=Socket, transport=Transport, resp_state=chunks}) ->
--      Transport:send(Socket, [integer_to_list(iolist_size(Data), 16),
-+      Transport:send(Socket, [erlang:integer_to_list(iolist_size(Data), 16),
-               <<"\r\n">>, Data, <<"\r\n">>]).
- %% @doc Send an upgrade reply.
-diff --git a/src/cowboy_http_static.erl b/src/cowboy_http_static.erl
-index d370046..da3bd33 100644
---- a/src/cowboy_http_static.erl
-+++ b/src/cowboy_http_static.erl
-@@ -412,7 +412,7 @@ attr_etag_function(Args, Attrs) ->
- -spec attr_etag_function([etagarg()], [fileattr()], [binary()]) -> binary().
- attr_etag_function(_Args, [], Acc) ->
--      list_to_binary(integer_to_list(erlang:crc32(Acc), 16));
-+      list_to_binary(erlang:integer_to_list(erlang:crc32(Acc), 16));
- attr_etag_function(Args, [H|T], Acc) ->
-       {_, Value} = lists:keyfind(H, 1, Args),
-       attr_etag_function(Args, T, [term_to_binary(Value)|Acc]).
-diff --git a/src/cowboy_multipart.erl b/src/cowboy_multipart.erl
-index b7aeb54..c9b5b6c 100644
---- a/src/cowboy_multipart.erl
-+++ b/src/cowboy_multipart.erl
-@@ -105,7 +105,7 @@ parse_boundary_eol(Bin, Pattern) ->
-                       cowboy_http:whitespace(Rest, Fun);
-               nomatch ->
-                       % CRLF not found in the given binary.
--                      RestStart = max(byte_size(Bin) - 1, 0),
-+                      RestStart = lists:max([byte_size(Bin) - 1, 0]),
-                       <<_:RestStart/binary, Rest/binary>> = Bin,
-                       more(Rest, fun (NewBin) -> parse_boundary_eol(NewBin, Pattern) end)
-       end.
-@@ -175,7 +175,7 @@ skip(Bin, Pattern = {P, PSize}) ->
-                       parse_boundary_tail(Rest, Pattern);
-               nomatch ->
-                       % Boundary not found, need more data.
--                      RestStart = max(byte_size(Bin) - PSize + 1, 0),
-+                      RestStart = lists:max([byte_size(Bin) - PSize + 1, 0]),
-                       <<_:RestStart/binary, Rest/binary>> = Bin,
-                       more(Rest, fun (NewBin) -> skip(NewBin, Pattern) end)
-       end.
--- 
-1.7.0.4
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0007-R12-type-definitions-must-be-ordered.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0007-R12-type-definitions-must-be-ordered.patch
deleted file mode 100644 (file)
index 1b1f3de..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-From 547731d5490b36f1239a99e6c4acc1964e724a6e Mon Sep 17 00:00:00 2001
-From: Marek Majkowski <majek04@gmail.com>
-Date: Thu, 26 Jan 2012 12:54:49 +0000
-Subject: [PATCH 7/7] R12 - type definitions must be ordered
-
----
- src/cowboy_multipart.erl |   10 +++++-----
- 1 files changed, 5 insertions(+), 5 deletions(-)
-
-diff --git a/src/cowboy_multipart.erl b/src/cowboy_multipart.erl
-index c9b5b6c..0bd123a 100644
---- a/src/cowboy_multipart.erl
-+++ b/src/cowboy_multipart.erl
-@@ -15,15 +15,15 @@
- %% @doc Multipart parser.
- -module(cowboy_multipart).
---type part_parser() :: parser(more(part_result())).
-+-type part_parser() :: any().
- -type parser(T) :: fun((binary()) -> T).
- -type more(T) :: T | {more, parser(T)}.
---type part_result() :: headers() | eof.
---type headers() :: {headers, http_headers(), body_cont()}.
-+-type part_result() :: any().
-+-type headers() :: any().
- -type http_headers() :: [{atom() | binary(), binary()}].
---type body_cont() :: cont(more(body_result())).
-+-type body_cont() :: any().
- -type cont(T) :: fun(() -> T).
---type body_result() :: {body, binary(), body_cont()} | end_of_part().
-+-type body_result() :: any().
- -type end_of_part() :: {end_of_part, cont(more(part_result()))}.
- -type disposition() :: {binary(), [{binary(), binary()}]}.
--- 
-1.7.0.4
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0008-sec-websocket-protocol.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0008-sec-websocket-protocol.patch
deleted file mode 100644 (file)
index 494c6b8..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl
-index 92d96ad..dd772df 100644
---- a/src/cowboy_http_req.erl
-+++ b/src/cowboy_http_req.erl
-@@ -288,6 +282,11 @@ parse_header(Name, Req, Default) when Name =:= 'Upgrade' ->
-               fun (Value) ->
-                       cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2)
-               end);
-+parse_header(Name, Req, Default) when Name =:= <<"sec-websocket-protocol">> ->
-+    parse_header(Name, Req, Default,
-+        fun (Value) ->
-+            cowboy_http:nonempty_list(Value, fun cowboy_http:token/2)
-+        end);
- parse_header(Name, Req, Default) ->
-       {Value, Req2} = header(Name, Req, Default),
-       {undefined, Value, Req2}.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/Makefile b/rabbitmq-server/plugins-src/cowboy-wrapper/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/README.md b/rabbitmq-server/plugins-src/cowboy-wrapper/README.md
deleted file mode 100644 (file)
index e1f1d5e..0000000
+++ /dev/null
@@ -1 +0,0 @@
-Cowboy requires R14
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.done b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.travis.yml b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.travis.yml
deleted file mode 100644 (file)
index f04becf..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-language: erlang
-otp_release:
-  - R15B
-  - R14B04
-  - R14B03
-  - R14B02
-script: "make tests"
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/AUTHORS b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/AUTHORS
deleted file mode 100644 (file)
index a07a69d..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-Cowboy is available thanks to the work of:
-
-Loïc Hoguin
-Anthony Ramine
-Magnus Klaar
-Paul Oliver
-Steven Gravell
-Tom Burdick
-Hunter Morris
-Yurii Rashkovskii
-Ali Sabil
-Hans Ulrich Niedermann
-Jesper Louis Andersen
-Mathieu Lecarme
-Max Lapshin
-Michiel Hakvoort
-Ori Bar
-Alisdair Sullivan
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/CHANGELOG.md b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/CHANGELOG.md
deleted file mode 100644 (file)
index a4b815b..0000000
+++ /dev/null
@@ -1,213 +0,0 @@
-CHANGELOG
-=========
-
-0.4.0
------
-
-*   Set the cowboy_listener process priority to high
-
-    As it is the central process used by all incoming requests
-    we need to set its priority to high to avoid timeouts that
-    would happen otherwise when reaching a huge number of
-    concurrent requests.
-
-*   Add cowboy:child_spec/6 for embedding in other applications
-
-*   Add cowboy_http_rest, an experimental REST protocol support
-
-    Based on the Webmachine diagram and documentation. It is a
-    new implementation, not a port, therefore a few changes have
-    been made. However all the callback names are the same and
-    should behave similarly to Webmachine.
-
-    There is currently no documentation other than the Webmachine
-    resource documentation and the comments found in cowboy_http_rest,
-    which itself should be fairly easy to read and understand.
-
-*   Add cowboy_http_static, an experimental static file handler
-
-    Makes use of the aforementioned REST protocol support to
-    deliver files with proper content type and cache headers.
-
-    Note that this uses the new file:sendfile support when
-    appropriate, which currently requires the VM to be started
-    with the +A option defined, else errors may randomly appear.
-
-*   Add cowboy_bstr module for binary strings related functions
-
-*   Add cowboy_http module for HTTP parsing functions
-
-    This module so far contains various functions for HTTP header
-    parsing along with URL encoding and decoding.
-
-*   Remove quoted from the default dependencies
-
-    This should make Cowboy much easier to compile and use by default.
-    It is of course still possible to use quoted as your URL decoding
-    library in Cowboy thanks to the newly added urldecode option.
-
-*   Fix supervisor spec for non dynamic modules to allow upgrades to complete
-
-*   Add cowboy:accept_ack/1 for a cleaner handling of the shoot message
-
-    Before, when the listener accepted a connection, the newly created
-    process was waiting for a message containing the atom 'shoot' before
-    proceeding. This has been replaced by the cowboy:accept_ack/1 function.
-
-    This function should be used where 'shoot' was received because the
-    contents of the message have changed (and could change again in the
-    distant future).
-
-*   Update binary parsing expressions to avoid hype crashes
-
-    More specifically, /bits was replaced by /binary.
-
-*   Rename the type cowboy_dispatcher:path_tokens/0 to tokens/0
-
-*   Remove the cowboy_clock:date/0, time/0 and datetime/0 types
-
-    The calendar module exports those same types properly since R14B04.
-
-*   Add cacertfile configuration option to cowboy_ssl_transport
-
-*   Add cowboy_protocol behaviour
-
-*   Remove -Wbehaviours dialyzer option unavailable in R15B
-
-*   Many tests and specs improvements
-
-### cowboy_http_req
-
-*   Fix a crash when reading the request body
-
-*   Add parse_header/2 and parse_header/3
-
-    The following headers can now be semantically parsed: Connection, Accept,
-    Accept-Charset, Accept-Encoding, Accept-Language, Content-Length,
-    Content-Type, If-Match, If-None-Match, If-Modified-Since,
-    If-Unmodified-Since, Upgrade
-
-*   Add set_resp_header/3, set_resp_cookie/4 and set_resp_body/2
-
-    These functions allow handlers to set response headers and body
-    without having to reply directly.
-
-*   Add set_resp_body_fun/3
-
-    This function allows handlers to stream the body of the response
-    using the given fun. The size of the response must be known beforehand.
-
-*   Add transport/1 to obtain the transport and socket for the request
-
-    This allows handlers to have low-level socket access in those cases
-    where they do need it, like when streaming a response body with
-    set_resp_body_fun/3.
-
-*   Add peer_addr/1
-
-    This function tries to guess the real peer IP based on the HTTP
-    headers received.
-
-*   Add meta/2 and meta/3 to save useful protocol information
-
-    Currently used to save the Websocket protocol version currently used,
-    and to save request information in the REST protocol handler.
-
-*   Add reply/2 and reply/3 aliases to reply/4
-
-*   Add upgrade_reply/3 for protocol upgrades
-
-### cowboy_http_protocol
-
-*   Add the {urldecode, fun urldecode/2} option
-
-    Added when quoted was removed from the default build. Can be used to
-    tell Cowboy to use quoted or any other URL decoding routine.
-
-*   Add the max_keepalive option
-
-*   Add the max_line_length option
-
-*   Allow HTTP handlers to stop during init/3
-
-    To do so they can return {shutdown, Req, State}.
-
-*   Add loops support in HTTP handlers for proper long-polling support
-
-    A loop can be entered by returning either of {loop, Req, State},
-    {loop, Req, State, hibernate}, {loop, Req, State, Timeout} or
-    {loop, Req, State, Timeout, hibernate} from init/3.
-
-    Loops are useful when we cannot reply immediately and instead
-    are waiting for an Erlang message to be able to complete the request,
-    as would typically be done for long-polling.
-
-    Loop support in the protocol means that timeouts and hibernating
-    are well tested and handled so you can use those options without
-    worrying. It is recommended to set the timeout option.
-
-    When a loop is started, handle/2 will never be called so it does
-    not need to be defined. When the request process receives an Erlang
-    message, it will call the info/3 function with the message as the
-    first argument.
-
-    Like in OTP, you do need to set timeout and hibernate again when
-    returning from info/3 to enable them until the next call.
-
-*   Fix the sending of 500 errors when handlers crash
-
-    Now we send an error response when no response has been sent,
-    and do nothing more than close the connection if anything
-    did get sent.
-
-*   Fix a crash when the server is sent HTTP responses
-
-*   Fix HTTP timeouts handling when the Request-Line wasn't received
-
-*   Fix the handling of the max number of empty lines between requests
-
-*   Fix the handling of HEAD requests
-
-*   Fix HTTP/1.0 Host header handling
-
-*   Reply status 400 if we receive an unexpected value or error for headers
-
-*   Properly close when the application sends "Connection: close" header
-
-*   Close HTTP connections on all errors
-
-*   Improve the error message for HTTP handlers
-
-### cowboy_http_websocket
-
-*   Add websocket support for all versions up to RFC 6455
-
-    Support isn't perfect yet according to the specifications, but
-    is working against all currently known client implementations.
-
-*   Allow websocket_init/3 to return with the hibernate option set
-
-*   Add {shutdown, Req} return value to websocket_init/3 to fail an upgrade
-
-*   Fix websocket timeout handling
-
-*   Fix error messages: wrong callback name was reported on error
-
-*   Fix byte-by-byte websocket handling
-
-*   Fix an issue when using hixie-76 with certain proxies
-
-*   Fix a crash in the hixie-76 handshake
-
-*   Fix the handshake when SSL is used on port 443
-
-*   Fix a crash in the handshake when cowboy_http_req:compact/1 is used
-
-*   Fix handshake when a query string is present
-
-*   Fix a crash when the Upgrade header contains more than one token
-
-0.2.0
------
-
-*   Initial release.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/Makefile b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/Makefile
deleted file mode 100644 (file)
index e5524f4..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-# See LICENSE for licensing information.
-
-DIALYZER = dialyzer
-REBAR = rebar
-
-all: app
-
-app: deps
-       @$(REBAR) compile
-
-deps:
-       @$(REBAR) get-deps
-
-clean:
-       @$(REBAR) clean
-       rm -f test/*.beam
-       rm -f erl_crash.dump
-
-tests: clean app eunit ct
-
-eunit:
-       @$(REBAR) eunit skip_deps=true
-
-ct:
-       @$(REBAR) ct skip_deps=true
-
-build-plt:
-       @$(DIALYZER) --build_plt --output_plt .cowboy_dialyzer.plt \
-               --apps kernel stdlib sasl inets crypto public_key ssl
-
-dialyze:
-       @$(DIALYZER) --src src --plt .cowboy_dialyzer.plt -Werror_handling \
-               -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-
-docs:
-       @$(REBAR) doc skip_deps=true
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/README.md b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/README.md
deleted file mode 100644 (file)
index d5950b9..0000000
+++ /dev/null
@@ -1,290 +0,0 @@
-Cowboy
-======
-
-Cowboy is a small, fast and modular HTTP server written in Erlang.
-
-Cowboy is also a socket acceptor pool, able to accept connections
-for any kind of TCP protocol.
-
-Goals
------
-
-Cowboy aims to provide the following advantages:
-
-* **Small** code base.
-* Damn **fast**.
-* **Modular**: transport and protocol handlers are replaceable.
-* **Binary HTTP** for greater speed and lower memory usage.
-* Easy to **embed** inside another application.
-* Selectively **dispatch** requests to handlers, allowing you to send some
-  requests to your embedded code and others to a FastCGI application in
-  PHP or Ruby.
-* No parameterized module. No process dictionary. **Clean** Erlang code.
-
-The server is currently in early development. Comments and suggestions are
-more than welcome. To contribute, either open bug reports, or fork the project
-and send us pull requests with new or improved functionality. You should
-discuss your plans with us before doing any serious work, though, to avoid
-duplicating efforts.
-
-Quick start
------------
-
-* Add Cowboy as a rebar or agner dependency to your application.
-* Start Cowboy and add one or more listeners.
-* Write handlers for your application.
-* Check out [examples](https://github.com/extend/cowboy_examples)!
-
-Getting Started
----------------
-
-At heart, Cowboy is nothing more than an TCP acceptor pool. All it does is
-accept connections received on a given port and using a given transport,
-like TCP or SSL, and forward them to a request handler for the given
-protocol. Acceptors and request handlers are of course supervised
-automatically.
-
-It just so happens that Cowboy also includes an HTTP protocol handler.
-But Cowboy does nothing by default. You need to explicitly ask Cowboy
-to listen on a port with your chosen transport and protocol handlers.
-To do so, you must start a listener.
-
-A listener is a special kind of supervisor that manages both the
-acceptor pool and the request processes. It is named and can thus be
-started and stopped at will.
-
-An acceptor pool is a pool of processes whose only role is to accept
-new connections. It's good practice to have many of these processes
-as they are very cheap and allow much quicker response when you get
-many connections. Of course, as with everything else, you should
-**benchmark** before you decide what's best for you.
-
-Cowboy includes a TCP transport handler for HTTP and an SSL transport
-handler for HTTPS. The transport handlers can of course be reused for
-other protocols like FTP or IRC.
-
-The HTTP protocol requires one last thing to continue: dispatching rules.
-Don't worry about it right now though and continue reading, it'll all
-be explained.
-
-You can start and stop listeners by calling `cowboy:start_listener/6` and
-`cowboy:stop_listener/1` respectively.
-
-The following example demonstrates the startup of a very simple listener.
-
-``` erlang
-application:start(cowboy),
-Dispatch = [
-    %% {Host, list({Path, Handler, Opts})}
-    {'_', [{'_', my_handler, []}]}
-],
-%% Name, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts
-cowboy:start_listener(my_http_listener, 100,
-    cowboy_tcp_transport, [{port, 8080}],
-    cowboy_http_protocol, [{dispatch, Dispatch}]
-).
-```
-
-This is not enough though, you must also write the my_handler module
-to process the incoming HTTP requests. Of course Cowboy comes with
-predefined handlers for specific tasks but most of the time you'll
-want to write your own handlers for your application.
-
-Following is an example of a "Hello World!" HTTP handler.
-
-``` erlang
--module(my_handler).
--export([init/3, handle/2, terminate/2]).
-
-init({tcp, http}, Req, Opts) ->
-    {ok, Req, undefined_state}.
-
-handle(Req, State) ->
-    {ok, Req2} = cowboy_http_req:reply(200, [], <<"Hello World!">>, Req),
-    {ok, Req2, State}.
-
-terminate(Req, State) ->
-    ok.
-```
-
-You can also write handlers that do not reply directly. Instead, such handlers
-will wait for an Erlang message from another process and only reply when
-receiving such message, or timeout if it didn't arrive in time.
-
-This is especially useful for long-polling functionality, as Cowboy will handle
-process hibernation and timeouts properly, preventing mistakes if you were to
-write the code yourself. An handler of that kind can be defined like this:
-
-``` erlang
--module(my_loop_handler).
--export([init/3, info/3, terminate/2]).
-
--define(TIMEOUT, 60000).
-
-init({tcp, http}, Req, Opts) ->
-       {loop, Req, undefined_state, ?TIMEOUT, hibernate}.
-
-info({reply, Body}, Req, State) ->
-       {ok, Req2} = cowboy_http_req:reply(200, [], Body, Req),
-       {ok, Req2, State};
-info(Message, Req, State) ->
-       {loop, Req, State, hibernate}.
-
-terminate(Req, State) ->
-       ok.
-```
-
-It is of course possible to combine both type of handlers together as long as
-you return the proper tuple from init/3.
-
-**Note**: versions prior to `0.4.0` used the
-[quoted](https://github.com/klaar/quoted.erl) library instead of the built in
-`cowboy_http:urldecode/2` function. If you want to retain this you must add it
-as a dependency to your application and add the following cowboy_http_protocol
-option:
-
-``` erlang
-    {urldecode, {fun quoted:from_url/2, quoted:make([])}}
-```
-
-Continue reading to learn how to dispatch rules and handle requests.
-
-Dispatch rules
---------------
-
-Cowboy allows you to dispatch HTTP requests directly to a specific handler
-based on the hostname and path information from the request. It also lets
-you define static options for the handler directly in the rules.
-
-To match the hostname and path, Cowboy requires a list of tokens. For
-example, to match the "dev-extend.eu" domain name, you must specify
-`[<<"dev-extend">>, <<"eu">>]`. Or, to match the "/path/to/my/resource"
-you must use `[<<"path">>, <<"to">>, <<"my">>, <<"resource">>]`. All the
-tokens must be given as binary.
-
-You can use the special token `'_'` (the atom underscore) to indicate that
-you accept anything in that position. For example if you have both
-"dev-extend.eu" and "dev-extend.fr" domains, you can use the match spec
-`[<<"dev-extend">>, '_']` to match any top level extension.
-
-Finally, you can also match multiple leading segments of the domain name and
-multiple trailing segments of the request path using the atom `'...'` (the atom
-ellipsis) respectively as the first host token or the last path token. For
-example, host rule `['...', <<"dev-extend">>, <<"eu">>]` can match both
-"cowboy.bugs.dev-extend.eu" and "dev-extend.eu" and path rule
-`[<<"projects">>, '...']` can match both "/projects" and
-"/projects/cowboy/issues/42". The host leading segments and the path trailing
-segments can later be retrieved through `cowboy_http_req:host_info/1` and
-`cowboy_http_req:path_info/1`.
-
-Any other atom used as a token will bind the value to this atom when
-matching. To follow on our hostnames example, `[<<"dev-extend">>, ext]`
-would bind the values `<<"eu">>` and `<<"fr">>` to the ext atom, that you
-can later retrieve in your handler by calling `cowboy_http_req:binding/{2,3}`.
-
-You can also accept any match spec by using the atom `'_'` directly instead of
-a list of tokens. Our hello world example above uses this to forward all
-requests to a single handler.
-
-There is currently no way to match multiple tokens at once.
-
-Requests handling
------------------
-
-Requests are passed around in the Request variable. Although they are
-defined as a record, it is recommended to access them only through the
-cowboy_http_req module API.
-
-You can retrieve the HTTP method, HTTP version, peer address and port,
-host tokens, raw host, used port, path tokens, raw path, query string
-values, bound values from the dispatch step, header values from the
-request. You can also read the request body, if any, optionally parsing
-it as a query string. Finally, the request allows you to send a response
-to the client.
-
-See the cowboy_http_req module for more information.
-
-Websockets
-----------
-
-The Websocket protocol is built upon the HTTP protocol. It first sends
-an HTTP request for an handshake, performs it and then switches
-to Websocket. Therefore you need to write a standard HTTP handler to
-confirm the handshake should be completed and then the Websocket-specific
-callbacks.
-
-A simple handler doing nothing but sending a repetitive message using
-Websocket would look like this:
-
-``` erlang
--module(my_ws_handler).
--export([init/3]).
--export([websocket_init/3, websocket_handle/3,
-    websocket_info/3, websocket_terminate/3]).
-
-init({tcp, http}, Req, Opts) ->
-    {upgrade, protocol, cowboy_http_websocket}.
-
-websocket_init(TransportName, Req, _Opts) ->
-    erlang:start_timer(1000, self(), <<"Hello!">>),
-    {ok, Req, undefined_state}.
-
-websocket_handle({text, Msg}, Req, State) ->
-    {reply, {text, << "That's what she said! ", Msg/binary >>}, Req, State};
-websocket_handle(_Data, Req, State) ->
-    {ok, Req, State}.
-
-websocket_info({timeout, _Ref, Msg}, Req, State) ->
-    erlang:start_timer(1000, self(), <<"How' you doin'?">>),
-    {reply, {text, Msg}, Req, State};
-websocket_info(_Info, Req, State) ->
-    {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
-    ok.
-```
-
-Of course you can have an HTTP handler doing both HTTP and Websocket
-handling, but for the sake of this example we're ignoring the HTTP
-part entirely.
-
-As the Websocket protocol is still a draft the API is subject to change
-regularly when support to the most recent drafts gets added. Features may
-be added, changed or removed before the protocol gets finalized. Cowboy
-tries to implement all drafts transparently and give a single interface to
-handle them all, however.
-
-Using Cowboy with other protocols
----------------------------------
-
-One of the strengths of Cowboy is of course that you can use it with any
-protocol you want. The only downside is that if it's not HTTP, you'll
-probably have to write the protocol handler yourself.
-
-The only exported function a protocol handler needs is the start_link/4
-function, with arguments ListenerPid, Socket, Transport and Opts. ListenerPid
-is the pid to the listener's gen_server, managing the connections. Socket is of
-course the client socket; Transport is the module name of the chosen transport
-handler and Opts is protocol options defined when starting the listener.
-
-After initializing your protocol, it is recommended to call the
-function cowboy:accept_ack/1 with the ListenerPid as argument,
-as it will ensure Cowboy has been able to fully initialize the socket.
-Anything you do past this point is up to you!
-
-If you need to change some socket options, like enabling raw mode for example,
-you can call the <em>Transport:setopts/2</em> function. It is the protocol's
-responsability to manage the socket usage, there should be no need for an user
-to specify that kind of options while starting a listener.
-
-You should definitely look at the cowboy_http_protocol module for a great
-example of fast request handling if you need to. Otherwise it's probably
-safe to use `{active, once}` mode and handle everything as it comes.
-
-Note that while you technically can run a protocol handler directly as a
-gen_server or a gen_fsm, it's probably not a good idea, as the only call
-you'll ever receive from Cowboy is the start_link/4 call. On the other
-hand, feel free to write a very basic protocol handler which then forwards
-requests to a gen_server or gen_fsm. By doing so however you must take
-care to supervise their processes as Cowboy only knows about the protocol
-handler itself.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/cover.spec b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/cover.spec
deleted file mode 100644 (file)
index 9dba11c..0000000
+++ /dev/null
@@ -1 +0,0 @@
-{incl_app, cowboy, details}.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/doc/overview.edoc b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/doc/overview.edoc
deleted file mode 100644 (file)
index 56648c4..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-@author Loïc Hoguin <essen@dev-extend.eu>
-@copyright 2011 Loïc Hoguin
-@version HEAD
-@title Small, fast, modular HTTP server.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/include/http.hrl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/include/http.hrl
deleted file mode 100644 (file)
index c98f873..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--record(http_req, {
-       %% Transport.
-       socket     = undefined :: undefined | inet:socket(),
-       transport  = undefined :: undefined | module(),
-       connection = keepalive :: keepalive | close,
-
-       %% Request.
-       pid        = undefined :: pid(),
-       method     = 'GET'     :: cowboy_http:method(),
-       version    = {1, 1}    :: cowboy_http:version(),
-       peer       = undefined :: undefined | {inet:ip_address(), inet:ip_port()},
-       host       = undefined :: undefined | cowboy_dispatcher:tokens(),
-       host_info  = undefined :: undefined | cowboy_dispatcher:tokens(),
-       raw_host   = undefined :: undefined | binary(),
-       port       = undefined :: undefined | inet:ip_port(),
-       path       = undefined :: undefined | '*' | cowboy_dispatcher:tokens(),
-       path_info  = undefined :: undefined | cowboy_dispatcher:tokens(),
-       raw_path   = undefined :: undefined | binary(),
-       qs_vals    = undefined :: undefined | list({binary(), binary() | true}),
-       raw_qs     = undefined :: undefined | binary(),
-       bindings   = undefined :: undefined | cowboy_dispatcher:bindings(),
-       headers    = []        :: cowboy_http:headers(),
-       p_headers  = []        :: [any()], %% @todo Improve those specs.
-       cookies    = undefined :: undefined | [{binary(), binary()}],
-       meta       = []        :: [{atom(), any()}],
-
-       %% Request body.
-       body_state = waiting   :: waiting | done |
-                                                               {multipart, non_neg_integer(), fun()},
-       buffer     = <<>>      :: binary(),
-
-       %% Response.
-       resp_state = waiting   :: locked | waiting | chunks | done,
-       resp_headers = []      :: cowboy_http:headers(),
-       resp_body  = <<>>      :: cowboy_http:fake_iodata() | {non_neg_integer(),
-                                                               fun(() -> {sent, non_neg_integer()})},
-
-       %% Functions.
-       urldecode :: {fun((binary(), T) -> binary()), T}
-}).
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/rebar.config b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/rebar.config
deleted file mode 100644 (file)
index 82d1fca..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{cover_enabled, true}.
-{deps, [
-       {proper, "1.0",
-               {git, "git://github.com/manopapad/proper.git", {tag, "v1.0"}}}
-]}.
-{eunit_opts, [verbose, {report, {eunit_surefire, [{dir, "."}]}}]}.
-{erl_opts, [
-%%     bin_opt_info,
-%%     warn_missing_spec,
-       warnings_as_errors,
-       warn_export_all
-]}.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.erl
deleted file mode 100644 (file)
index 6defeea..0000000
+++ /dev/null
@@ -1,85 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Cowboy API to start and stop listeners.
--module(cowboy).
-
--export([start_listener/6, stop_listener/1, child_spec/6, accept_ack/1]).
-
-%% @doc Start a listener for the given transport and protocol.
-%%
-%% A listener is effectively a pool of <em>NbAcceptors</em> acceptors.
-%% Acceptors accept connections on the given <em>Transport</em> and forward
-%% requests to the given <em>Protocol</em> handler. Both transport and protocol
-%% modules can be given options through the <em>TransOpts</em> and the
-%% <em>ProtoOpts</em> arguments. Available options are documented in the
-%% <em>listen</em> transport function and in the protocol module of your choice.
-%%
-%% All acceptor and request processes are supervised by the listener.
-%%
-%% It is recommended to set a large enough number of acceptors to improve
-%% performance. The exact number depends of course on your hardware, on the
-%% protocol used and on the number of expected simultaneous connections.
-%%
-%% The <em>Transport</em> option <em>max_connections</em> allows you to define
-%% the maximum number of simultaneous connections for this listener. It defaults
-%% to 1024. See <em>cowboy_listener</em> for more details on limiting the number
-%% of connections.
-%%
-%% Although Cowboy includes a <em>cowboy_http_protocol</em> handler, other
-%% handlers can be created for different protocols like IRC, FTP and more.
-%%
-%% <em>Ref</em> can be used to stop the listener later on.
--spec start_listener(any(), non_neg_integer(), module(), any(), module(), any())
-       -> {ok, pid()}.
-start_listener(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts)
-               when is_integer(NbAcceptors) andalso is_atom(Transport)
-               andalso is_atom(Protocol) ->
-       supervisor:start_child(cowboy_sup, child_spec(Ref, NbAcceptors,
-               Transport, TransOpts, Protocol, ProtoOpts)).
-
-%% @doc Stop a listener identified by <em>Ref</em>.
-%% @todo Currently request processes aren't terminated with the listener.
--spec stop_listener(any()) -> ok | {error, not_found}.
-stop_listener(Ref) ->
-       case supervisor:terminate_child(cowboy_sup, {cowboy_listener_sup, Ref}) of
-               ok ->
-                       supervisor:delete_child(cowboy_sup, {cowboy_listener_sup, Ref});
-               {error, Reason} ->
-                       {error, Reason}
-       end.
-
-%% @doc Return a child spec suitable for embedding.
-%%
-%% When you want to embed cowboy in another application, you can use this
-%% function to create a <em>ChildSpec</em> suitable for use in a supervisor.
-%% The parameters are the same as in <em>start_listener/6</em> but rather
-%% than hooking the listener to the cowboy internal supervisor, it just returns
-%% the spec.
--spec child_spec(any(), non_neg_integer(), module(), any(), module(), any())
-       -> supervisor:child_spec().
-child_spec(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts)
-               when is_integer(NbAcceptors) andalso is_atom(Transport)
-               andalso is_atom(Protocol) ->
-       {{cowboy_listener_sup, Ref}, {cowboy_listener_sup, start_link, [
-               NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts
-       ]}, permanent, 5000, supervisor, [cowboy_listener_sup]}.
-
-%% @doc Acknowledge the accepted connection.
-%%
-%% Effectively used to make sure the socket control has been given to
-%% the protocol process before starting to use it.
--spec accept_ack(pid()) -> ok.
-accept_ack(ListenerPid) ->
-       receive {shoot, ListenerPid} -> ok end.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptor.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptor.erl
deleted file mode 100644 (file)
index 4cb9fa7..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @private
--module(cowboy_acceptor).
-
--export([start_link/7]). %% API.
--export([acceptor/7]). %% Internal.
-
-%% API.
-
--spec start_link(inet:socket(), module(), module(), any(),
-       non_neg_integer(), pid(), pid()) -> {ok, pid()}.
-start_link(LSocket, Transport, Protocol, Opts,
-               MaxConns, ListenerPid, ReqsSup) ->
-       Pid = spawn_link(?MODULE, acceptor,
-               [LSocket, Transport, Protocol, Opts, MaxConns, ListenerPid, ReqsSup]),
-       {ok, Pid}.
-
-%% Internal.
-
--spec acceptor(inet:socket(), module(), module(), any(),
-       non_neg_integer(), pid(), pid()) -> no_return().
-acceptor(LSocket, Transport, Protocol, Opts, MaxConns, ListenerPid, ReqsSup) ->
-       case Transport:accept(LSocket, 2000) of
-               {ok, CSocket} ->
-                       {ok, Pid} = supervisor:start_child(ReqsSup,
-                               [ListenerPid, CSocket, Transport, Protocol, Opts]),
-                       Transport:controlling_process(CSocket, Pid),
-                       {ok, NbConns} = cowboy_listener:add_connection(ListenerPid,
-                               default, Pid),
-                       Pid ! {shoot, ListenerPid},
-                       limit_reqs(ListenerPid, NbConns, MaxConns);
-               {error, timeout} ->
-                       ignore;
-               {error, _Reason} ->
-                       %% @todo Probably do something here. If the socket was closed,
-                       %%       we may want to try and listen again on the port?
-                       ignore
-       end,
-       ?MODULE:acceptor(LSocket, Transport, Protocol, Opts,
-               MaxConns, ListenerPid, ReqsSup).
-
--spec limit_reqs(pid(), non_neg_integer(), non_neg_integer()) -> ok.
-limit_reqs(_ListenerPid, NbConns, MaxConns) when NbConns =< MaxConns ->
-       ok;
-limit_reqs(ListenerPid, _NbConns, MaxConns) ->
-       cowboy_listener:wait(ListenerPid, default, MaxConns).
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptors_sup.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptors_sup.erl
deleted file mode 100644 (file)
index 17849a6..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @private
--module(cowboy_acceptors_sup).
--behaviour(supervisor).
-
--export([start_link/7]). %% API.
--export([init/1]). %% supervisor.
-
-%% API.
-
--spec start_link(non_neg_integer(), module(), any(),
-       module(), any(), pid(), pid()) -> {ok, pid()}.
-start_link(NbAcceptors, Transport, TransOpts,
-               Protocol, ProtoOpts, ListenerPid, ReqsPid) ->
-       supervisor:start_link(?MODULE, [NbAcceptors, Transport, TransOpts,
-               Protocol, ProtoOpts, ListenerPid, ReqsPid]).
-
-%% supervisor.
-
--spec init(list()) -> {ok, {{one_for_one, 10, 10}, list()}}.
-init([NbAcceptors, Transport, TransOpts,
-               Protocol, ProtoOpts, ListenerPid, ReqsPid]) ->
-       {ok, LSocket} = Transport:listen(TransOpts),
-       MaxConns = proplists:get_value(max_connections, TransOpts, 1024),
-       Procs = [{{acceptor, self(), N}, {cowboy_acceptor, start_link, [
-                               LSocket, Transport, Protocol, ProtoOpts,
-                               MaxConns, ListenerPid, ReqsPid
-      ]}, permanent, brutal_kill, worker, []}
-               || N <- lists:seq(1, NbAcceptors)],
-       {ok, {{one_for_one, 10, 10}, Procs}}.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_cookies.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_cookies.erl
deleted file mode 100644 (file)
index 7f5ab60..0000000
+++ /dev/null
@@ -1,392 +0,0 @@
-%% Copyright 2007 Mochi Media, Inc.
-%% Copyright 2011 Thomas Burdick <thomas.burdick@gmail.com>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc HTTP Cookie parsing and generating (RFC 2965).
-
--module(cowboy_cookies).
-
--export([parse_cookie/1, cookie/3, cookie/2]). %% API.
-
-%% Types.
--type kv() :: {Name::binary(), Value::binary()}.
--type kvlist() :: [kv()].
--type cookie_option() :: {max_age, integer()}
-                               | {local_time, calendar:datetime()}
-                               | {domain, binary()} | {path, binary()}
-                               | {secure, true | false} | {http_only, true | false}.
--export_type([kv/0, kvlist/0, cookie_option/0]).
-
--define(QUOTE, $\").
-
--include_lib("eunit/include/eunit.hrl").
-
-%% API.
-
-%% @doc Parse the contents of a Cookie header field, ignoring cookie
-%% attributes, and return a simple property list.
--spec parse_cookie(binary()) -> kvlist().
-parse_cookie(<<>>) ->
-       [];
-parse_cookie(Cookie) when is_binary(Cookie) ->
-       parse_cookie(Cookie, []).
-
-%% @equiv cookie(Key, Value, [])
--spec cookie(binary(), binary()) -> kv().
-cookie(Key, Value) when is_binary(Key) andalso is_binary(Value) ->
-       cookie(Key, Value, []).
-
-%% @doc Generate a Set-Cookie header field tuple.
--spec cookie(binary(), binary(), [cookie_option()]) -> kv().
-cookie(Key, Value, Options) when is_binary(Key)
-               andalso is_binary(Value) andalso is_list(Options) ->
-       Cookie = <<(any_to_binary(Key))/binary, "=",
-               (quote(Value))/binary, "; Version=1">>,
-       %% Set-Cookie:
-       %%    Comment, Domain, Max-Age, Path, Secure, Version
-       ExpiresPart =
-               case proplists:get_value(max_age, Options) of
-                       undefined ->
-                               <<"">>;
-                       RawAge ->
-                               When = case proplists:get_value(local_time, Options) of
-                                               undefined ->
-                                                       calendar:local_time();
-                                               LocalTime ->
-                                                       LocalTime
-                                       end,
-                               Age = case RawAge < 0 of
-                                               true ->
-                                                       0;
-                                               false ->
-                                                       RawAge
-                                       end,
-                               AgeBinary = quote(Age),
-                               CookieDate = age_to_cookie_date(Age, When),
-                               <<"; Expires=", CookieDate/binary,
-                               "; Max-Age=", AgeBinary/binary>>
-               end,
-       SecurePart =
-               case proplists:get_value(secure, Options) of
-                       true ->
-                               <<"; Secure">>;
-                       _ ->
-                               <<"">>
-               end,
-       DomainPart =
-               case proplists:get_value(domain, Options) of
-                       undefined ->
-                               <<"">>;
-                       Domain ->
-                               <<"; Domain=", (quote(Domain))/binary>>
-               end,
-       PathPart =
-               case proplists:get_value(path, Options) of
-                       undefined ->
-                               <<"">>;
-                       Path ->
-                               <<"; Path=", (quote(Path))/binary>>
-               end,
-       HttpOnlyPart =
-               case proplists:get_value(http_only, Options) of
-                       true ->
-                               <<"; HttpOnly">>;
-                       _ ->
-                               <<"">>
-               end,
-       CookieParts = <<Cookie/binary, ExpiresPart/binary, SecurePart/binary,
-               DomainPart/binary, PathPart/binary, HttpOnlyPart/binary>>,
-       {<<"Set-Cookie">>, CookieParts}.
-
-%% Internal.
-
-%% @doc Check if a character is a white space character.
-is_whitespace($\s) -> true;
-is_whitespace($\t) -> true;
-is_whitespace($\r) -> true;
-is_whitespace($\n) -> true;
-is_whitespace(_) -> false.
-
-%% @doc Check if a character is a seperator.
-is_separator(C) when C < 32 -> true;
-is_separator($\s) -> true;
-is_separator($\t) -> true;
-is_separator($() -> true;
-is_separator($)) -> true;
-is_separator($<) -> true;
-is_separator($>) -> true;
-is_separator($@) -> true;
-is_separator($,) -> true;
-is_separator($;) -> true;
-is_separator($:) -> true;
-is_separator($\\) -> true;
-is_separator(?QUOTE) -> true;
-is_separator($/) -> true;
-is_separator($[) -> true;
-is_separator($]) -> true;
-is_separator($?) -> true;
-is_separator($=) -> true;
-is_separator(${) -> true;
-is_separator($}) -> true;
-is_separator(_) -> false.
-
-%% @doc Check if a binary has an ASCII seperator character.
-has_seperator(<<>>) ->
-       false;
-has_seperator(<<$/, Rest/binary>>) ->
-       has_seperator(Rest);
-has_seperator(<<C, Rest/binary>>) ->
-       case is_separator(C) of
-               true ->
-                       true;
-               false ->
-                       has_seperator(Rest)
-       end.
-
-%% @doc Convert to a binary and raise an error if quoting is required. Quoting
-%% is broken in different ways for different browsers. Its better to simply
-%% avoiding doing it at all.
-%% @end
--spec quote(term()) -> binary().
-quote(V0) ->
-       V = any_to_binary(V0),
-       case has_seperator(V) of
-               true ->
-                       erlang:error({cookie_quoting_required, V});
-               false ->
-                       V
-       end.
-
--spec add_seconds(integer(), calendar:datetime()) -> calendar:datetime().
-add_seconds(Secs, LocalTime) ->
-       Greg = calendar:datetime_to_gregorian_seconds(LocalTime),
-       calendar:gregorian_seconds_to_datetime(Greg + Secs).
-
--spec age_to_cookie_date(integer(), calendar:datetime()) -> binary().
-age_to_cookie_date(Age, LocalTime) ->
-       cowboy_clock:rfc2109(add_seconds(Age, LocalTime)).
-
--spec parse_cookie(binary(), kvlist()) -> kvlist().
-parse_cookie(<<>>, Acc) ->
-       lists:reverse(Acc);
-parse_cookie(String, Acc) ->
-       {{Token, Value}, Rest} = read_pair(String),
-       Acc1 = case Token of
-                       <<"">> ->
-                               Acc;
-                       <<"$", _R/binary>> ->
-                               Acc;
-                       _ ->
-                               [{Token, Value} | Acc]
-               end,
-       parse_cookie(Rest, Acc1).
-
--spec read_pair(binary()) -> {{binary(), binary()}, binary()}.
-read_pair(String) ->
-       {Token, Rest} = read_token(skip_whitespace(String)),
-       {Value, Rest1} = read_value(skip_whitespace(Rest)),
-       {{Token, Value}, skip_past_separator(Rest1)}.
-
--spec read_value(binary()) -> {binary(), binary()}.
-read_value(<<"=",  Value/binary>>) ->
-       Value1 = skip_whitespace(Value),
-       case Value1 of
-               <<?QUOTE, _R/binary>> ->
-                       read_quoted(Value1);
-               _ ->
-                       read_token(Value1)
-       end;
-read_value(String) ->
-       {<<"">>, String}.
-
--spec read_quoted(binary()) -> {binary(), binary()}.
-read_quoted(<<?QUOTE, String/binary>>) ->
-       read_quoted(String, <<"">>).
-
--spec read_quoted(binary(), binary()) -> {binary(), binary()}.
-read_quoted(<<"">>, Acc) ->
-       {Acc, <<"">>};
-read_quoted(<<?QUOTE, Rest/binary>>, Acc) ->
-       {Acc, Rest};
-read_quoted(<<$\\, Any, Rest/binary>>, Acc) ->
-       read_quoted(Rest, <<Acc/binary, Any>>);
-read_quoted(<<C, Rest/binary>>, Acc) ->
-       read_quoted(Rest, <<Acc/binary, C>>).
-
-%% @doc Drop characters while a function returns true.
-binary_dropwhile(_F, <<"">>) ->
-       <<"">>;
-binary_dropwhile(F, String) ->
-       <<C, Rest/binary>> = String,
-       case F(C) of
-               true ->
-                       binary_dropwhile(F, Rest);
-               false ->
-                       String
-       end.
-
-%% @doc Remove leading whitespace.
--spec skip_whitespace(binary()) -> binary().
-skip_whitespace(String) ->
-       binary_dropwhile(fun is_whitespace/1, String).
-
-%% @doc Split a binary when the current character causes F to return true.
-binary_splitwith(_F, Head, <<>>) ->
-       {Head, <<>>};
-binary_splitwith(F, Head, Tail) ->
-       <<C, NTail/binary>> = Tail,
-       case F(C) of
-               true ->
-                       {Head, Tail};
-               false ->
-                       binary_splitwith(F, <<Head/binary, C>>, NTail)
-       end.
-
-%% @doc Split a binary with a function returning true or false on each char.
-binary_splitwith(F, String) ->
-       binary_splitwith(F, <<>>, String).
-
-%% @doc Split the binary when the next seperator is found.
--spec read_token(binary()) -> {binary(), binary()}.
-read_token(String) ->
-       binary_splitwith(fun is_separator/1, String).
-
-%% @doc Return string after ; or , characters.
--spec skip_past_separator(binary()) -> binary().
-skip_past_separator(<<"">>) ->
-       <<"">>;
-skip_past_separator(<<";", Rest/binary>>) ->
-       Rest;
-skip_past_separator(<<",", Rest/binary>>) ->
-       Rest;
-skip_past_separator(<<_C, Rest/binary>>) ->
-       skip_past_separator(Rest).
-
--spec any_to_binary(binary() | string() | atom() | integer()) -> binary().
-any_to_binary(V) when is_binary(V) ->
-       V;
-any_to_binary(V) when is_list(V) ->
-       erlang:list_to_binary(V);
-any_to_binary(V) when is_atom(V) ->
-       erlang:atom_to_binary(V, latin1);
-any_to_binary(V) when is_integer(V) ->
-       list_to_binary(integer_to_list(V)).
-
-%% Tests.
-
--ifdef(TEST).
-
-quote_test() ->
-       %% ?assertError eunit macro is not compatible with coverage module
-       _ = try quote(<<":wq">>)
-       catch error:{cookie_quoting_required, <<":wq">>} -> ok
-       end,
-       ?assertEqual(<<"foo">>,quote(foo)),
-       ok.
-
-parse_cookie_test() ->
-       %% RFC example
-       C1 = <<"$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\";
-       Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\";
-       Shipping=\"FedEx\"; $Path=\"/acme\"">>,
-       ?assertEqual(
-       [{<<"Customer">>,<<"WILE_E_COYOTE">>},
-               {<<"Part_Number">>,<<"Rocket_Launcher_0001">>},
-               {<<"Shipping">>,<<"FedEx">>}],
-       parse_cookie(C1)),
-       %% Potential edge cases
-       ?assertEqual(
-       [{<<"foo">>, <<"x">>}],
-       parse_cookie(<<"foo=\"\\x\"">>)),
-       ?assertEqual(
-       [],
-       parse_cookie(<<"=">>)),
-       ?assertEqual(
-       [{<<"foo">>, <<"">>}, {<<"bar">>, <<"">>}],
-       parse_cookie(<<"  foo ; bar  ">>)),
-       ?assertEqual(
-       [{<<"foo">>, <<"">>}, {<<"bar">>, <<"">>}],
-       parse_cookie(<<"foo=;bar=">>)),
-       ?assertEqual(
-       [{<<"foo">>, <<"\";">>}, {<<"bar">>, <<"">>}],
-       parse_cookie(<<"foo = \"\\\";\";bar ">>)),
-       ?assertEqual(
-       [{<<"foo">>, <<"\";bar">>}],
-       parse_cookie(<<"foo=\"\\\";bar">>)),
-       ?assertEqual(
-       [],
-       parse_cookie(<<"">>)),
-       ?assertEqual(
-       [{<<"foo">>, <<"bar">>}, {<<"baz">>, <<"wibble">>}],
-       parse_cookie(<<"foo=bar , baz=wibble ">>)),
-       ok.
-
-domain_test() ->
-       ?assertEqual(
-       {<<"Set-Cookie">>,
-               <<"Customer=WILE_E_COYOTE; "
-               "Version=1; "
-               "Domain=acme.com; "
-               "HttpOnly">>},
-       cookie(<<"Customer">>, <<"WILE_E_COYOTE">>,
-                       [{http_only, true}, {domain, <<"acme.com">>}])),
-       ok.
-
-local_time_test() ->
-       {<<"Set-Cookie">>, B} = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>,
-                                                       [{max_age, 111}, {secure, true}]),
-
-       ?assertMatch(
-       [<<"Customer=WILE_E_COYOTE">>,
-               <<" Version=1">>,
-               <<" Expires=", _R/binary>>,
-               <<" Max-Age=111">>,
-               <<" Secure">>],
-       binary:split(B, <<";">>, [global])),
-       ok.
-
--spec cookie_test() -> no_return(). %% Not actually true, just a bad option.
-cookie_test() ->
-       C1 = {<<"Set-Cookie">>,
-               <<"Customer=WILE_E_COYOTE; "
-               "Version=1; "
-               "Path=/acme">>},
-       C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, [{path, <<"/acme">>}]),
-
-       C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>,
-                               [{path, <<"/acme">>}, {badoption, <<"negatory">>}]),
-
-       {<<"Set-Cookie">>,<<"=NoKey; Version=1">>}
-               = cookie(<<"">>, <<"NoKey">>, []),
-       {<<"Set-Cookie">>,<<"=NoKey; Version=1">>}
-               = cookie(<<"">>, <<"NoKey">>),
-       LocalTime = calendar:universal_time_to_local_time(
-               {{2007, 5, 15}, {13, 45, 33}}),
-       C2 = {<<"Set-Cookie">>,
-               <<"Customer=WILE_E_COYOTE; "
-               "Version=1; "
-               "Expires=Tue, 15 May 2007 13:45:33 GMT; "
-               "Max-Age=0">>},
-       C2 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>,
-                               [{max_age, -111}, {local_time, LocalTime}]),
-       C3 = {<<"Set-Cookie">>,
-               <<"Customer=WILE_E_COYOTE; "
-               "Version=1; "
-               "Expires=Wed, 16 May 2007 13:45:50 GMT; "
-               "Max-Age=86417">>},
-       C3 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>,
-                               [{max_age, 86417}, {local_time, LocalTime}]),
-       ok.
-
--endif.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_dispatcher.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_dispatcher.erl
deleted file mode 100644 (file)
index 22f6e1e..0000000
+++ /dev/null
@@ -1,309 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Dispatch requests according to a hostname and path.
--module(cowboy_dispatcher).
-
--export([split_host/1, split_path/2, match/3]). %% API.
-
--type bindings() :: list({atom(), binary()}).
--type tokens() :: list(binary()).
--type match_rule() :: '_' | '*' | list(binary() | '_' | '...' | atom()).
--type dispatch_path() :: list({match_rule(), module(), any()}).
--type dispatch_rule() :: {Host::match_rule(), Path::dispatch_path()}.
--type dispatch_rules() :: list(dispatch_rule()).
-
--export_type([bindings/0, tokens/0, dispatch_rules/0]).
-
--include_lib("eunit/include/eunit.hrl").
-
-%% API.
-
-%% @doc Split a hostname into a list of tokens.
--spec split_host(binary())
-       -> {tokens(), binary(), undefined | inet:ip_port()}.
-split_host(<<>>) ->
-       {[], <<>>, undefined};
-split_host(Host) ->
-       case binary:split(Host, <<":">>) of
-               [Host] ->
-                       {binary:split(Host, <<".">>, [global, trim]), Host, undefined};
-               [Host2, Port] ->
-                       {binary:split(Host2, <<".">>, [global, trim]), Host2,
-                               list_to_integer(binary_to_list(Port))}
-       end.
-
-%% @doc Split a path into a list of path segments.
-%%
-%% Following RFC2396, this function may return path segments containing any
-%% character, including <em>/</em> if, and only if, a <em>/</em> was escaped
-%% and part of a path segment.
--spec split_path(binary(), fun((binary()) -> binary())) ->
-               {tokens(), binary(), binary()}.
-split_path(Path, URLDec) ->
-       case binary:split(Path, <<"?">>) of
-               [Path] -> {do_split_path(Path, <<"/">>, URLDec), Path, <<>>};
-               [<<>>, Qs] -> {[], <<>>, Qs};
-               [Path2, Qs] -> {do_split_path(Path2, <<"/">>, URLDec), Path2, Qs}
-       end.
-
--spec do_split_path(binary(), <<_:8>>, fun((binary()) -> binary())) -> tokens().
-do_split_path(RawPath, Separator, URLDec) ->
-       EncodedPath = case binary:split(RawPath, Separator, [global, trim]) of
-               [<<>>|Path] -> Path;
-               Path -> Path
-       end,
-       [URLDec(Token) || Token <- EncodedPath].
-
-%% @doc Match hostname tokens and path tokens against dispatch rules.
-%%
-%% It is typically used for matching tokens for the hostname and path of
-%% the request against a global dispatch rule for your listener.
-%%
-%% Dispatch rules are a list of <em>{Hostname, PathRules}</em> tuples, with
-%% <em>PathRules</em> being a list of <em>{Path, HandlerMod, HandlerOpts}</em>.
-%%
-%% <em>Hostname</em> and <em>Path</em> are match rules and can be either the
-%% atom <em>'_'</em>, which matches everything for a single token, the atom
-%% <em>'*'</em>, which matches everything for the rest of the tokens, or a
-%% list of tokens. Each token can be either a binary, the atom <em>'_'</em>,
-%% the atom '...' or a named atom. A binary token must match exactly,
-%% <em>'_'</em> matches everything for a single token, <em>'...'</em> matches
-%% everything for the rest of the tokens and a named atom will bind the
-%% corresponding token value and return it.
-%%
-%% The list of hostname tokens is reversed before matching. For example, if
-%% we were to match "www.dev-extend.eu", we would first match "eu", then
-%% "dev-extend", then "www". This means that in the context of hostnames,
-%% the <em>'...'</em> atom matches properly the lower levels of the domain
-%% as would be expected.
-%%
-%% When a result is found, this function will return the handler module and
-%% options found in the dispatch list, a key-value list of bindings and
-%% the tokens that were matched by the <em>'...'</em> atom for both the
-%% hostname and path.
--spec match(Host::tokens(), Path::tokens(), dispatch_rules())
-       -> {ok, module(), any(), bindings(),
-               HostInfo::undefined | tokens(),
-               PathInfo::undefined | tokens()}
-       | {error, notfound, host} | {error, notfound, path}.
-match(_Host, _Path, []) ->
-       {error, notfound, host};
-match(_Host, Path, [{'_', PathMatchs}|_Tail]) ->
-       match_path(Path, PathMatchs, [], undefined);
-match(Host, Path, [{HostMatch, PathMatchs}|Tail]) ->
-       case try_match(host, Host, HostMatch) of
-               false ->
-                       match(Host, Path, Tail);
-               {true, HostBinds, undefined} ->
-                       match_path(Path, PathMatchs, HostBinds, undefined);
-               {true, HostBinds, HostInfo} ->
-                       match_path(Path, PathMatchs, HostBinds, lists:reverse(HostInfo))
-       end.
-
--spec match_path(tokens(), dispatch_path(), bindings(),
-       HostInfo::undefined | tokens())
-       -> {ok, module(), any(), bindings(),
-               HostInfo::undefined | tokens(),
-               PathInfo::undefined | tokens()}
-       | {error, notfound, path}.
-match_path(_Path, [], _HostBinds, _HostInfo) ->
-       {error, notfound, path};
-match_path(_Path, [{'_', Handler, Opts}|_Tail], HostBinds, HostInfo) ->
-       {ok, Handler, Opts, HostBinds, HostInfo, undefined};
-match_path('*', [{'*', Handler, Opts}|_Tail], HostBinds, HostInfo) ->
-       {ok, Handler, Opts, HostBinds, HostInfo, undefined};
-match_path(Path, [{PathMatch, Handler, Opts}|Tail], HostBinds, HostInfo) ->
-       case try_match(path, Path, PathMatch) of
-               false ->
-                       match_path(Path, Tail, HostBinds, HostInfo);
-               {true, PathBinds, PathInfo} ->
-                       {ok, Handler, Opts, HostBinds ++ PathBinds, HostInfo, PathInfo}
-       end.
-
-%% Internal.
-
--spec try_match(host | path, tokens(), match_rule())
-       -> {true, bindings(), undefined | tokens()} | false.
-try_match(host, List, Match) ->
-       list_match(lists:reverse(List), lists:reverse(Match), []);
-try_match(path, List, Match) ->
-       list_match(List, Match, []).
-
--spec list_match(tokens(), match_rule(), bindings())
-       -> {true, bindings(), undefined | tokens()} | false.
-%% Atom '...' matches any trailing path, stop right now.
-list_match(List, ['...'], Binds) ->
-       {true, Binds, List};
-%% Atom '_' matches anything, continue.
-list_match([_E|Tail], ['_'|TailMatch], Binds) ->
-       list_match(Tail, TailMatch, Binds);
-%% Both values match, continue.
-list_match([E|Tail], [E|TailMatch], Binds) ->
-       list_match(Tail, TailMatch, Binds);
-%% Bind E to the variable name V and continue.
-list_match([E|Tail], [V|TailMatch], Binds) when is_atom(V) ->
-       list_match(Tail, TailMatch, [{V, E}|Binds]);
-%% Match complete.
-list_match([], [], Binds) ->
-       {true, Binds, undefined};
-%% Values don't match, stop.
-list_match(_List, _Match, _Binds) ->
-       false.
-
-%% Tests.
-
--ifdef(TEST).
-
-split_host_test_() ->
-       %% {Host, Result}
-       Tests = [
-               {<<"">>, {[], <<"">>, undefined}},
-               {<<".........">>, {[], <<".........">>, undefined}},
-               {<<"*">>, {[<<"*">>], <<"*">>, undefined}},
-               {<<"cowboy.dev-extend.eu">>,
-                       {[<<"cowboy">>, <<"dev-extend">>, <<"eu">>],
-                        <<"cowboy.dev-extend.eu">>, undefined}},
-               {<<"dev-extend..eu">>,
-                       {[<<"dev-extend">>, <<>>, <<"eu">>],
-                        <<"dev-extend..eu">>, undefined}},
-               {<<"dev-extend.eu">>,
-                       {[<<"dev-extend">>, <<"eu">>], <<"dev-extend.eu">>, undefined}},
-               {<<"dev-extend.eu:8080">>,
-                       {[<<"dev-extend">>, <<"eu">>], <<"dev-extend.eu">>, 8080}},
-               {<<"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z">>,
-                       {[<<"a">>, <<"b">>, <<"c">>, <<"d">>, <<"e">>, <<"f">>, <<"g">>,
-                         <<"h">>, <<"i">>, <<"j">>, <<"k">>, <<"l">>, <<"m">>, <<"n">>,
-                         <<"o">>, <<"p">>, <<"q">>, <<"r">>, <<"s">>, <<"t">>, <<"u">>,
-                         <<"v">>, <<"w">>, <<"x">>, <<"y">>, <<"z">>],
-                        <<"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z">>,
-                        undefined}}
-       ],
-       [{H, fun() -> R = split_host(H) end} || {H, R} <- Tests].
-
-split_host_fail_test_() ->
-       Tests = [
-               <<"dev-extend.eu:owns">>,
-               <<"dev-extend.eu: owns">>,
-               <<"dev-extend.eu:42fun">>,
-               <<"dev-extend.eu: 42fun">>,
-               <<"dev-extend.eu:42 fun">>,
-               <<"dev-extend.eu:fun 42">>,
-               <<"dev-extend.eu: 42">>,
-               <<":owns">>,
-               <<":42 fun">>
-       ],
-       [{H, fun() -> case catch split_host(H) of
-               {'EXIT', _Reason} -> ok
-       end end} || H <- Tests].
-
-split_path_test_() ->
-       %% {Path, Result, QueryString}
-       Tests = [
-               {<<"?">>, [], <<"">>, <<"">>},
-               {<<"???">>, [], <<"">>, <<"??">>},
-               {<<"/">>, [], <<"/">>, <<"">>},
-               {<<"/users">>, [<<"users">>], <<"/users">>, <<"">>},
-               {<<"/users?">>, [<<"users">>], <<"/users">>, <<"">>},
-               {<<"/users?a">>, [<<"users">>], <<"/users">>, <<"a">>},
-               {<<"/users/42/friends?a=b&c=d&e=notsure?whatever">>,
-                       [<<"users">>, <<"42">>, <<"friends">>],
-                       <<"/users/42/friends">>, <<"a=b&c=d&e=notsure?whatever">>},
-               {<<"/users/a+b/c%21d?e+f=g+h">>,
-                       [<<"users">>, <<"a b">>, <<"c!d">>],
-                       <<"/users/a+b/c%21d">>, <<"e+f=g+h">>}
-       ],
-       URLDecode = fun(Bin) -> cowboy_http:urldecode(Bin, crash) end,
-       [{P, fun() -> {R, RawP, Qs} = split_path(P, URLDecode) end}
-               || {P, R, RawP, Qs} <- Tests].
-
-match_test_() ->
-       Dispatch = [
-               {[<<"www">>, '_', <<"dev-extend">>, <<"eu">>], [
-                       {[<<"users">>, '_', <<"mails">>], match_any_subdomain_users, []}
-               ]},
-               {[<<"dev-extend">>, <<"eu">>], [
-                       {[<<"users">>, id, <<"friends">>], match_extend_users_friends, []},
-                       {'_', match_extend, []}
-               ]},
-               {[<<"dev-extend">>, var], [
-                       {[<<"threads">>, var], match_duplicate_vars,
-                               [we, {expect, two}, var, here]}
-               ]},
-               {[<<"erlang">>, ext], [
-                       {'_', match_erlang_ext, []}
-               ]},
-               {'_', [
-                       {[<<"users">>, id, <<"friends">>], match_users_friends, []},
-                       {'_', match_any, []}
-               ]}
-       ],
-       %% {Host, Path, Result}
-       Tests = [
-               {[<<"any">>], [], {ok, match_any, [], []}},
-               {[<<"www">>, <<"any">>, <<"dev-extend">>, <<"eu">>],
-                       [<<"users">>, <<"42">>, <<"mails">>],
-                       {ok, match_any_subdomain_users, [], []}},
-               {[<<"www">>, <<"dev-extend">>, <<"eu">>],
-                       [<<"users">>, <<"42">>, <<"mails">>], {ok, match_any, [], []}},
-               {[<<"www">>, <<"dev-extend">>, <<"eu">>], [], {ok, match_any, [], []}},
-               {[<<"www">>, <<"any">>, <<"dev-extend">>, <<"eu">>],
-                       [<<"not_users">>, <<"42">>, <<"mails">>], {error, notfound, path}},
-               {[<<"dev-extend">>, <<"eu">>], [], {ok, match_extend, [], []}},
-               {[<<"dev-extend">>, <<"eu">>], [<<"users">>, <<"42">>, <<"friends">>],
-                       {ok, match_extend_users_friends, [], [{id, <<"42">>}]}},
-               {[<<"erlang">>, <<"fr">>], '_',
-                       {ok, match_erlang_ext, [], [{ext, <<"fr">>}]}},
-               {[<<"any">>], [<<"users">>, <<"444">>, <<"friends">>],
-                       {ok, match_users_friends, [], [{id, <<"444">>}]}},
-               {[<<"dev-extend">>, <<"fr">>], [<<"threads">>, <<"987">>],
-                       {ok, match_duplicate_vars, [we, {expect, two}, var, here],
-                       [{var, <<"fr">>}, {var, <<"987">>}]}}
-       ],
-       [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() ->
-               {ok, Handler, Opts, Binds, undefined, undefined} = match(H, P, Dispatch)
-       end} || {H, P, {ok, Handler, Opts, Binds}} <- Tests].
-
-match_info_test_() ->
-       Dispatch = [
-               {[<<"www">>, <<"dev-extend">>, <<"eu">>], [
-                       {[<<"pathinfo">>, <<"is">>, <<"next">>, '...'], match_path, []}
-               ]},
-               {['...', <<"dev-extend">>, <<"eu">>], [
-                       {'_', match_any, []}
-               ]}
-       ],
-       Tests = [
-               {[<<"dev-extend">>, <<"eu">>], [],
-                       {ok, match_any, [], [], [], undefined}},
-               {[<<"bugs">>, <<"dev-extend">>, <<"eu">>], [],
-                       {ok, match_any, [], [], [<<"bugs">>], undefined}},
-               {[<<"cowboy">>, <<"bugs">>, <<"dev-extend">>, <<"eu">>], [],
-                       {ok, match_any, [], [], [<<"cowboy">>, <<"bugs">>], undefined}},
-               {[<<"www">>, <<"dev-extend">>, <<"eu">>],
-                       [<<"pathinfo">>, <<"is">>, <<"next">>],
-                       {ok, match_path, [], [], undefined, []}},
-               {[<<"www">>, <<"dev-extend">>, <<"eu">>],
-                       [<<"pathinfo">>, <<"is">>, <<"next">>, <<"path_info">>],
-                       {ok, match_path, [], [], undefined, [<<"path_info">>]}},
-               {[<<"www">>, <<"dev-extend">>, <<"eu">>],
-                       [<<"pathinfo">>, <<"is">>, <<"next">>, <<"foo">>, <<"bar">>],
-                       {ok, match_path, [], [], undefined, [<<"foo">>, <<"bar">>]}}
-       ],
-       [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() ->
-               R = match(H, P, Dispatch)
-       end} || {H, P, R} <- Tests].
-
--endif.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_handler.erl
deleted file mode 100644 (file)
index b220b09..0000000
+++ /dev/null
@@ -1,48 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Handler for HTTP requests.
-%%
-%% HTTP handlers must implement three callbacks: <em>init/3</em>,
-%% <em>handle/2</em> and <em>terminate/2</em>, called one after another in
-%% that order.
-%%
-%% <em>init/3</em> is meant for initialization. It receives information about
-%% the transport and protocol used, along with the handler options from the
-%% dispatch list, and allows you to upgrade the protocol if needed. You can
-%% define a request-wide state here.
-%%
-%% <em>handle/2</em> is meant for handling the request. It receives the
-%% request and the state previously defined.
-%%
-%% <em>terminate/2</em> is meant for cleaning up. It also receives the
-%% request and the state previously defined.
-%%
-%% You do not have to read the request body or even send a reply if you do
-%% not need to. Cowboy will properly handle these cases and clean-up afterwards.
-%% In doubt it'll simply close the connection.
-%%
-%% Note that when upgrading the connection to WebSocket you do not need to
-%% define the <em>handle/2</em> and <em>terminate/2</em> callbacks.
--module(cowboy_http_handler).
-
--export([behaviour_info/1]).
-
-%% @private
--spec behaviour_info(_)
-       -> undefined | [{handle, 2} | {init, 3} | {terminate, 2}, ...].
-behaviour_info(callbacks) ->
-       [{init, 3}, {handle, 2}, {terminate, 2}];
-behaviour_info(_Other) ->
-       undefined.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_protocol.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_protocol.erl
deleted file mode 100644 (file)
index 0183785..0000000
+++ /dev/null
@@ -1,472 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc HTTP protocol handler.
-%%
-%% The available options are:
-%% <dl>
-%%  <dt>dispatch</dt><dd>The dispatch list for this protocol.</dd>
-%%  <dt>max_empty_lines</dt><dd>Max number of empty lines before a request.
-%%   Defaults to 5.</dd>
-%%  <dt>timeout</dt><dd>Time in milliseconds before an idle
-%%   connection is closed. Defaults to 5000 milliseconds.</dd>
-%%  <dt>urldecode</dt><dd>Function and options argument to use when decoding
-%%   URL encoded strings. Defaults to `{fun cowboy_http:urldecode/2, crash}'.
-%%   </dd>
-%% </dl>
-%%
-%% Note that there is no need to monitor these processes when using Cowboy as
-%% an application as it already supervises them under the listener supervisor.
-%%
-%% @see cowboy_dispatcher
-%% @see cowboy_http_handler
--module(cowboy_http_protocol).
--behaviour(cowboy_protocol).
-
--export([start_link/4]). %% API.
--export([init/4, parse_request/1, handler_loop/3]). %% FSM.
-
--include("include/http.hrl").
--include_lib("eunit/include/eunit.hrl").
-
--record(state, {
-       listener :: pid(),
-       socket :: inet:socket(),
-       transport :: module(),
-       dispatch :: cowboy_dispatcher:dispatch_rules(),
-       handler :: {module(), any()},
-       urldecode :: {fun((binary(), T) -> binary()), T},
-       req_empty_lines = 0 :: integer(),
-       max_empty_lines :: integer(),
-       req_keepalive = 1 :: integer(),
-       max_keepalive :: integer(),
-       max_line_length :: integer(),
-       timeout :: timeout(),
-       buffer = <<>> :: binary(),
-       hibernate = false,
-       loop_timeout = infinity :: timeout(),
-       loop_timeout_ref
-}).
-
-%% API.
-
-%% @doc Start an HTTP protocol process.
--spec start_link(pid(), inet:socket(), module(), any()) -> {ok, pid()}.
-start_link(ListenerPid, Socket, Transport, Opts) ->
-       Pid = spawn_link(?MODULE, init, [ListenerPid, Socket, Transport, Opts]),
-       {ok, Pid}.
-
-%% FSM.
-
-%% @private
--spec init(pid(), inet:socket(), module(), any()) -> ok | none().
-init(ListenerPid, Socket, Transport, Opts) ->
-       Dispatch = proplists:get_value(dispatch, Opts, []),
-       MaxEmptyLines = proplists:get_value(max_empty_lines, Opts, 5),
-       MaxKeepalive = proplists:get_value(max_keepalive, Opts, infinity),
-       MaxLineLength = proplists:get_value(max_line_length, Opts, 4096),
-       Timeout = proplists:get_value(timeout, Opts, 5000),
-       URLDecDefault = {fun cowboy_http:urldecode/2, crash},
-       URLDec = proplists:get_value(urldecode, Opts, URLDecDefault),
-       ok = cowboy:accept_ack(ListenerPid),
-       wait_request(#state{listener=ListenerPid, socket=Socket, transport=Transport,
-               dispatch=Dispatch, max_empty_lines=MaxEmptyLines,
-               max_keepalive=MaxKeepalive, max_line_length=MaxLineLength,
-               timeout=Timeout, urldecode=URLDec}).
-
-%% @private
--spec parse_request(#state{}) -> ok | none().
-%% We limit the length of the Request-line to MaxLength to avoid endlessly
-%% reading from the socket and eventually crashing.
-parse_request(State=#state{buffer=Buffer, max_line_length=MaxLength}) ->
-       case erlang:decode_packet(http_bin, Buffer, []) of
-               {ok, Request, Rest} -> request(Request, State#state{buffer=Rest});
-               {more, _Length} when byte_size(Buffer) > MaxLength ->
-                       error_terminate(413, State);
-               {more, _Length} -> wait_request(State);
-               {error, _Reason} -> error_terminate(400, State)
-       end.
-
--spec wait_request(#state{}) -> ok | none().
-wait_request(State=#state{socket=Socket, transport=Transport,
-               timeout=T, buffer=Buffer}) ->
-       case Transport:recv(Socket, 0, T) of
-               {ok, Data} -> parse_request(State#state{
-                       buffer= << Buffer/binary, Data/binary >>});
-               {error, _Reason} -> terminate(State)
-       end.
-
--spec request({http_request, cowboy_http:method(), cowboy_http:uri(),
-       cowboy_http:version()}, #state{}) -> ok | none().
-request({http_request, _Method, _URI, Version}, State)
-               when Version =/= {1, 0}, Version =/= {1, 1} ->
-       error_terminate(505, State);
-request({http_request, Method, {abs_path, AbsPath}, Version},
-               State=#state{socket=Socket, transport=Transport,
-               urldecode={URLDecFun, URLDecArg}=URLDec}) ->
-       URLDecode = fun(Bin) -> URLDecFun(Bin, URLDecArg) end,
-       {Path, RawPath, Qs} = cowboy_dispatcher:split_path(AbsPath, URLDecode),
-       ConnAtom = version_to_connection(Version),
-       parse_header(#http_req{socket=Socket, transport=Transport,
-               connection=ConnAtom, pid=self(), method=Method, version=Version,
-               path=Path, raw_path=RawPath, raw_qs=Qs, urldecode=URLDec}, State);
-request({http_request, Method, '*', Version},
-               State=#state{socket=Socket, transport=Transport, urldecode=URLDec}) ->
-       ConnAtom = version_to_connection(Version),
-       parse_header(#http_req{socket=Socket, transport=Transport,
-               connection=ConnAtom, pid=self(), method=Method, version=Version,
-               path='*', raw_path= <<"*">>, raw_qs= <<>>, urldecode=URLDec}, State);
-request({http_request, _Method, _URI, _Version}, State) ->
-       error_terminate(501, State);
-request({http_error, <<"\r\n">>},
-               State=#state{req_empty_lines=N, max_empty_lines=N}) ->
-       error_terminate(400, State);
-request({http_error, <<"\r\n">>}, State=#state{req_empty_lines=N}) ->
-       parse_request(State#state{req_empty_lines=N + 1});
-request(_Any, State) ->
-       error_terminate(400, State).
-
--spec parse_header(#http_req{}, #state{}) -> ok | none().
-parse_header(Req, State=#state{buffer=Buffer, max_line_length=MaxLength}) ->
-       case erlang:decode_packet(httph_bin, Buffer, []) of
-               {ok, Header, Rest} -> header(Header, Req, State#state{buffer=Rest});
-               {more, _Length} when byte_size(Buffer) > MaxLength ->
-                       error_terminate(413, State);
-               {more, _Length} -> wait_header(Req, State);
-               {error, _Reason} -> error_terminate(400, State)
-       end.
-
--spec wait_header(#http_req{}, #state{}) -> ok | none().
-wait_header(Req, State=#state{socket=Socket,
-               transport=Transport, timeout=T, buffer=Buffer}) ->
-       case Transport:recv(Socket, 0, T) of
-               {ok, Data} -> parse_header(Req, State#state{
-                       buffer= << Buffer/binary, Data/binary >>});
-               {error, timeout} -> error_terminate(408, State);
-               {error, closed} -> terminate(State)
-       end.
-
--spec header({http_header, integer(), cowboy_http:header(), any(), binary()}
-       | http_eoh, #http_req{}, #state{}) -> ok | none().
-header({http_header, _I, 'Host', _R, RawHost}, Req=#http_req{
-               transport=Transport, host=undefined}, State) ->
-       RawHost2 = cowboy_bstr:to_lower(RawHost),
-       case catch cowboy_dispatcher:split_host(RawHost2) of
-               {Host, RawHost3, undefined} ->
-                       Port = default_port(Transport:name()),
-                       dispatch(fun parse_header/2, Req#http_req{
-                               host=Host, raw_host=RawHost3, port=Port,
-                               headers=[{'Host', RawHost3}|Req#http_req.headers]}, State);
-               {Host, RawHost3, Port} ->
-                       dispatch(fun parse_header/2, Req#http_req{
-                               host=Host, raw_host=RawHost3, port=Port,
-                               headers=[{'Host', RawHost3}|Req#http_req.headers]}, State);
-               {'EXIT', _Reason} ->
-                       error_terminate(400, State)
-       end;
-%% Ignore Host headers if we already have it.
-header({http_header, _I, 'Host', _R, _V}, Req, State) ->
-       parse_header(Req, State);
-header({http_header, _I, 'Connection', _R, Connection},
-               Req=#http_req{headers=Headers}, State) ->
-       Req2 = Req#http_req{headers=[{'Connection', Connection}|Headers]},
-       {ConnTokens, Req3}
-               = cowboy_http_req:parse_header('Connection', Req2),
-       ConnAtom = cowboy_http:connection_to_atom(ConnTokens),
-       parse_header(Req3#http_req{connection=ConnAtom}, State);
-header({http_header, _I, Field, _R, Value}, Req, State) ->
-       Field2 = format_header(Field),
-       parse_header(Req#http_req{headers=[{Field2, Value}|Req#http_req.headers]},
-               State);
-%% The Host header is required in HTTP/1.1.
-header(http_eoh, #http_req{version={1, 1}, host=undefined}, State) ->
-       error_terminate(400, State);
-%% It is however optional in HTTP/1.0.
-header(http_eoh, Req=#http_req{version={1, 0}, transport=Transport,
-               host=undefined}, State=#state{buffer=Buffer}) ->
-       Port = default_port(Transport:name()),
-       dispatch(fun handler_init/2, Req#http_req{host=[], raw_host= <<>>,
-               port=Port, buffer=Buffer}, State#state{buffer= <<>>});
-header(http_eoh, Req, State=#state{buffer=Buffer}) ->
-       handler_init(Req#http_req{buffer=Buffer}, State#state{buffer= <<>>});
-header(_Any, _Req, State) ->
-       error_terminate(400, State).
-
--spec dispatch(fun((#http_req{}, #state{}) -> ok),
-       #http_req{}, #state{}) -> ok | none().
-dispatch(Next, Req=#http_req{host=Host, path=Path},
-               State=#state{dispatch=Dispatch}) ->
-       %% @todo We should allow a configurable chain of handlers here to
-       %%       allow things like url rewriting, site-wide authentication,
-       %%       optional dispatching, and more. It would default to what
-       %%       we are doing so far.
-       case cowboy_dispatcher:match(Host, Path, Dispatch) of
-               {ok, Handler, Opts, Binds, HostInfo, PathInfo} ->
-                       Next(Req#http_req{host_info=HostInfo, path_info=PathInfo,
-                               bindings=Binds}, State#state{handler={Handler, Opts}});
-               {error, notfound, host} ->
-                       error_terminate(400, State);
-               {error, notfound, path} ->
-                       error_terminate(404, State)
-       end.
-
--spec handler_init(#http_req{}, #state{}) -> ok | none().
-handler_init(Req, State=#state{transport=Transport,
-               handler={Handler, Opts}}) ->
-       try Handler:init({Transport:name(), http}, Req, Opts) of
-               {ok, Req2, HandlerState} ->
-                       handler_handle(HandlerState, Req2, State);
-               {loop, Req2, HandlerState} ->
-                       handler_before_loop(HandlerState, Req2, State);
-               {loop, Req2, HandlerState, hibernate} ->
-                       handler_before_loop(HandlerState, Req2,
-                               State#state{hibernate=true});
-               {loop, Req2, HandlerState, Timeout} ->
-                       handler_before_loop(HandlerState, Req2,
-                               State#state{loop_timeout=Timeout});
-               {loop, Req2, HandlerState, Timeout, hibernate} ->
-                       handler_before_loop(HandlerState, Req2,
-                               State#state{hibernate=true, loop_timeout=Timeout});
-               {shutdown, Req2, HandlerState} ->
-                       handler_terminate(HandlerState, Req2, State);
-               %% @todo {upgrade, transport, Module}
-               {upgrade, protocol, Module} ->
-                       upgrade_protocol(Req, State, Module)
-       catch Class:Reason ->
-               error_terminate(500, State),
-               error_logger:error_msg(
-                       "** Handler ~p terminating in init/3~n"
-                       "   for the reason ~p:~p~n"
-                       "** Options were ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Class, Reason, Opts, Req, erlang:get_stacktrace()])
-       end.
-
--spec upgrade_protocol(#http_req{}, #state{}, atom()) -> ok | none().
-upgrade_protocol(Req, State=#state{listener=ListenerPid,
-               handler={Handler, Opts}}, Module) ->
-       case Module:upgrade(ListenerPid, Handler, Opts, Req) of
-               {UpgradeRes, Req2} -> next_request(Req2, State, UpgradeRes);
-               _Any -> terminate(State)
-       end.
-
--spec handler_handle(any(), #http_req{}, #state{}) -> ok | none().
-handler_handle(HandlerState, Req, State=#state{handler={Handler, Opts}}) ->
-       try Handler:handle(Req, HandlerState) of
-               {ok, Req2, HandlerState2} ->
-                       terminate_request(HandlerState2, Req2, State)
-       catch Class:Reason ->
-               error_logger:error_msg(
-                       "** Handler ~p terminating in handle/2~n"
-                       "   for the reason ~p:~p~n"
-                       "** Options were ~p~n** Handler state was ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Class, Reason, Opts,
-                        HandlerState, Req, erlang:get_stacktrace()]),
-               handler_terminate(HandlerState, Req, State),
-               error_terminate(500, State)
-       end.
-
-%% We don't listen for Transport closes because that would force us
-%% to receive data and buffer it indefinitely.
--spec handler_before_loop(any(), #http_req{}, #state{}) -> ok | none().
-handler_before_loop(HandlerState, Req, State=#state{hibernate=true}) ->
-       State2 = handler_loop_timeout(State),
-       erlang:hibernate(?MODULE, handler_loop,
-               [HandlerState, Req, State2#state{hibernate=false}]);
-handler_before_loop(HandlerState, Req, State) ->
-       State2 = handler_loop_timeout(State),
-       handler_loop(HandlerState, Req, State2).
-
-%% Almost the same code can be found in cowboy_http_websocket.
--spec handler_loop_timeout(#state{}) -> #state{}.
-handler_loop_timeout(State=#state{loop_timeout=infinity}) ->
-       State#state{loop_timeout_ref=undefined};
-handler_loop_timeout(State=#state{loop_timeout=Timeout,
-               loop_timeout_ref=PrevRef}) ->
-       _ = case PrevRef of undefined -> ignore; PrevRef ->
-               erlang:cancel_timer(PrevRef) end,
-       TRef = make_ref(),
-       erlang:send_after(Timeout, self(), {?MODULE, timeout, TRef}),
-       State#state{loop_timeout_ref=TRef}.
-
--spec handler_loop(any(), #http_req{}, #state{}) -> ok | none().
-handler_loop(HandlerState, Req, State=#state{loop_timeout_ref=TRef}) ->
-       receive
-               {?MODULE, timeout, TRef} ->
-                       terminate_request(HandlerState, Req, State);
-               {?MODULE, timeout, OlderTRef} when is_reference(OlderTRef) ->
-                       handler_loop(HandlerState, Req, State);
-               Message ->
-                       handler_call(HandlerState, Req, State, Message)
-       end.
-
--spec handler_call(any(), #http_req{}, #state{}, any()) -> ok | none().
-handler_call(HandlerState, Req, State=#state{handler={Handler, Opts}},
-               Message) ->
-       try Handler:info(Message, Req, HandlerState) of
-               {ok, Req2, HandlerState2} ->
-                       terminate_request(HandlerState2, Req2, State);
-               {loop, Req2, HandlerState2} ->
-                       handler_before_loop(HandlerState2, Req2, State);
-               {loop, Req2, HandlerState2, hibernate} ->
-                       handler_before_loop(HandlerState2, Req2,
-                               State#state{hibernate=true})
-       catch Class:Reason ->
-               error_logger:error_msg(
-                       "** Handler ~p terminating in info/3~n"
-                       "   for the reason ~p:~p~n"
-                       "** Options were ~p~n** Handler state was ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Class, Reason, Opts,
-                        HandlerState, Req, erlang:get_stacktrace()]),
-               handler_terminate(HandlerState, Req, State),
-               error_terminate(500, State)
-       end.
-
--spec handler_terminate(any(), #http_req{}, #state{}) -> ok.
-handler_terminate(HandlerState, Req, #state{handler={Handler, Opts}}) ->
-       try
-               Handler:terminate(Req#http_req{resp_state=locked}, HandlerState)
-       catch Class:Reason ->
-               error_logger:error_msg(
-                       "** Handler ~p terminating in terminate/2~n"
-                       "   for the reason ~p:~p~n"
-                       "** Options were ~p~n** Handler state was ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Class, Reason, Opts,
-                        HandlerState, Req, erlang:get_stacktrace()])
-       end.
-
--spec terminate_request(any(), #http_req{}, #state{}) -> ok | none().
-terminate_request(HandlerState, Req, State) ->
-       HandlerRes = handler_terminate(HandlerState, Req, State),
-       next_request(Req, State, HandlerRes).
-
--spec next_request(#http_req{}, #state{}, any()) -> ok | none().
-next_request(Req=#http_req{connection=Conn},
-               State=#state{req_keepalive=Keepalive, max_keepalive=MaxKeepalive},
-               HandlerRes) ->
-       RespRes = ensure_response(Req),
-       {BodyRes, Buffer} = ensure_body_processed(Req),
-       %% Flush the resp_sent message before moving on.
-       receive {cowboy_http_req, resp_sent} -> ok after 0 -> ok end,
-       case {HandlerRes, BodyRes, RespRes, Conn} of
-               {ok, ok, ok, keepalive} when Keepalive < MaxKeepalive ->
-                       ?MODULE:parse_request(State#state{
-                               buffer=Buffer, req_empty_lines=0,
-                               req_keepalive=Keepalive + 1});
-               _Closed ->
-                       terminate(State)
-       end.
-
--spec ensure_body_processed(#http_req{}) -> {ok | close, binary()}.
-ensure_body_processed(#http_req{body_state=done, buffer=Buffer}) ->
-       {ok, Buffer};
-ensure_body_processed(Req=#http_req{body_state=waiting}) ->
-       case cowboy_http_req:body(Req) of
-               {error, badarg} -> {ok, Req#http_req.buffer}; %% No body.
-               {error, _Reason} -> {close, <<>>};
-               {ok, _, Req2} -> {ok, Req2#http_req.buffer}
-       end;
-ensure_body_processed(Req=#http_req{body_state={multipart, _, _}}) ->
-       {ok, Req2} = cowboy_http_req:multipart_skip(Req),
-       ensure_body_processed(Req2).
-
--spec ensure_response(#http_req{}) -> ok.
-%% The handler has already fully replied to the client.
-ensure_response(#http_req{resp_state=done}) ->
-       ok;
-%% No response has been sent but everything apparently went fine.
-%% Reply with 204 No Content to indicate this.
-ensure_response(Req=#http_req{resp_state=waiting}) ->
-       _ = cowboy_http_req:reply(204, [], [], Req),
-       ok;
-%% Close the chunked reply.
-ensure_response(#http_req{method='HEAD', resp_state=chunks}) ->
-       close;
-ensure_response(#http_req{socket=Socket, transport=Transport,
-               resp_state=chunks}) ->
-       Transport:send(Socket, <<"0\r\n\r\n">>),
-       close.
-
-%% Only send an error reply if there is no resp_sent message.
--spec error_terminate(cowboy_http:status(), #state{}) -> ok.
-error_terminate(Code, State=#state{socket=Socket, transport=Transport}) ->
-       receive
-               {cowboy_http_req, resp_sent} -> ok
-       after 0 ->
-               _ = cowboy_http_req:reply(Code, #http_req{
-                       socket=Socket, transport=Transport,
-                       connection=close, pid=self(), resp_state=waiting}),
-               ok
-       end,
-       terminate(State).
-
--spec terminate(#state{}) -> ok.
-terminate(#state{socket=Socket, transport=Transport}) ->
-       Transport:close(Socket),
-       ok.
-
-%% Internal.
-
--spec version_to_connection(cowboy_http:version()) -> keepalive | close.
-version_to_connection({1, 1}) -> keepalive;
-version_to_connection(_Any) -> close.
-
--spec default_port(atom()) -> 80 | 443.
-default_port(ssl) -> 443;
-default_port(_) -> 80.
-
-%% @todo While 32 should be enough for everybody, we should probably make
-%%       this configurable or something.
--spec format_header(atom()) -> atom(); (binary()) -> binary().
-format_header(Field) when is_atom(Field) ->
-       Field;
-format_header(Field) when byte_size(Field) =< 20; byte_size(Field) > 32 ->
-       Field;
-format_header(Field) ->
-       format_header(Field, true, <<>>).
-
-format_header(<<>>, _Any, Acc) ->
-       Acc;
-%% Replicate a bug in OTP for compatibility reasons when there's a - right
-%% after another. Proper use should always be 'true' instead of 'not Bool'.
-format_header(<< $-, Rest/bits >>, Bool, Acc) ->
-       format_header(Rest, not Bool, << Acc/binary, $- >>);
-format_header(<< C, Rest/bits >>, true, Acc) ->
-       format_header(Rest, false, << Acc/binary, (cowboy_bstr:char_to_upper(C)) >>);
-format_header(<< C, Rest/bits >>, false, Acc) ->
-       format_header(Rest, false, << Acc/binary, (cowboy_bstr:char_to_lower(C)) >>).
-
-%% Tests.
-
--ifdef(TEST).
-
-format_header_test_() ->
-       %% {Header, Result}
-       Tests = [
-               {<<"Sec-Websocket-Version">>, <<"Sec-Websocket-Version">>},
-               {<<"Sec-WebSocket-Version">>, <<"Sec-Websocket-Version">>},
-               {<<"sec-websocket-version">>, <<"Sec-Websocket-Version">>},
-               {<<"SEC-WEBSOCKET-VERSION">>, <<"Sec-Websocket-Version">>},
-               %% These last tests ensures we're formatting headers exactly like OTP.
-               %% Even though it's dumb, it's better for compatibility reasons.
-               {<<"Sec-WebSocket--Version">>, <<"Sec-Websocket--version">>},
-               {<<"Sec-WebSocket---Version">>, <<"Sec-Websocket---Version">>}
-       ],
-       [{H, fun() -> R = format_header(H) end} || {H, R} <- Tests].
-
--endif.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_req.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_req.erl
deleted file mode 100644 (file)
index dd772df..0000000
+++ /dev/null
@@ -1,820 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc HTTP request manipulation API.
-%%
-%% Almost all functions in this module return a new <em>Req</em> variable.
-%% It should always be used instead of the one used in your function call
-%% because it keeps the state of the request. It also allows Cowboy to do
-%% some lazy evaluation and cache results where possible.
--module(cowboy_http_req).
-
--export([
-       method/1, version/1, peer/1, peer_addr/1,
-       host/1, host_info/1, raw_host/1, port/1,
-       path/1, path_info/1, raw_path/1,
-       qs_val/2, qs_val/3, qs_vals/1, raw_qs/1,
-       binding/2, binding/3, bindings/1,
-       header/2, header/3, headers/1,
-       parse_header/2, parse_header/3,
-       cookie/2, cookie/3, cookies/1,
-       meta/2, meta/3
-]). %% Request API.
-
--export([
-       body/1, body/2, body_qs/1,
-       multipart_data/1, multipart_skip/1
-]). %% Request Body API.
-
--export([
-       set_resp_cookie/4, set_resp_header/3, set_resp_body/2,
-       set_resp_body_fun/3, has_resp_header/2, has_resp_body/1,
-       reply/2, reply/3, reply/4,
-       chunked_reply/2, chunked_reply/3, chunk/2,
-       upgrade_reply/3
-]). %% Response API.
-
--export([
-       compact/1, transport/1
-]). %% Misc API.
-
--include("include/http.hrl").
--include_lib("eunit/include/eunit.hrl").
-
-%% Request API.
-
-%% @doc Return the HTTP method of the request.
--spec method(#http_req{}) -> {cowboy_http:method(), #http_req{}}.
-method(Req) ->
-       {Req#http_req.method, Req}.
-
-%% @doc Return the HTTP version used for the request.
--spec version(#http_req{}) -> {cowboy_http:version(), #http_req{}}.
-version(Req) ->
-       {Req#http_req.version, Req}.
-
-%% @doc Return the peer address and port number of the remote host.
--spec peer(#http_req{}) -> {{inet:ip_address(), inet:ip_port()}, #http_req{}}.
-peer(Req=#http_req{socket=Socket, transport=Transport, peer=undefined}) ->
-       {ok, Peer} = Transport:peername(Socket),
-       {Peer, Req#http_req{peer=Peer}};
-peer(Req) ->
-       {Req#http_req.peer, Req}.
-
-%% @doc Returns the peer address calculated from headers.
--spec peer_addr(#http_req{}) -> {inet:ip_address(), #http_req{}}.
-peer_addr(Req = #http_req{}) ->
-       {RealIp, Req1} = header(<<"X-Real-Ip">>, Req),
-       {ForwardedForRaw, Req2} = header(<<"X-Forwarded-For">>, Req1),
-       {{PeerIp, _PeerPort}, Req3} = peer(Req2),
-       ForwardedFor = case ForwardedForRaw of
-               undefined ->
-                       undefined;
-               ForwardedForRaw ->
-                       case re:run(ForwardedForRaw, "^(?<first_ip>[^\\,]+)",
-                                       [{capture, [first_ip], binary}]) of
-                               {match, [FirstIp]} -> FirstIp;
-                               _Any -> undefined
-                       end
-       end,
-       {ok, PeerAddr} = if
-               is_binary(RealIp) -> inet_parse:address(binary_to_list(RealIp));
-               is_binary(ForwardedFor) -> inet_parse:address(binary_to_list(ForwardedFor));
-               true -> {ok, PeerIp}
-       end,
-       {PeerAddr, Req3}.
-
-%% @doc Return the tokens for the hostname requested.
--spec host(#http_req{}) -> {cowboy_dispatcher:tokens(), #http_req{}}.
-host(Req) ->
-       {Req#http_req.host, Req}.
-
-%% @doc Return the extra host information obtained from partially matching
-%% the hostname using <em>'...'</em>.
--spec host_info(#http_req{})
-       -> {cowboy_dispatcher:tokens() | undefined, #http_req{}}.
-host_info(Req) ->
-       {Req#http_req.host_info, Req}.
-
-%% @doc Return the raw host directly taken from the request.
--spec raw_host(#http_req{}) -> {binary(), #http_req{}}.
-raw_host(Req) ->
-       {Req#http_req.raw_host, Req}.
-
-%% @doc Return the port used for this request.
--spec port(#http_req{}) -> {inet:ip_port(), #http_req{}}.
-port(Req) ->
-       {Req#http_req.port, Req}.
-
-%% @doc Return the path segments for the path requested.
-%%
-%% Following RFC2396, this function may return path segments containing any
-%% character, including <em>/</em> if, and only if, a <em>/</em> was escaped
-%% and part of a path segment in the path requested.
--spec path(#http_req{}) -> {cowboy_dispatcher:tokens(), #http_req{}}.
-path(Req) ->
-       {Req#http_req.path, Req}.
-
-%% @doc Return the extra path information obtained from partially matching
-%% the patch using <em>'...'</em>.
--spec path_info(#http_req{})
-       -> {cowboy_dispatcher:tokens() | undefined, #http_req{}}.
-path_info(Req) ->
-       {Req#http_req.path_info, Req}.
-
-%% @doc Return the raw path directly taken from the request.
--spec raw_path(#http_req{}) -> {binary(), #http_req{}}.
-raw_path(Req) ->
-       {Req#http_req.raw_path, Req}.
-
-%% @equiv qs_val(Name, Req, undefined)
--spec qs_val(binary(), #http_req{})
-       -> {binary() | true | undefined, #http_req{}}.
-qs_val(Name, Req) when is_binary(Name) ->
-       qs_val(Name, Req, undefined).
-
-%% @doc Return the query string value for the given key, or a default if
-%% missing.
-qs_val(Name, Req=#http_req{raw_qs=RawQs, qs_vals=undefined,
-               urldecode={URLDecFun, URLDecArg}}, Default) when is_binary(Name) ->
-       QsVals = parse_qs(RawQs, fun(Bin) -> URLDecFun(Bin, URLDecArg) end),
-       qs_val(Name, Req#http_req{qs_vals=QsVals}, Default);
-qs_val(Name, Req, Default) ->
-       case lists:keyfind(Name, 1, Req#http_req.qs_vals) of
-               {Name, Value} -> {Value, Req};
-               false -> {Default, Req}
-       end.
-
-%% @doc Return the full list of query string values.
--spec qs_vals(#http_req{}) -> {list({binary(), binary() | true}), #http_req{}}.
-qs_vals(Req=#http_req{raw_qs=RawQs, qs_vals=undefined,
-               urldecode={URLDecFun, URLDecArg}}) ->
-       QsVals = parse_qs(RawQs, fun(Bin) -> URLDecFun(Bin, URLDecArg) end),
-       qs_vals(Req#http_req{qs_vals=QsVals});
-qs_vals(Req=#http_req{qs_vals=QsVals}) ->
-       {QsVals, Req}.
-
-%% @doc Return the raw query string directly taken from the request.
--spec raw_qs(#http_req{}) -> {binary(), #http_req{}}.
-raw_qs(Req) ->
-       {Req#http_req.raw_qs, Req}.
-
-%% @equiv binding(Name, Req, undefined)
--spec binding(atom(), #http_req{}) -> {binary() | undefined, #http_req{}}.
-binding(Name, Req) when is_atom(Name) ->
-       binding(Name, Req, undefined).
-
-%% @doc Return the binding value for the given key obtained when matching
-%% the host and path against the dispatch list, or a default if missing.
-binding(Name, Req, Default) when is_atom(Name) ->
-       case lists:keyfind(Name, 1, Req#http_req.bindings) of
-               {Name, Value} -> {Value, Req};
-               false -> {Default, Req}
-       end.
-
-%% @doc Return the full list of binding values.
--spec bindings(#http_req{}) -> {list({atom(), binary()}), #http_req{}}.
-bindings(Req) ->
-       {Req#http_req.bindings, Req}.
-
-%% @equiv header(Name, Req, undefined)
--spec header(atom() | binary(), #http_req{})
-       -> {binary() | undefined, #http_req{}}.
-header(Name, Req) when is_atom(Name) orelse is_binary(Name) ->
-       header(Name, Req, undefined).
-
-%% @doc Return the header value for the given key, or a default if missing.
-header(Name, Req, Default) when is_atom(Name) orelse is_binary(Name) ->
-       case lists:keyfind(Name, 1, Req#http_req.headers) of
-               {Name, Value} -> {Value, Req};
-               false -> {Default, Req}
-       end.
-
-%% @doc Return the full list of headers.
--spec headers(#http_req{}) -> {cowboy_http:headers(), #http_req{}}.
-headers(Req) ->
-       {Req#http_req.headers, Req}.
-
-%% @doc Semantically parse headers.
-%%
-%% When the value isn't found, a proper default value for the type
-%% returned is used as a return value.
-%% @see parse_header/3
--spec parse_header(cowboy_http:header(), #http_req{})
-       -> {any(), #http_req{}} | {error, badarg}.
-parse_header(Name, Req=#http_req{p_headers=PHeaders}) ->
-       case lists:keyfind(Name, 1, PHeaders) of
-               false -> parse_header(Name, Req, parse_header_default(Name));
-               {Name, Value} -> {Value, Req}
-       end.
-
-%% @doc Default values for semantic header parsing.
--spec parse_header_default(cowboy_http:header()) -> any().
-parse_header_default('Connection') -> [];
-parse_header_default(_Name) -> undefined.
-
-%% @doc Semantically parse headers.
-%%
-%% When the header is unknown, the value is returned directly without parsing.
--spec parse_header(cowboy_http:header(), #http_req{}, any())
-       -> {any(), #http_req{}} | {error, badarg}.
-parse_header(Name, Req, Default) when Name =:= 'Accept' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:list(Value, fun cowboy_http:media_range/2)
-               end);
-parse_header(Name, Req, Default) when Name =:= 'Accept-Charset' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:nonempty_list(Value, fun cowboy_http:conneg/2)
-               end);
-parse_header(Name, Req, Default) when Name =:= 'Accept-Encoding' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:list(Value, fun cowboy_http:conneg/2)
-               end);
-parse_header(Name, Req, Default) when Name =:= 'Accept-Language' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:nonempty_list(Value, fun cowboy_http:language_range/2)
-               end);
-parse_header(Name, Req, Default) when Name =:= 'Connection' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2)
-               end);
-parse_header(Name, Req, Default) when Name =:= 'Content-Length' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:digits(Value)
-               end);
-parse_header(Name, Req, Default) when Name =:= 'Content-Type' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:content_type(Value)
-               end);
-parse_header(Name, Req, Default)
-               when Name =:= 'If-Match'; Name =:= 'If-None-Match' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:entity_tag_match(Value)
-               end);
-parse_header(Name, Req, Default)
-               when Name =:= 'If-Modified-Since'; Name =:= 'If-Unmodified-Since' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:http_date(Value)
-               end);
-parse_header(Name, Req, Default) when Name =:= 'Upgrade' ->
-       parse_header(Name, Req, Default,
-               fun (Value) ->
-                       cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2)
-               end);
-parse_header(Name, Req, Default) when Name =:= <<"sec-websocket-protocol">> ->
-    parse_header(Name, Req, Default,
-        fun (Value) ->
-            cowboy_http:nonempty_list(Value, fun cowboy_http:token/2)
-        end);
-parse_header(Name, Req, Default) ->
-       {Value, Req2} = header(Name, Req, Default),
-       {undefined, Value, Req2}.
-
-parse_header(Name, Req=#http_req{p_headers=PHeaders}, Default, Fun) ->
-       case header(Name, Req) of
-               {undefined, Req2} ->
-                       {Default, Req2#http_req{p_headers=[{Name, Default}|PHeaders]}};
-               {Value, Req2} ->
-                       case Fun(Value) of
-                               {error, badarg} ->
-                                       {error, badarg};
-                               P ->
-                                       {P, Req2#http_req{p_headers=[{Name, P}|PHeaders]}}
-                       end
-       end.
-
-%% @equiv cookie(Name, Req, undefined)
--spec cookie(binary(), #http_req{})
-       -> {binary() | true | undefined, #http_req{}}.
-cookie(Name, Req) when is_binary(Name) ->
-       cookie(Name, Req, undefined).
-
-%% @doc Return the cookie value for the given key, or a default if
-%% missing.
-cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) ->
-       case header('Cookie', Req) of
-               {undefined, Req2} ->
-                       {Default, Req2#http_req{cookies=[]}};
-               {RawCookie, Req2} ->
-                       Cookies = cowboy_cookies:parse_cookie(RawCookie),
-                       cookie(Name, Req2#http_req{cookies=Cookies}, Default)
-       end;
-cookie(Name, Req, Default) ->
-       case lists:keyfind(Name, 1, Req#http_req.cookies) of
-               {Name, Value} -> {Value, Req};
-               false -> {Default, Req}
-       end.
-
-%% @doc Return the full list of cookie values.
--spec cookies(#http_req{}) -> {list({binary(), binary() | true}), #http_req{}}.
-cookies(Req=#http_req{cookies=undefined}) ->
-       case header('Cookie', Req) of
-               {undefined, Req2} ->
-                       {[], Req2#http_req{cookies=[]}};
-               {RawCookie, Req2} ->
-                       Cookies = cowboy_cookies:parse_cookie(RawCookie),
-                       cookies(Req2#http_req{cookies=Cookies})
-       end;
-cookies(Req=#http_req{cookies=Cookies}) ->
-       {Cookies, Req}.
-
-%% @equiv meta(Name, Req, undefined)
--spec meta(atom(), #http_req{}) -> {any() | undefined, #http_req{}}.
-meta(Name, Req) ->
-       meta(Name, Req, undefined).
-
-%% @doc Return metadata information about the request.
-%%
-%% Metadata information varies from one protocol to another. Websockets
-%% would define the protocol version here, while REST would use it to
-%% indicate which media type, language and charset were retained.
--spec meta(atom(), #http_req{}, any()) -> {any(), #http_req{}}.
-meta(Name, Req, Default) ->
-       case lists:keyfind(Name, 1, Req#http_req.meta) of
-               {Name, Value} -> {Value, Req};
-               false -> {Default, Req}
-       end.
-
-%% Request Body API.
-
-%% @doc Return the full body sent with the request, or <em>{error, badarg}</em>
-%% if no <em>Content-Length</em> is available.
-%% @todo We probably want to allow a max length.
-%% @todo Add multipart support to this function.
--spec body(#http_req{}) -> {ok, binary(), #http_req{}} | {error, atom()}.
-body(Req) ->
-       {Length, Req2} = cowboy_http_req:parse_header('Content-Length', Req),
-       case Length of
-               undefined -> {error, badarg};
-               {error, badarg} -> {error, badarg};
-               _Any ->
-                       body(Length, Req2)
-       end.
-
-%% @doc Return <em>Length</em> bytes of the request body.
-%%
-%% You probably shouldn't be calling this function directly, as it expects the
-%% <em>Length</em> argument to be the full size of the body, and will consider
-%% the body to be fully read from the socket.
-%% @todo We probably want to configure the timeout.
--spec body(non_neg_integer(), #http_req{})
-       -> {ok, binary(), #http_req{}} | {error, atom()}.
-body(Length, Req=#http_req{body_state=waiting, buffer=Buffer})
-               when is_integer(Length) andalso Length =< byte_size(Buffer) ->
-       << Body:Length/binary, Rest/bits >> = Buffer,
-       {ok, Body, Req#http_req{body_state=done, buffer=Rest}};
-body(Length, Req=#http_req{socket=Socket, transport=Transport,
-               body_state=waiting, buffer=Buffer}) ->
-       case Transport:recv(Socket, Length - byte_size(Buffer), 5000) of
-               {ok, Body} -> {ok, << Buffer/binary, Body/binary >>,
-                       Req#http_req{body_state=done, buffer= <<>>}};
-               {error, Reason} -> {error, Reason}
-       end.
-
-%% @doc Return the full body sent with the reqest, parsed as an
-%% application/x-www-form-urlencoded string. Essentially a POST query string.
--spec body_qs(#http_req{}) -> {list({binary(), binary() | true}), #http_req{}}.
-body_qs(Req=#http_req{urldecode={URLDecFun, URLDecArg}}) ->
-       {ok, Body, Req2} = body(Req),
-       {parse_qs(Body, fun(Bin) -> URLDecFun(Bin, URLDecArg) end), Req2}.
-
-%% Multipart Request API.
-
-%% @doc Return data from the multipart parser.
-%%
-%% Use this function for multipart streaming. For each part in the request,
-%% this function returns <em>{headers, Headers}</em> followed by a sequence of
-%% <em>{data, Data}</em> tuples and finally <em>end_of_part</em>. When there
-%% is no part to parse anymore, <em>eof</em> is returned.
-%%
-%% If the request Content-Type is not a multipart one, <em>{error, badarg}</em>
-%% is returned.
--spec multipart_data(#http_req{})
-               -> {{headers, cowboy_http:headers()}
-                               | {data, binary()} | end_of_part | eof,
-                       #http_req{}}.
-multipart_data(Req=#http_req{body_state=waiting}) ->
-       {{<<"multipart">>, _SubType, Params}, Req2} =
-               parse_header('Content-Type', Req),
-       {_, Boundary} = lists:keyfind(<<"boundary">>, 1, Params),
-       {Length, Req3=#http_req{buffer=Buffer}} =
-               parse_header('Content-Length', Req2),
-       multipart_data(Req3, Length, cowboy_multipart:parser(Boundary), Buffer);
-multipart_data(Req=#http_req{body_state={multipart, Length, Cont}}) ->
-       multipart_data(Req, Length, Cont());
-multipart_data(Req=#http_req{body_state=done}) ->
-       {eof, Req}.
-
-multipart_data(Req, Length, Parser, Buffer) when byte_size(Buffer) >= Length ->
-       << Data:Length/binary, Rest/binary >> = Buffer,
-       multipart_data(Req#http_req{buffer=Rest}, 0, Parser(Data));
-multipart_data(Req, Length, Parser, Buffer) ->
-       NewLength = Length - byte_size(Buffer),
-       multipart_data(Req#http_req{buffer= <<>>}, NewLength, Parser(Buffer)).
-
-multipart_data(Req, Length, {headers, Headers, Cont}) ->
-       {{headers, Headers}, Req#http_req{body_state={multipart, Length, Cont}}};
-multipart_data(Req, Length, {body, Data, Cont}) ->
-       {{body, Data}, Req#http_req{body_state={multipart, Length, Cont}}};
-multipart_data(Req, Length, {end_of_part, Cont}) ->
-       {end_of_part, Req#http_req{body_state={multipart, Length, Cont}}};
-multipart_data(Req, 0, eof) ->
-       {eof, Req#http_req{body_state=done}};
-multipart_data(Req=#http_req{socket=Socket, transport=Transport},
-               Length, eof) ->
-       {ok, _Data} = Transport:recv(Socket, Length, 5000),
-       {eof, Req#http_req{body_state=done}};
-multipart_data(Req=#http_req{socket=Socket, transport=Transport},
-               Length, {more, Parser}) when Length > 0 ->
-       case Transport:recv(Socket, 0, 5000) of
-               {ok, << Data:Length/binary, Buffer/binary >>} ->
-                       multipart_data(Req#http_req{buffer=Buffer}, 0, Parser(Data));
-               {ok, Data} ->
-                       multipart_data(Req, Length - byte_size(Data), Parser(Data))
-       end.
-
-%% @doc Skip a part returned by the multipart parser.
-%%
-%% This function repeatedly calls <em>multipart_data/1</em> until
-%% <em>end_of_part</em> or <em>eof</em> is parsed.
-multipart_skip(Req) ->
-       case multipart_data(Req) of
-               {end_of_part, Req2} -> {ok, Req2};
-               {eof, Req2} -> {ok, Req2};
-               {_Other, Req2} -> multipart_skip(Req2)
-       end.
-
-%% Response API.
-
-%% @doc Add a cookie header to the response.
--spec set_resp_cookie(binary(), binary(), [cowboy_cookies:cookie_option()],
-       #http_req{}) -> {ok, #http_req{}}.
-set_resp_cookie(Name, Value, Options, Req) ->
-       {HeaderName, HeaderValue} = cowboy_cookies:cookie(Name, Value, Options),
-       set_resp_header(HeaderName, HeaderValue, Req).
-
-%% @doc Add a header to the response.
-set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) ->
-       NameBin = header_to_binary(Name),
-       {ok, Req#http_req{resp_headers=[{NameBin, Value}|RespHeaders]}}.
-
-%% @doc Add a body to the response.
-%%
-%% The body set here is ignored if the response is later sent using
-%% anything other than reply/2 or reply/3. The response body is expected
-%% to be a binary or an iolist.
-set_resp_body(Body, Req) ->
-       {ok, Req#http_req{resp_body=Body}}.
-
-
-%% @doc Add a body function to the response.
-%%
-%% The response body may also be set to a content-length - stream-function pair.
-%% If the response body is of this type normal response headers will be sent.
-%% After the response headers has been sent the body function is applied.
-%% The body function is expected to write the response body directly to the
-%% socket using the transport module.
-%%
-%% If the body function crashes while writing the response body or writes fewer
-%% bytes than declared the behaviour is undefined. The body set here is ignored
-%% if the response is later sent using anything other than `reply/2' or
-%% `reply/3'.
-%%
-%% @see cowboy_http_req:transport/1.
--spec set_resp_body_fun(non_neg_integer(), fun(() -> {sent, non_neg_integer()}),
-               #http_req{}) -> {ok, #http_req{}}.
-set_resp_body_fun(StreamLen, StreamFun, Req) ->
-       {ok, Req#http_req{resp_body={StreamLen, StreamFun}}}.
-
-
-%% @doc Return whether the given header has been set for the response.
-has_resp_header(Name, #http_req{resp_headers=RespHeaders}) ->
-       NameBin = header_to_binary(Name),
-       lists:keymember(NameBin, 1, RespHeaders).
-
-%% @doc Return whether a body has been set for the response.
-has_resp_body(#http_req{resp_body={Length, _}}) ->
-       Length > 0;
-has_resp_body(#http_req{resp_body=RespBody}) ->
-       iolist_size(RespBody) > 0.
-
-%% @equiv reply(Status, [], [], Req)
--spec reply(cowboy_http:status(), #http_req{}) -> {ok, #http_req{}}.
-reply(Status, Req=#http_req{resp_body=Body}) ->
-       reply(Status, [], Body, Req).
-
-%% @equiv reply(Status, Headers, [], Req)
--spec reply(cowboy_http:status(), cowboy_http:headers(), #http_req{})
-       -> {ok, #http_req{}}.
-reply(Status, Headers, Req=#http_req{resp_body=Body}) ->
-       reply(Status, Headers, Body, Req).
-
-%% @doc Send a reply to the client.
-reply(Status, Headers, Body, Req=#http_req{socket=Socket,
-               transport=Transport, connection=Connection, pid=ReqPid,
-               method=Method, resp_state=waiting, resp_headers=RespHeaders}) ->
-       RespConn = response_connection(Headers, Connection),
-       ContentLen = case Body of {CL, _} -> CL; _ -> iolist_size(Body) end,
-       Head = response_head(Status, Headers, RespHeaders, [
-               {<<"Connection">>, atom_to_connection(Connection)},
-               {<<"Content-Length">>, integer_to_list(ContentLen)},
-               {<<"Date">>, cowboy_clock:rfc1123()},
-               {<<"Server">>, <<"Cowboy">>}
-       ]),
-       case {Method, Body} of
-               {'HEAD', _} -> Transport:send(Socket, Head);
-               {_, {_, StreamFun}} -> Transport:send(Socket, Head), StreamFun();
-               {_, _} -> Transport:send(Socket, [Head, Body])
-       end,
-       ReqPid ! {?MODULE, resp_sent},
-       {ok, Req#http_req{connection=RespConn, resp_state=done,
-               resp_headers=[], resp_body= <<>>}}.
-
-%% @equiv chunked_reply(Status, [], Req)
--spec chunked_reply(cowboy_http:status(), #http_req{}) -> {ok, #http_req{}}.
-chunked_reply(Status, Req) ->
-       chunked_reply(Status, [], Req).
-
-%% @doc Initiate the sending of a chunked reply to the client.
-%% @see cowboy_http_req:chunk/2
--spec chunked_reply(cowboy_http:status(), cowboy_http:headers(), #http_req{})
-       -> {ok, #http_req{}}.
-chunked_reply(Status, Headers, Req=#http_req{socket=Socket,
-               transport=Transport, connection=Connection, pid=ReqPid,
-               resp_state=waiting, resp_headers=RespHeaders}) ->
-       RespConn = response_connection(Headers, Connection),
-       Head = response_head(Status, Headers, RespHeaders, [
-               {<<"Connection">>, atom_to_connection(Connection)},
-               {<<"Transfer-Encoding">>, <<"chunked">>},
-               {<<"Date">>, cowboy_clock:rfc1123()},
-               {<<"Server">>, <<"Cowboy">>}
-       ]),
-       Transport:send(Socket, Head),
-       ReqPid ! {?MODULE, resp_sent},
-       {ok, Req#http_req{connection=RespConn, resp_state=chunks,
-               resp_headers=[], resp_body= <<>>}}.
-
-%% @doc Send a chunk of data.
-%%
-%% A chunked reply must have been initiated before calling this function.
-chunk(_Data, #http_req{socket=_Socket, transport=_Transport, method='HEAD'}) ->
-       ok;
-chunk(Data, #http_req{socket=Socket, transport=Transport, resp_state=chunks}) ->
-       Transport:send(Socket, [erlang:integer_to_list(iolist_size(Data), 16),
-               <<"\r\n">>, Data, <<"\r\n">>]).
-
-%% @doc Send an upgrade reply.
-%% @private
--spec upgrade_reply(cowboy_http:status(), cowboy_http:headers(), #http_req{})
-       -> {ok, #http_req{}}.
-upgrade_reply(Status, Headers, Req=#http_req{socket=Socket, transport=Transport,
-               pid=ReqPid, resp_state=waiting, resp_headers=RespHeaders}) ->
-       Head = response_head(Status, Headers, RespHeaders, [
-               {<<"Connection">>, <<"Upgrade">>}
-       ]),
-       Transport:send(Socket, Head),
-       ReqPid ! {?MODULE, resp_sent},
-       {ok, Req#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}.
-
-%% Misc API.
-
-%% @doc Compact the request data by removing all non-system information.
-%%
-%% This essentially removes the host, path, query string, bindings and headers.
-%% Use it when you really need to save up memory, for example when having
-%% many concurrent long-running connections.
--spec compact(#http_req{}) -> #http_req{}.
-compact(Req) ->
-       Req#http_req{host=undefined, host_info=undefined, path=undefined,
-               path_info=undefined, qs_vals=undefined,
-               bindings=undefined, headers=[],
-               p_headers=[], cookies=[]}.
-
-%% @doc Return the transport module and socket associated with a request.
-%%
-%% This exposes the same socket interface used internally by the HTTP protocol
-%% implementation to developers that needs low level access to the socket.
-%%
-%% It is preferred to use this in conjuction with the stream function support
-%% in `set_resp_body_fun/3' if this is used to write a response body directly
-%% to the socket. This ensures that the response headers are set correctly.
--spec transport(#http_req{}) -> {ok, module(), inet:socket()}.
-transport(#http_req{transport=Transport, socket=Socket}) ->
-       {ok, Transport, Socket}.
-
-%% Internal.
-
--spec parse_qs(binary(), fun((binary()) -> binary())) ->
-               list({binary(), binary() | true}).
-parse_qs(<<>>, _URLDecode) ->
-       [];
-parse_qs(Qs, URLDecode) ->
-       Tokens = binary:split(Qs, <<"&">>, [global, trim]),
-       [case binary:split(Token, <<"=">>) of
-               [Token] -> {URLDecode(Token), true};
-               [Name, Value] -> {URLDecode(Name), URLDecode(Value)}
-       end || Token <- Tokens].
-
--spec response_connection(cowboy_http:headers(), keepalive | close)
-       -> keepalive | close.
-response_connection([], Connection) ->
-       Connection;
-response_connection([{Name, Value}|Tail], Connection) ->
-       case Name of
-               'Connection' -> response_connection_parse(Value);
-               Name when is_atom(Name) -> response_connection(Tail, Connection);
-               Name ->
-                       Name2 = cowboy_bstr:to_lower(Name),
-                       case Name2 of
-                               <<"connection">> -> response_connection_parse(Value);
-                               _Any -> response_connection(Tail, Connection)
-                       end
-       end.
-
--spec response_connection_parse(binary()) -> keepalive | close.
-response_connection_parse(ReplyConn) ->
-       Tokens = cowboy_http:nonempty_list(ReplyConn, fun cowboy_http:token/2),
-       cowboy_http:connection_to_atom(Tokens).
-
--spec response_head(cowboy_http:status(), cowboy_http:headers(),
-       cowboy_http:headers(), cowboy_http:headers()) -> iolist().
-response_head(Status, Headers, RespHeaders, DefaultHeaders) ->
-       StatusLine = <<"HTTP/1.1 ", (status(Status))/binary, "\r\n">>,
-       Headers2 = [{header_to_binary(Key), Value} || {Key, Value} <- Headers],
-       Headers3 = merge_headers(
-               merge_headers(Headers2, RespHeaders),
-               DefaultHeaders),
-       Headers4 = [[Key, <<": ">>, Value, <<"\r\n">>]
-               || {Key, Value} <- Headers3],
-       [StatusLine, Headers4, <<"\r\n">>].
-
--spec merge_headers(cowboy_http:headers(), cowboy_http:headers())
-       -> cowboy_http:headers().
-merge_headers(Headers, []) ->
-       Headers;
-merge_headers(Headers, [{Name, Value}|Tail]) ->
-       Headers2 = case lists:keymember(Name, 1, Headers) of
-               true -> Headers;
-               false -> Headers ++ [{Name, Value}]
-       end,
-       merge_headers(Headers2, Tail).
-
--spec atom_to_connection(keepalive) -> <<_:80>>;
-                                               (close) -> <<_:40>>.
-atom_to_connection(keepalive) ->
-       <<"keep-alive">>;
-atom_to_connection(close) ->
-       <<"close">>.
-
--spec status(cowboy_http:status()) -> binary().
-status(100) -> <<"100 Continue">>;
-status(101) -> <<"101 Switching Protocols">>;
-status(102) -> <<"102 Processing">>;
-status(200) -> <<"200 OK">>;
-status(201) -> <<"201 Created">>;
-status(202) -> <<"202 Accepted">>;
-status(203) -> <<"203 Non-Authoritative Information">>;
-status(204) -> <<"204 No Content">>;
-status(205) -> <<"205 Reset Content">>;
-status(206) -> <<"206 Partial Content">>;
-status(207) -> <<"207 Multi-Status">>;
-status(226) -> <<"226 IM Used">>;
-status(300) -> <<"300 Multiple Choices">>;
-status(301) -> <<"301 Moved Permanently">>;
-status(302) -> <<"302 Found">>;
-status(303) -> <<"303 See Other">>;
-status(304) -> <<"304 Not Modified">>;
-status(305) -> <<"305 Use Proxy">>;
-status(306) -> <<"306 Switch Proxy">>;
-status(307) -> <<"307 Temporary Redirect">>;
-status(400) -> <<"400 Bad Request">>;
-status(401) -> <<"401 Unauthorized">>;
-status(402) -> <<"402 Payment Required">>;
-status(403) -> <<"403 Forbidden">>;
-status(404) -> <<"404 Not Found">>;
-status(405) -> <<"405 Method Not Allowed">>;
-status(406) -> <<"406 Not Acceptable">>;
-status(407) -> <<"407 Proxy Authentication Required">>;
-status(408) -> <<"408 Request Timeout">>;
-status(409) -> <<"409 Conflict">>;
-status(410) -> <<"410 Gone">>;
-status(411) -> <<"411 Length Required">>;
-status(412) -> <<"412 Precondition Failed">>;
-status(413) -> <<"413 Request Entity Too Large">>;
-status(414) -> <<"414 Request-URI Too Long">>;
-status(415) -> <<"415 Unsupported Media Type">>;
-status(416) -> <<"416 Requested Range Not Satisfiable">>;
-status(417) -> <<"417 Expectation Failed">>;
-status(418) -> <<"418 I'm a teapot">>;
-status(422) -> <<"422 Unprocessable Entity">>;
-status(423) -> <<"423 Locked">>;
-status(424) -> <<"424 Failed Dependency">>;
-status(425) -> <<"425 Unordered Collection">>;
-status(426) -> <<"426 Upgrade Required">>;
-status(500) -> <<"500 Internal Server Error">>;
-status(501) -> <<"501 Not Implemented">>;
-status(502) -> <<"502 Bad Gateway">>;
-status(503) -> <<"503 Service Unavailable">>;
-status(504) -> <<"504 Gateway Timeout">>;
-status(505) -> <<"505 HTTP Version Not Supported">>;
-status(506) -> <<"506 Variant Also Negotiates">>;
-status(507) -> <<"507 Insufficient Storage">>;
-status(510) -> <<"510 Not Extended">>;
-status(B) when is_binary(B) -> B.
-
--spec header_to_binary(cowboy_http:header()) -> binary().
-header_to_binary('Cache-Control') -> <<"Cache-Control">>;
-header_to_binary('Connection') -> <<"Connection">>;
-header_to_binary('Date') -> <<"Date">>;
-header_to_binary('Pragma') -> <<"Pragma">>;
-header_to_binary('Transfer-Encoding') -> <<"Transfer-Encoding">>;
-header_to_binary('Upgrade') -> <<"Upgrade">>;
-header_to_binary('Via') -> <<"Via">>;
-header_to_binary('Accept') -> <<"Accept">>;
-header_to_binary('Accept-Charset') -> <<"Accept-Charset">>;
-header_to_binary('Accept-Encoding') -> <<"Accept-Encoding">>;
-header_to_binary('Accept-Language') -> <<"Accept-Language">>;
-header_to_binary('Authorization') -> <<"Authorization">>;
-header_to_binary('From') -> <<"From">>;
-header_to_binary('Host') -> <<"Host">>;
-header_to_binary('If-Modified-Since') -> <<"If-Modified-Since">>;
-header_to_binary('If-Match') -> <<"If-Match">>;
-header_to_binary('If-None-Match') -> <<"If-None-Match">>;
-header_to_binary('If-Range') -> <<"If-Range">>;
-header_to_binary('If-Unmodified-Since') -> <<"If-Unmodified-Since">>;
-header_to_binary('Max-Forwards') -> <<"Max-Forwards">>;
-header_to_binary('Proxy-Authorization') -> <<"Proxy-Authorization">>;
-header_to_binary('Range') -> <<"Range">>;
-header_to_binary('Referer') -> <<"Referer">>;
-header_to_binary('User-Agent') -> <<"User-Agent">>;
-header_to_binary('Age') -> <<"Age">>;
-header_to_binary('Location') -> <<"Location">>;
-header_to_binary('Proxy-Authenticate') -> <<"Proxy-Authenticate">>;
-header_to_binary('Public') -> <<"Public">>;
-header_to_binary('Retry-After') -> <<"Retry-After">>;
-header_to_binary('Server') -> <<"Server">>;
-header_to_binary('Vary') -> <<"Vary">>;
-header_to_binary('Warning') -> <<"Warning">>;
-header_to_binary('Www-Authenticate') -> <<"Www-Authenticate">>;
-header_to_binary('Allow') -> <<"Allow">>;
-header_to_binary('Content-Base') -> <<"Content-Base">>;
-header_to_binary('Content-Encoding') -> <<"Content-Encoding">>;
-header_to_binary('Content-Language') -> <<"Content-Language">>;
-header_to_binary('Content-Length') -> <<"Content-Length">>;
-header_to_binary('Content-Location') -> <<"Content-Location">>;
-header_to_binary('Content-Md5') -> <<"Content-Md5">>;
-header_to_binary('Content-Range') -> <<"Content-Range">>;
-header_to_binary('Content-Type') -> <<"Content-Type">>;
-header_to_binary('Etag') -> <<"Etag">>;
-header_to_binary('Expires') -> <<"Expires">>;
-header_to_binary('Last-Modified') -> <<"Last-Modified">>;
-header_to_binary('Accept-Ranges') -> <<"Accept-Ranges">>;
-header_to_binary('Set-Cookie') -> <<"Set-Cookie">>;
-header_to_binary('Set-Cookie2') -> <<"Set-Cookie2">>;
-header_to_binary('X-Forwarded-For') -> <<"X-Forwarded-For">>;
-header_to_binary('Cookie') -> <<"Cookie">>;
-header_to_binary('Keep-Alive') -> <<"Keep-Alive">>;
-header_to_binary('Proxy-Connection') -> <<"Proxy-Connection">>;
-header_to_binary(B) when is_binary(B) -> B.
-
-%% Tests.
-
--ifdef(TEST).
-
-parse_qs_test_() ->
-       %% {Qs, Result}
-       Tests = [
-               {<<"">>, []},
-               {<<"a=b">>, [{<<"a">>, <<"b">>}]},
-               {<<"aaa=bbb">>, [{<<"aaa">>, <<"bbb">>}]},
-               {<<"a&b">>, [{<<"a">>, true}, {<<"b">>, true}]},
-               {<<"a=b&c&d=e">>, [{<<"a">>, <<"b">>},
-                       {<<"c">>, true}, {<<"d">>, <<"e">>}]},
-               {<<"a=b=c=d=e&f=g">>, [{<<"a">>, <<"b=c=d=e">>}, {<<"f">>, <<"g">>}]},
-               {<<"a+b=c+d">>, [{<<"a b">>, <<"c d">>}]}
-       ],
-       URLDecode = fun cowboy_http:urldecode/1,
-       [{Qs, fun() -> R = parse_qs(Qs, URLDecode) end} || {Qs, R} <- Tests].
-
--endif.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_static.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_static.erl
deleted file mode 100644 (file)
index da3bd33..0000000
+++ /dev/null
@@ -1,456 +0,0 @@
-%% Copyright (c) 2011, Magnus Klaar <magnus.klaar@gmail.com>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Static resource handler.
-%%
-%% This built in HTTP handler provides a simple file serving capability for
-%% cowboy applications. It should be considered an experimental feature because
-%% of it's dependency on the experimental REST handler. It's recommended to be
-%% used for small or temporary environments where it is not preferrable to set
-%% up a second server just to serve files.
-%%
-%% If this handler is used the Erlang node running the cowboy application must
-%% be configured to use an async thread pool. This is configured by adding the
-%% `+A $POOL_SIZE' argument to the `erl' command used to start the node. See
-%% <a href="http://erlang.org/pipermail/erlang-bugs/2012-January/002720.html">
-%% this reply</a> from the OTP team to erlang-bugs
-%%
-%% == Base configuration ==
-%%
-%% The handler must be configured with a request path prefix to serve files
-%% under and the path to a directory to read files from. The request path prefix
-%% is defined in the path pattern of the cowboy dispatch rule for the handler.
-%% The request path pattern must end with a ``'...''' token.
-%% The directory path can be set to either an absolute or relative path in the
-%% form of a list or binary string representation of a file system path. A list
-%% of binary path segments, as is used throughout cowboy, is also a valid
-%% directory path.
-%%
-%% The directory path can also be set to a relative path within the `priv/'
-%% directory of an application. This is configured by setting the value of the
-%% directory option to a tuple of the form `{priv_dir, Application, Relpath}'.
-%%
-%% ==== Examples ====
-%% ```
-%% %% Serve files from /var/www/ under http://example.com/static/
-%% {[<<"static">>, '...'], cowboy_http_static,
-%%     [{directory, "/var/www"}]}
-%%
-%% %% Serve files from the current working directory under http://example.com/static/
-%% {[<<"static">>, '...'], cowboy_http_static,
-%%     [{directory, <<"./">>}]}
-%%
-%% %% Serve files from cowboy/priv/www under http://example.com/
-%% {['...'], cowboy_http_static,
-%%     [{directory, {priv_dir, cowboy, [<<"www">>]}}]}
-%% '''
-%%
-%% == Content type configuration  ==
-%%
-%% By default the content type of all static resources will be set to
-%% `application/octet-stream'. This can be overriden by supplying a list
-%% of filename extension to mimetypes pairs in the `mimetypes' option.
-%% The filename extension should be a binary string including the leading dot.
-%% The mimetypes must be of a type that the `cowboy_http_rest' protocol can
-%% handle.
-%%
-%% The <a href="https://github.com/spawngrid/mimetypes">spawngrid/mimetypes</a>
-%% application, or an arbitrary function accepting the path to the file being
-%% served, can also be used to generate the list of content types for a static
-%% file resource. The function used must accept an additional argument after
-%% the file path argument.
-%%
-%% ==== Example ====
-%% ```
-%% %% Use a static list of content types.
-%% {[<<"static">>, '...'], cowboy_http_static,
-%%     [{directory, {priv_dir, cowboy, []}},
-%%      {mimetypes, [
-%%          {<<".css">>, [<<"text/css">>]},
-%%          {<<".js">>, [<<"application/javascript">>]}]}]}
-%%
-%% %% Use the default database in the mimetypes application.
-%% {[<<"static">>, '...', cowboy_http_static,
-%%     [{directory, {priv_dir, cowboy, []}},
-%%      {mimetypes, {fun mimetypes:path_to_mimes/2, default}}]]}
-%% '''
-%%
-%% == ETag Header Function ==
-%%
-%% The default behaviour of the static file handler is to not generate ETag
-%% headers. This is because generating ETag headers based on file metadata
-%% causes different servers in a cluster to generate different ETag values for
-%% the same file unless the metadata is also synced. Generating strong ETags
-%% based on the contents of a file is currently out of scope for this module.
-%%
-%% The default behaviour can be overridden to generate an ETag header based on
-%% a combination of the file path, file size, inode and mtime values. If the
-%% option value is a list of attribute names tagged with `attributes' a hex
-%% encoded CRC32 checksum of the attribute values are used as the ETag header
-%% value.
-%%
-%% If a strong ETag is required a user defined function for generating the
-%% header value can be supplied. The function must accept a proplist of the
-%% file attributes as the first argument and a second argument containing any
-%% additional data that the function requires. The function must return a
-%% `binary()' or `undefined'.
-%%
-%% ====  Examples ====
-%% ```
-%% %% A value of default is equal to not specifying the option.
-%% {[<<"static">>, '...', cowboy_http_static,
-%%     [{directory, {priv_dir, cowboy, []}},
-%%      {etag, default}]]}
-%%
-%% %% Use all avaliable ETag function arguments to generate a header value.
-%% {[<<"static">>, '...', cowboy_http_static,
-%%     [{directory, {priv_dir, cowboy, []}},
-%%      {etag, {attributes, [filepath, filesize, inode, mtime]}}]]}
-%%
-%% %% Use a user defined function to generate a strong ETag header value.
-%% {[<<"static">>, '...', cowboy_http_static,
-%%     [{directory, {priv_dir, cowboy, []}},
-%%      {etag, {fun generate_strong_etag/2, strong_etag_extra}}]]}
-%%
-%% generate_strong_etag(Arguments, strong_etag_extra) ->
-%%     {_, Filepath} = lists:keyfind(filepath, 1, Arguments),
-%%     {_, _Filesize} = lists:keyfind(filesize, 1, Arguments),
-%%     {_, _INode} = lists:keyfind(inode, 1, Arguments),
-%%     {_, _Modified} = lists:keyfind(mtime, 1, Arguments),
-%%     ChecksumCommand = lists:flatten(io_lib:format("sha1sum ~s", [Filepath])),
-%%     [Checksum|_] = string:tokens(os:cmd(ChecksumCommand), " "),
-%%     iolist_to_binary(Checksum).
-%% '''
--module(cowboy_http_static).
-
-%% include files
--include("http.hrl").
--include_lib("kernel/include/file.hrl").
-
-%% cowboy_http_protocol callbacks
--export([init/3]).
-
-%% cowboy_http_rest callbacks
--export([rest_init/2, allowed_methods/2, malformed_request/2,
-       resource_exists/2, forbidden/2, last_modified/2, generate_etag/2,
-       content_types_provided/2, file_contents/2]).
-
-%% internal
--export([path_to_mimetypes/2]).
-
-%% types
--type dirpath() :: string() | binary() | [binary()].
--type dirspec() :: dirpath() | {priv, atom(), dirpath()}.
--type mimedef() :: {binary(), binary(), [{binary(), binary()}]}.
--type etagarg() :: {filepath, binary()} | {mtime, calendar:datetime()}
-       | {inode, non_neg_integer()} | {filesize, non_neg_integer()}.
-
-%% handler state
--record(state, {
-       filepath  :: binary() | error,
-       fileinfo  :: {ok, #file_info{}} | {error, _} | error,
-       mimetypes :: {fun((binary(), T) -> [mimedef()]), T} | undefined,
-       etag_fun  :: {fun(([etagarg()], T) -> undefined | binary()), T}}).
-
-
-%% @private Upgrade from HTTP handler to REST handler.
-init({_Transport, http}, _Req, _Opts) ->
-       {upgrade, protocol, cowboy_http_rest}.
-
-
-%% @private Set up initial state of REST handler.
--spec rest_init(#http_req{}, list()) -> {ok, #http_req{}, #state{}}.
-rest_init(Req, Opts) ->
-       Directory = proplists:get_value(directory, Opts),
-       Directory1 = directory_path(Directory),
-       Mimetypes = proplists:get_value(mimetypes, Opts, []),
-       Mimetypes1 = case Mimetypes of
-               {_, _} -> Mimetypes;
-               [] -> {fun path_to_mimetypes/2, []};
-               [_|_] -> {fun path_to_mimetypes/2, Mimetypes}
-       end,
-       ETagFunction = case proplists:get_value(etag, Opts) of
-               default -> {fun no_etag_function/2, undefined};
-               undefined -> {fun no_etag_function/2, undefined};
-               {attributes, Attrs} -> {fun attr_etag_function/2, Attrs};
-               {_, _}=EtagFunction1 -> EtagFunction1
-       end,
-       {Filepath, Req1} = cowboy_http_req:path_info(Req),
-       State = case check_path(Filepath) of
-               error ->
-                       #state{filepath=error, fileinfo=error, mimetypes=undefined,
-                               etag_fun=ETagFunction};
-               ok ->
-                       Filepath1 = join_paths(Directory1, Filepath),
-                       Fileinfo = file:read_file_info(Filepath1),
-                       #state{filepath=Filepath1, fileinfo=Fileinfo, mimetypes=Mimetypes1,
-                               etag_fun=ETagFunction}
-       end,
-       {ok, Req1, State}.
-
-
-%% @private Only allow GET and HEAD requests on files.
--spec allowed_methods(#http_req{}, #state{}) ->
-               {[atom()], #http_req{}, #state{}}.
-allowed_methods(Req, State) ->
-       {['GET', 'HEAD'], Req, State}.
-
-%% @private
-malformed_request(Req, #state{filepath=error}=State) ->
-       {true, Req, State};
-malformed_request(Req, State) ->
-       {false, Req, State}.
-
-
-%% @private Check if the resource exists under the document root.
-resource_exists(Req, #state{fileinfo={error, _}}=State) ->
-       {false, Req, State};
-resource_exists(Req, #state{fileinfo={ok, Fileinfo}}=State) ->
-       {Fileinfo#file_info.type =:= regular, Req, State}.
-
-
-%% @private
-%% Access to a file resource is forbidden if it exists and the local node does
-%% not have permission to read it. Directory listings are always forbidden.
-forbidden(Req, #state{fileinfo={_, #file_info{type=directory}}}=State) ->
-       {true, Req, State};
-forbidden(Req, #state{fileinfo={error, eacces}}=State) ->
-       {true, Req, State};
-forbidden(Req, #state{fileinfo={error, _}}=State) ->
-       {false, Req, State};
-forbidden(Req, #state{fileinfo={ok, #file_info{access=Access}}}=State) ->
-       {not (Access =:= read orelse Access =:= read_write), Req, State}.
-
-
-%% @private Read the time a file system system object was last modified.
--spec last_modified(#http_req{}, #state{}) ->
-               {calendar:datetime(), #http_req{}, #state{}}.
-last_modified(Req, #state{fileinfo={ok, #file_info{mtime=Modified}}}=State) ->
-       {Modified, Req, State}.
-
-
-%% @private Generate the ETag header value for this file.
-%% The ETag header value is only generated if the resource is a file that
-%% exists in document root.
--spec generate_etag(#http_req{}, #state{}) ->
-       {undefined | binary(), #http_req{}, #state{}}.
-generate_etag(Req, #state{fileinfo={_, #file_info{type=regular, inode=INode,
-               mtime=Modified, size=Filesize}}, filepath=Filepath,
-               etag_fun={ETagFun, ETagData}}=State) ->
-       ETagArgs = [
-               {filepath, Filepath}, {filesize, Filesize},
-               {inode, INode}, {mtime, Modified}],
-       {ETagFun(ETagArgs, ETagData), Req, State};
-generate_etag(Req, State) ->
-       {undefined, Req, State}.
-
-
-%% @private Return the content type of a file.
--spec content_types_provided(#http_req{}, #state{}) -> tuple().
-content_types_provided(Req, #state{filepath=Filepath,
-               mimetypes={MimetypesFun, MimetypesData}}=State) ->
-       Mimetypes = [{T, file_contents}
-               || T <- MimetypesFun(Filepath, MimetypesData)],
-       {Mimetypes, Req, State}.
-
-
-%% @private Return a function that writes a file directly to the socket.
--spec file_contents(#http_req{}, #state{}) -> tuple().
-file_contents(Req, #state{filepath=Filepath,
-               fileinfo={ok, #file_info{size=Filesize}}}=State) ->
-       {ok, Transport, Socket} = cowboy_http_req:transport(Req),
-       Writefile = content_function(Transport, Socket, Filepath),
-       {{stream, Filesize, Writefile}, Req, State}.
-
-
-%% @private Return a function writing the contents of a file to a socket.
-%% The function returns the number of bytes written to the socket to enable
-%% the calling function to determine if the expected number of bytes were
-%% written to the socket.
--spec content_function(module(), inet:socket(), binary()) ->
-       fun(() -> {sent, non_neg_integer()}).
-content_function(Transport, Socket, Filepath) ->
-       %% `file:sendfile/2' will only work with the `cowboy_tcp_transport'
-       %% transport module. SSL or future SPDY transports that require the
-       %% content to be encrypted or framed as the content is sent.
-       case erlang:function_exported(file, sendfile, 2) of
-               false ->
-                       fun() -> sfallback(Transport, Socket, Filepath) end;
-               _ when Transport =/= cowboy_tcp_transport ->
-                       fun() -> sfallback(Transport, Socket, Filepath) end;
-               true ->
-                       fun() -> sendfile(Socket, Filepath) end
-       end.
-
-
-%% @private Sendfile fallback function.
--spec sfallback(module(), inet:socket(), binary()) -> {sent, non_neg_integer()}.
-sfallback(Transport, Socket, Filepath) ->
-       {ok, File} = file:open(Filepath, [read,binary,raw]),
-       sfallback(Transport, Socket, File, 0).
-
--spec sfallback(module(), inet:socket(), file:io_device(),
-               non_neg_integer()) -> {sent, non_neg_integer()}.
-sfallback(Transport, Socket, File, Sent) ->
-       case file:read(File, 16#1FFF) of
-               eof ->
-                       ok = file:close(File),
-                       {sent, Sent};
-               {ok, Bin} ->
-                       ok = Transport:send(Socket, Bin),
-                       sfallback(Transport, Socket, File, Sent + byte_size(Bin))
-       end.
-
-
-%% @private Wrapper for sendfile function.
--spec sendfile(inet:socket(), binary()) -> {sent, non_neg_integer()}.
-sendfile(Socket, Filepath) ->
-       {ok, Sent} = file:sendfile(Filepath, Socket),
-       {sent, Sent}.
-
--spec directory_path(dirspec()) -> dirpath().
-directory_path({priv_dir, App, []}) ->
-       priv_dir_path(App);
-directory_path({priv_dir, App, [H|_]=Path}) when is_integer(H) ->
-       filename:join(priv_dir_path(App), Path);
-directory_path({priv_dir, App, [H|_]=Path}) when is_binary(H) ->
-       filename:join(filename:split(priv_dir_path(App)) ++ Path);
-directory_path({priv_dir, App, Path}) when is_binary(Path) ->
-       filename:join(priv_dir_path(App), Path);
-directory_path(Path) ->
-       Path.
-
-
-%% @private Validate a request path for unsafe characters.
-%% There is no way to escape special characters in a filesystem path.
--spec check_path(Path::[binary()]) -> ok | error.
-check_path([]) -> ok;
-check_path([<<"">>|_T]) -> error;
-check_path([<<".">>|_T]) -> error;
-check_path([<<"..">>|_T]) -> error;
-check_path([H|T]) ->
-       case binary:match(H, <<"/">>) of
-               {_, _} -> error;
-               nomatch -> check_path(T)
-       end.
-
-
-%% @private Join the the directory and request paths.
--spec join_paths(dirpath(), [binary()]) -> binary().
-join_paths([H|_]=Dirpath, Filepath) when is_integer(H) ->
-       filename:join(filename:split(Dirpath) ++ Filepath);
-join_paths([H|_]=Dirpath, Filepath) when is_binary(H) ->
-       filename:join(Dirpath ++ Filepath);
-join_paths(Dirpath, Filepath) when is_binary(Dirpath) ->
-       filename:join([Dirpath] ++ Filepath);
-join_paths([], Filepath) ->
-       filename:join(Filepath).
-
-
-%% @private Return the path to the priv/ directory of an application.
--spec priv_dir_path(atom()) -> string().
-priv_dir_path(App) ->
-       case code:priv_dir(App) of
-               {error, bad_name} -> priv_dir_mod(App);
-               Dir -> Dir
-       end.
-
--spec priv_dir_mod(atom()) -> string().
-priv_dir_mod(Mod) ->
-       case code:which(Mod) of
-               File when not is_list(File) -> "../priv";
-               File -> filename:join([filename:dirname(File),"../priv"])
-       end.
-
-
-%% @private Use application/octet-stream as the default mimetype.
-%% If a list of extension - mimetype pairs are provided as the mimetypes
-%% an attempt to find the mimetype using the file extension. If no match
-%% is found the default mimetype is returned.
--spec path_to_mimetypes(binary(), [{binary(), [mimedef()]}]) ->
-               [mimedef()].
-path_to_mimetypes(Filepath, Extensions) when is_binary(Filepath) ->
-       Ext = filename:extension(Filepath),
-       case Ext of
-               <<>> -> default_mimetype();
-               _Ext -> path_to_mimetypes_(Ext, Extensions)
-       end.
-
--spec path_to_mimetypes_(binary(), [{binary(), [mimedef()]}]) -> [mimedef()].
-path_to_mimetypes_(Ext, Extensions) ->
-       case lists:keyfind(Ext, 1, Extensions) of
-               {_, MTs} -> MTs;
-               _Unknown -> default_mimetype()
-       end.
-
--spec default_mimetype() -> [mimedef()].
-default_mimetype() ->
-       [{<<"application">>, <<"octet-stream">>, []}].
-
-
-%% @private Do not send ETag headers in the default configuration.
--spec no_etag_function([etagarg()], undefined) -> undefined.
-no_etag_function(_Args, undefined) ->
-       undefined.
-
-%% @private A simple alternative is to send an ETag based on file attributes.
--type fileattr() :: filepath | filesize | mtime | inode.
--spec attr_etag_function([etagarg()], [fileattr()]) -> binary().
-attr_etag_function(Args, Attrs) ->
-       attr_etag_function(Args, Attrs, []).
-
--spec attr_etag_function([etagarg()], [fileattr()], [binary()]) -> binary().
-attr_etag_function(_Args, [], Acc) ->
-       list_to_binary(erlang:integer_to_list(erlang:crc32(Acc), 16));
-attr_etag_function(Args, [H|T], Acc) ->
-       {_, Value} = lists:keyfind(H, 1, Args),
-       attr_etag_function(Args, T, [term_to_binary(Value)|Acc]).
-
-
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
--define(_eq(E, I), ?_assertEqual(E, I)).
-
-check_path_test_() ->
-       C = fun check_path/1,
-       [?_eq(error, C([<<>>])),
-        ?_eq(ok, C([<<"abc">>])),
-        ?_eq(error, C([<<".">>])),
-        ?_eq(error, C([<<"..">>])),
-        ?_eq(error, C([<<"/">>]))
-       ].
-
-join_paths_test_() ->
-       P = fun join_paths/2,
-       [?_eq(<<"a">>, P([], [<<"a">>])),
-        ?_eq(<<"a/b/c">>, P(<<"a/b">>, [<<"c">>])),
-        ?_eq(<<"a/b/c">>, P("a/b", [<<"c">>])),
-        ?_eq(<<"a/b/c">>, P([<<"a">>, <<"b">>], [<<"c">>]))
-       ].
-
-directory_path_test_() ->
-       P = fun directory_path/1,
-       PL = fun(I) -> length(filename:split(P(I))) end,
-       Base = PL({priv_dir, cowboy, []}),
-       [?_eq(Base + 1, PL({priv_dir, cowboy, "a"})),
-        ?_eq(Base + 1, PL({priv_dir, cowboy, <<"a">>})),
-        ?_eq(Base + 1, PL({priv_dir, cowboy, [<<"a">>]})),
-        ?_eq(Base + 2, PL({priv_dir, cowboy, "a/b"})),
-        ?_eq(Base + 2, PL({priv_dir, cowboy, <<"a/b">>})),
-        ?_eq(Base + 2, PL({priv_dir, cowboy, [<<"a">>, <<"b">>]})),
-        ?_eq("a/b", P("a/b"))
-       ].
-
-
--endif.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket.erl
deleted file mode 100644 (file)
index 5100213..0000000
+++ /dev/null
@@ -1,530 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc WebSocket protocol implementation.
-%%
-%% Supports the protocol version 0 (hixie-76), version 7 (hybi-7)
-%% and version 8 (hybi-8, hybi-9 and hybi-10).
-%%
-%% Version 0 is supported by the following browsers:
-%% <ul>
-%%  <li>Firefox 4-5 (disabled by default)</li>
-%%  <li>Chrome 6-13</li>
-%%  <li>Safari 5.0.1+</li>
-%%  <li>Opera 11.00+ (disabled by default)</li>
-%% </ul>
-%%
-%% Version 7 is supported by the following browser:
-%% <ul>
-%%  <li>Firefox 6</li>
-%% </ul>
-%%
-%% Version 8+ is supported by the following browsers:
-%% <ul>
-%%  <li>Firefox 7+</li>
-%%  <li>Chrome 14+</li>
-%% </ul>
--module(cowboy_http_websocket).
-
--export([upgrade/4]). %% API.
--export([handler_loop/4]). %% Internal.
-
--include("include/http.hrl").
--include_lib("eunit/include/eunit.hrl").
-
--type opcode() :: 0 | 1 | 2 | 8 | 9 | 10.
--type mask_key() :: 0..16#ffffffff.
-
--record(state, {
-       version :: 0 | 7 | 8 | 13,
-       handler :: module(),
-       opts :: any(),
-       challenge = undefined :: undefined | binary() | {binary(), binary()},
-       timeout = infinity :: timeout(),
-       timeout_ref = undefined,
-       messages = undefined :: undefined | {atom(), atom(), atom()},
-       hibernate = false,
-       eop :: undefined | tuple(), %% hixie-76 specific.
-       origin = undefined :: undefined | binary() %% hixie-76 specific.
-}).
-
-%% @doc Upgrade a HTTP request to the WebSocket protocol.
-%%
-%% You do not need to call this function manually. To upgrade to the WebSocket
-%% protocol, you simply need to return <em>{upgrade, protocol, {@module}}</em>
-%% in your <em>cowboy_http_handler:init/3</em> handler function.
--spec upgrade(pid(), module(), any(), #http_req{}) -> closed | none().
-upgrade(ListenerPid, Handler, Opts, Req) ->
-       cowboy_listener:move_connection(ListenerPid, websocket, self()),
-       case catch websocket_upgrade(#state{handler=Handler, opts=Opts}, Req) of
-               {ok, State, Req2} -> handler_init(State, Req2);
-               {'EXIT', _Reason} -> upgrade_error(Req)
-       end.
-
--spec websocket_upgrade(#state{}, #http_req{}) -> {ok, #state{}, #http_req{}}.
-websocket_upgrade(State, Req) ->
-       {ConnTokens, Req2}
-               = cowboy_http_req:parse_header('Connection', Req),
-       true = lists:member(<<"upgrade">>, ConnTokens),
-       %% @todo Should probably send a 426 if the Upgrade header is missing.
-       {[<<"websocket">>], Req3} = cowboy_http_req:parse_header('Upgrade', Req2),
-       {Version, Req4} = cowboy_http_req:header(<<"Sec-Websocket-Version">>, Req3),
-       websocket_upgrade(Version, State, Req4).
-
-%% @todo Handle the Sec-Websocket-Protocol header.
-%% @todo Reply a proper error, don't die, if a required header is undefined.
--spec websocket_upgrade(undefined | <<_:8>>, #state{}, #http_req{})
-       -> {ok, #state{}, #http_req{}}.
-%% No version given. Assuming hixie-76 draft.
-%%
-%% We need to wait to send a reply back before trying to read the
-%% third part of the challenge key, because proxies will wait for
-%% a reply before sending it. Therefore we calculate the challenge
-%% key only in websocket_handshake/3.
-websocket_upgrade(undefined, State, Req=#http_req{meta=Meta}) ->
-       {Origin, Req2} = cowboy_http_req:header(<<"Origin">>, Req),
-       {Key1, Req3} = cowboy_http_req:header(<<"Sec-Websocket-Key1">>, Req2),
-       {Key2, Req4} = cowboy_http_req:header(<<"Sec-Websocket-Key2">>, Req3),
-       false = lists:member(undefined, [Origin, Key1, Key2]),
-       EOP = binary:compile_pattern(<< 255 >>),
-       {ok, State#state{version=0, origin=Origin, challenge={Key1, Key2},
-               eop=EOP}, Req4#http_req{meta=[{websocket_version, 0}|Meta]}};
-%% Versions 7 and 8. Implementation follows the hybi 7 through 17 drafts.
-websocket_upgrade(Version, State, Req=#http_req{meta=Meta})
-               when Version =:= <<"7">>; Version =:= <<"8">>;
-                       Version =:= <<"13">> ->
-       {Key, Req2} = cowboy_http_req:header(<<"Sec-Websocket-Key">>, Req),
-       false = Key =:= undefined,
-       Challenge = hybi_challenge(Key),
-       IntVersion = list_to_integer(binary_to_list(Version)),
-       {ok, State#state{version=IntVersion, challenge=Challenge},
-               Req2#http_req{meta=[{websocket_version, IntVersion}|Meta]}}.
-
--spec handler_init(#state{}, #http_req{}) -> closed | none().
-handler_init(State=#state{handler=Handler, opts=Opts},
-               Req=#http_req{transport=Transport}) ->
-       try Handler:websocket_init(Transport:name(), Req, Opts) of
-               {ok, Req2, HandlerState} ->
-                       websocket_handshake(State, Req2, HandlerState);
-               {ok, Req2, HandlerState, hibernate} ->
-                       websocket_handshake(State#state{hibernate=true},
-                               Req2, HandlerState);
-               {ok, Req2, HandlerState, Timeout} ->
-                       websocket_handshake(State#state{timeout=Timeout},
-                               Req2, HandlerState);
-               {ok, Req2, HandlerState, Timeout, hibernate} ->
-                       websocket_handshake(State#state{timeout=Timeout,
-                               hibernate=true}, Req2, HandlerState);
-               {shutdown, Req2} ->
-                       upgrade_denied(Req2)
-       catch Class:Reason ->
-               upgrade_error(Req),
-               error_logger:error_msg(
-                       "** Handler ~p terminating in websocket_init/3~n"
-                       "   for the reason ~p:~p~n** Options were ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Class, Reason, Opts, Req, erlang:get_stacktrace()])
-       end.
-
--spec upgrade_error(#http_req{}) -> closed.
-upgrade_error(Req) ->
-       {ok, _Req2} = cowboy_http_req:reply(400, [], [],
-               Req#http_req{resp_state=waiting}),
-       closed.
-
-%% @see cowboy_http_protocol:ensure_response/1
--spec upgrade_denied(#http_req{}) -> closed.
-upgrade_denied(#http_req{resp_state=done}) ->
-       closed;
-upgrade_denied(Req=#http_req{resp_state=waiting}) ->
-       {ok, _Req2} = cowboy_http_req:reply(400, [], [], Req),
-       closed;
-upgrade_denied(#http_req{method='HEAD', resp_state=chunks}) ->
-       closed;
-upgrade_denied(#http_req{socket=Socket, transport=Transport,
-               resp_state=chunks}) ->
-       Transport:send(Socket, <<"0\r\n\r\n">>),
-       closed.
-
--spec websocket_handshake(#state{}, #http_req{}, any()) -> closed | none().
-websocket_handshake(State=#state{version=0, origin=Origin,
-               challenge={Key1, Key2}}, Req=#http_req{socket=Socket,
-               transport=Transport, raw_host=Host, port=Port,
-               raw_path=Path, raw_qs=QS}, HandlerState) ->
-       Location = hixie76_location(Transport:name(), Host, Port, Path, QS),
-       {ok, Req2} = cowboy_http_req:upgrade_reply(
-               <<"101 WebSocket Protocol Handshake">>,
-               [{<<"Upgrade">>, <<"WebSocket">>},
-                {<<"Sec-Websocket-Location">>, Location},
-                {<<"Sec-Websocket-Origin">>, Origin}],
-               Req#http_req{resp_state=waiting}),
-       %% Flush the resp_sent message before moving on.
-       receive {cowboy_http_req, resp_sent} -> ok after 0 -> ok end,
-       %% We replied with a proper response. Proxies should be happy enough,
-       %% we can now read the 8 last bytes of the challenge keys and send
-       %% the challenge response directly to the socket.
-       case cowboy_http_req:body(8, Req2) of
-               {ok, Key3, Req3} ->
-                       Challenge = hixie76_challenge(Key1, Key2, Key3),
-                       Transport:send(Socket, Challenge),
-                       handler_before_loop(State#state{messages=Transport:messages()},
-                               Req3, HandlerState, <<>>);
-               _Any ->
-                       closed %% If an error happened reading the body, stop there.
-       end;
-websocket_handshake(State=#state{challenge=Challenge},
-               Req=#http_req{transport=Transport}, HandlerState) ->
-       {ok, Req2} = cowboy_http_req:upgrade_reply(
-               101,
-               [{<<"Upgrade">>, <<"websocket">>},
-                {<<"Sec-Websocket-Accept">>, Challenge}],
-               Req#http_req{resp_state=waiting}),
-       %% Flush the resp_sent message before moving on.
-       receive {cowboy_http_req, resp_sent} -> ok after 0 -> ok end,
-       handler_before_loop(State#state{messages=Transport:messages()},
-               Req2, HandlerState, <<>>).
-
--spec handler_before_loop(#state{}, #http_req{}, any(), binary()) -> closed | none().
-handler_before_loop(State=#state{hibernate=true},
-               Req=#http_req{socket=Socket, transport=Transport},
-               HandlerState, SoFar) ->
-       Transport:setopts(Socket, [{active, once}]),
-       State2 = handler_loop_timeout(State),
-       erlang:hibernate(?MODULE, handler_loop, [State2#state{hibernate=false},
-               Req, HandlerState, SoFar]);
-handler_before_loop(State, Req=#http_req{socket=Socket, transport=Transport},
-               HandlerState, SoFar) ->
-       Transport:setopts(Socket, [{active, once}]),
-       State2 = handler_loop_timeout(State),
-       handler_loop(State2, Req, HandlerState, SoFar).
-
--spec handler_loop_timeout(#state{}) -> #state{}.
-handler_loop_timeout(State=#state{timeout=infinity}) ->
-       State#state{timeout_ref=undefined};
-handler_loop_timeout(State=#state{timeout=Timeout, timeout_ref=PrevRef}) ->
-       _ = case PrevRef of undefined -> ignore; PrevRef ->
-               erlang:cancel_timer(PrevRef) end,
-       TRef = make_ref(),
-       erlang:send_after(Timeout, self(), {?MODULE, timeout, TRef}),
-       State#state{timeout_ref=TRef}.
-
-%% @private
--spec handler_loop(#state{}, #http_req{}, any(), binary()) -> closed | none().
-handler_loop(State=#state{messages={OK, Closed, Error}, timeout_ref=TRef},
-               Req=#http_req{socket=Socket}, HandlerState, SoFar) ->
-       receive
-               {OK, Socket, Data} ->
-                       websocket_data(State, Req, HandlerState,
-                               << SoFar/binary, Data/binary >>);
-               {Closed, Socket} ->
-                       handler_terminate(State, Req, HandlerState, {error, closed});
-               {Error, Socket, Reason} ->
-                       handler_terminate(State, Req, HandlerState, {error, Reason});
-               {?MODULE, timeout, TRef} ->
-                       websocket_close(State, Req, HandlerState, {normal, timeout});
-               {?MODULE, timeout, OlderTRef} when is_reference(OlderTRef) ->
-                       handler_loop(State, Req, HandlerState, SoFar);
-               Message ->
-                       handler_call(State, Req, HandlerState,
-                               SoFar, websocket_info, Message, fun handler_before_loop/4)
-       end.
-
--spec websocket_data(#state{}, #http_req{}, any(), binary()) -> closed | none().
-%% No more data.
-websocket_data(State, Req, HandlerState, <<>>) ->
-       handler_before_loop(State, Req, HandlerState, <<>>);
-%% hixie-76 close frame.
-websocket_data(State=#state{version=0}, Req, HandlerState,
-               << 255, 0, _Rest/binary >>) ->
-       websocket_close(State, Req, HandlerState, {normal, closed});
-%% hixie-76 data frame. We only support the frame type 0, same as the specs.
-websocket_data(State=#state{version=0, eop=EOP}, Req, HandlerState,
-               Data = << 0, _/binary >>) ->
-       case binary:match(Data, EOP) of
-               {Pos, 1} ->
-                       Pos2 = Pos - 1,
-                       << 0, Payload:Pos2/binary, 255, Rest/bits >> = Data,
-                       handler_call(State, Req, HandlerState,
-                               Rest, websocket_handle, {text, Payload}, fun websocket_data/4);
-               nomatch ->
-                       %% @todo We probably should allow limiting frame length.
-                       handler_before_loop(State, Req, HandlerState, Data)
-       end;
-%% incomplete hybi data frame.
-websocket_data(State=#state{version=Version}, Req, HandlerState, Data)
-               when Version =/= 0, byte_size(Data) =:= 1 ->
-       handler_before_loop(State, Req, HandlerState, Data);
-%% hybi data frame.
-%% @todo Handle Fin.
-websocket_data(State=#state{version=Version}, Req, HandlerState, Data)
-               when Version =/= 0 ->
-       << 1:1, 0:3, Opcode:4, Mask:1, PayloadLen:7, Rest/bits >> = Data,
-       case {PayloadLen, Rest} of
-               {126, _} when Opcode >= 8 -> websocket_close(
-                       State, Req, HandlerState, {error, protocol});
-               {127, _} when Opcode >= 8 -> websocket_close(
-                       State, Req, HandlerState, {error, protocol});
-               {126, << L:16, R/bits >>}  -> websocket_before_unmask(
-                       State, Req, HandlerState, Data, R, Opcode, Mask, L);
-               {126, Rest} -> websocket_before_unmask(
-                       State, Req, HandlerState, Data, Rest, Opcode, Mask, undefined);
-               {127, << 0:1, L:63, R/bits >>} -> websocket_before_unmask(
-                       State, Req, HandlerState, Data, R, Opcode, Mask, L);
-               {127, Rest} -> websocket_before_unmask(
-                       State, Req, HandlerState, Data, Rest, Opcode, Mask, undefined);
-               {PayloadLen, Rest} -> websocket_before_unmask(
-                       State, Req, HandlerState, Data, Rest, Opcode, Mask, PayloadLen)
-       end;
-%% Something was wrong with the frame. Close the connection.
-websocket_data(State, Req, HandlerState, _Bad) ->
-       websocket_close(State, Req, HandlerState, {error, badframe}).
-
-%% hybi routing depending on whether unmasking is needed.
--spec websocket_before_unmask(#state{}, #http_req{}, any(), binary(),
-       binary(), opcode(), 0 | 1, non_neg_integer() | undefined)
-       -> closed | none().
-websocket_before_unmask(State, Req, HandlerState, Data,
-               Rest, Opcode, Mask, PayloadLen) ->
-       case {Mask, PayloadLen} of
-               {0, 0} ->
-                       websocket_dispatch(State, Req, HandlerState, Rest, Opcode, <<>>);
-               {1, N} when N + 4 > byte_size(Rest); N =:= undefined ->
-                       %% @todo We probably should allow limiting frame length.
-                       handler_before_loop(State, Req, HandlerState, Data);
-               {1, _N} ->
-                       << MaskKey:32, Payload:PayloadLen/binary, Rest2/bits >> = Rest,
-                       websocket_unmask(State, Req, HandlerState, Rest2,
-                               Opcode, Payload, MaskKey)
-       end.
-
-%% hybi unmasking.
--spec websocket_unmask(#state{}, #http_req{}, any(), binary(),
-       opcode(), binary(), mask_key()) -> closed | none().
-websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, Payload, MaskKey) ->
-       websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, Payload, MaskKey, <<>>).
-
--spec websocket_unmask(#state{}, #http_req{}, any(), binary(),
-       opcode(), binary(), mask_key(), binary()) -> closed | none().
-websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, << O:32, Rest/bits >>, MaskKey, Acc) ->
-       T = O bxor MaskKey,
-       websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, Rest, MaskKey, << Acc/binary, T:32 >>);
-websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, << O:24 >>, MaskKey, Acc) ->
-       << MaskKey2:24, _:8 >> = << MaskKey:32 >>,
-       T = O bxor MaskKey2,
-       websocket_dispatch(State, Req, HandlerState, RemainingData,
-               Opcode, << Acc/binary, T:24 >>);
-websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, << O:16 >>, MaskKey, Acc) ->
-       << MaskKey2:16, _:16 >> = << MaskKey:32 >>,
-       T = O bxor MaskKey2,
-       websocket_dispatch(State, Req, HandlerState, RemainingData,
-               Opcode, << Acc/binary, T:16 >>);
-websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, << O:8 >>, MaskKey, Acc) ->
-       << MaskKey2:8, _:24 >> = << MaskKey:32 >>,
-       T = O bxor MaskKey2,
-       websocket_dispatch(State, Req, HandlerState, RemainingData,
-               Opcode, << Acc/binary, T:8 >>);
-websocket_unmask(State, Req, HandlerState, RemainingData,
-               Opcode, <<>>, _MaskKey, Acc) ->
-       websocket_dispatch(State, Req, HandlerState, RemainingData,
-               Opcode, Acc).
-
-%% hybi dispatching.
--spec websocket_dispatch(#state{}, #http_req{}, any(), binary(),
-       opcode(), binary()) -> closed | none().
-%% @todo Fragmentation.
-%~ websocket_dispatch(State, Req, HandlerState, RemainingData, 0, Payload) ->
-%% Text frame.
-websocket_dispatch(State, Req, HandlerState, RemainingData, 1, Payload) ->
-       handler_call(State, Req, HandlerState, RemainingData,
-               websocket_handle, {text, Payload}, fun websocket_data/4);
-%% Binary frame.
-websocket_dispatch(State, Req, HandlerState, RemainingData, 2, Payload) ->
-       handler_call(State, Req, HandlerState, RemainingData,
-               websocket_handle, {binary, Payload}, fun websocket_data/4);
-%% Close control frame.
-%% @todo Handle the optional Payload.
-websocket_dispatch(State, Req, HandlerState, _RemainingData, 8, _Payload) ->
-       websocket_close(State, Req, HandlerState, {normal, closed});
-%% Ping control frame. Send a pong back and forward the ping to the handler.
-websocket_dispatch(State, Req=#http_req{socket=Socket, transport=Transport},
-               HandlerState, RemainingData, 9, Payload) ->
-       Len = hybi_payload_length(byte_size(Payload)),
-       Transport:send(Socket, << 1:1, 0:3, 10:4, 0:1, Len/bits, Payload/binary >>),
-       handler_call(State, Req, HandlerState, RemainingData,
-               websocket_handle, {ping, Payload}, fun websocket_data/4);
-%% Pong control frame.
-websocket_dispatch(State, Req, HandlerState, RemainingData, 10, Payload) ->
-       handler_call(State, Req, HandlerState, RemainingData,
-               websocket_handle, {pong, Payload}, fun websocket_data/4).
-
--spec handler_call(#state{}, #http_req{}, any(), binary(),
-       atom(), any(), fun()) -> closed | none().
-handler_call(State=#state{handler=Handler, opts=Opts}, Req, HandlerState,
-               RemainingData, Callback, Message, NextState) ->
-       try Handler:Callback(Message, Req, HandlerState) of
-               {ok, Req2, HandlerState2} ->
-                       NextState(State, Req2, HandlerState2, RemainingData);
-               {ok, Req2, HandlerState2, hibernate} ->
-                       NextState(State#state{hibernate=true},
-                               Req2, HandlerState2, RemainingData);
-               {reply, Payload, Req2, HandlerState2} ->
-                       websocket_send(Payload, State, Req2),
-                       NextState(State, Req2, HandlerState2, RemainingData);
-               {reply, Payload, Req2, HandlerState2, hibernate} ->
-                       websocket_send(Payload, State, Req2),
-                       NextState(State#state{hibernate=true},
-                               Req2, HandlerState2, RemainingData);
-               {shutdown, Req2, HandlerState2} ->
-                       websocket_close(State, Req2, HandlerState2, {normal, shutdown})
-       catch Class:Reason ->
-               error_logger:error_msg(
-                       "** Handler ~p terminating in ~p/3~n"
-                       "   for the reason ~p:~p~n** Message was ~p~n"
-                       "** Options were ~p~n** Handler state was ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Callback, Class, Reason, Message, Opts,
-                        HandlerState, Req, erlang:get_stacktrace()]),
-               websocket_close(State, Req, HandlerState, {error, handler})
-       end.
-
--spec websocket_send(binary(), #state{}, #http_req{}) -> closed | ignore.
-%% hixie-76 text frame.
-websocket_send({text, Payload}, #state{version=0},
-               #http_req{socket=Socket, transport=Transport}) ->
-       Transport:send(Socket, [0, Payload, 255]);
-%% Ignore all unknown frame types for compatibility with hixie 76.
-websocket_send(_Any, #state{version=0}, _Req) ->
-       ignore;
-websocket_send({Type, Payload}, _State,
-               #http_req{socket=Socket, transport=Transport}) ->
-       Opcode = case Type of
-               text -> 1;
-               binary -> 2;
-               ping -> 9;
-               pong -> 10
-       end,
-       Len = hybi_payload_length(iolist_size(Payload)),
-       Transport:send(Socket, [<< 1:1, 0:3, Opcode:4, 0:1, Len/bits >>,
-               Payload]).
-
--spec websocket_close(#state{}, #http_req{}, any(), {atom(), atom()}) -> closed.
-websocket_close(State=#state{version=0}, Req=#http_req{socket=Socket,
-               transport=Transport}, HandlerState, Reason) ->
-       Transport:send(Socket, << 255, 0 >>),
-       handler_terminate(State, Req, HandlerState, Reason);
-%% @todo Send a Payload? Using Reason is usually good but we're quite careless.
-websocket_close(State, Req=#http_req{socket=Socket,
-               transport=Transport}, HandlerState, Reason) ->
-       Transport:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>),
-       handler_terminate(State, Req, HandlerState, Reason).
-
--spec handler_terminate(#state{}, #http_req{},
-       any(), atom() | {atom(), atom()}) -> closed.
-handler_terminate(#state{handler=Handler, opts=Opts},
-               Req, HandlerState, TerminateReason) ->
-       try
-               Handler:websocket_terminate(TerminateReason, Req, HandlerState)
-       catch Class:Reason ->
-               error_logger:error_msg(
-                       "** Handler ~p terminating in websocket_terminate/3~n"
-                       "   for the reason ~p:~p~n** Initial reason was ~p~n"
-                       "** Options were ~p~n** Handler state was ~p~n"
-                       "** Request was ~p~n** Stacktrace: ~p~n~n",
-                       [Handler, Class, Reason, TerminateReason, Opts,
-                        HandlerState, Req, erlang:get_stacktrace()])
-       end,
-       closed.
-
-%% hixie-76 specific.
-
--spec hixie76_challenge(binary(), binary(), binary()) -> binary().
-hixie76_challenge(Key1, Key2, Key3) ->
-       IntKey1 = hixie76_key_to_integer(Key1),
-       IntKey2 = hixie76_key_to_integer(Key2),
-       erlang:md5(<< IntKey1:32, IntKey2:32, Key3/binary >>).
-
--spec hixie76_key_to_integer(binary()) -> integer().
-hixie76_key_to_integer(Key) ->
-       Number = list_to_integer([C || << C >> <= Key, C >= $0, C =< $9]),
-       Spaces = length([C || << C >> <= Key, C =:= 32]),
-       Number div Spaces.
-
--spec hixie76_location(atom(), binary(), inet:ip_port(), binary(), binary())
-       -> binary().
-hixie76_location(Protocol, Host, Port, Path, <<>>) ->
-    << (hixie76_location_protocol(Protocol))/binary, "://", Host/binary,
-       (hixie76_location_port(Protocol, Port))/binary, Path/binary>>;
-hixie76_location(Protocol, Host, Port, Path, QS) ->
-    << (hixie76_location_protocol(Protocol))/binary, "://", Host/binary,
-       (hixie76_location_port(Protocol, Port))/binary, Path/binary, "?", QS/binary >>.
-
--spec hixie76_location_protocol(atom()) -> binary().
-hixie76_location_protocol(ssl) -> <<"wss">>;
-hixie76_location_protocol(_)   -> <<"ws">>.
-
-%% @todo We should add a secure/0 function to transports
-%% instead of relying on their name.
--spec hixie76_location_port(atom(), inet:ip_port()) -> binary().
-hixie76_location_port(ssl, 443) ->
-       <<>>;
-hixie76_location_port(tcp, 80) ->
-       <<>>;
-hixie76_location_port(_, Port) ->
-       <<":", (list_to_binary(integer_to_list(Port)))/binary>>.
-
-%% hybi specific.
-
--spec hybi_challenge(binary()) -> binary().
-hybi_challenge(Key) ->
-       Bin = << Key/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" >>,
-       base64:encode(crypto:sha(Bin)).
-
--spec hybi_payload_length(0..16#7fffffffffffffff)
-       -> << _:7 >> | << _:23 >> | << _:71 >>.
-hybi_payload_length(N) ->
-       case N of
-               N when N =< 125 -> << N:7 >>;
-               N when N =< 16#ffff -> << 126:7, N:16 >>;
-               N when N =< 16#7fffffffffffffff -> << 127:7, N:64 >>
-       end.
-
-%% Tests.
-
--ifdef(TEST).
-
-hixie76_location_test() ->
-       ?assertEqual(<<"ws://localhost/path">>,
-               hixie76_location(tcp, <<"localhost">>, 80, <<"/path">>, <<>>)),
-       ?assertEqual(<<"ws://localhost:443/path">>,
-               hixie76_location(tcp, <<"localhost">>, 443, <<"/path">>, <<>>)),
-       ?assertEqual(<<"ws://localhost:8080/path">>,
-               hixie76_location(tcp, <<"localhost">>, 8080, <<"/path">>, <<>>)),
-       ?assertEqual(<<"ws://localhost:8080/path?dummy=2785">>,
-               hixie76_location(tcp, <<"localhost">>, 8080, <<"/path">>, <<"dummy=2785">>)),
-       ?assertEqual(<<"wss://localhost/path">>,
-               hixie76_location(ssl, <<"localhost">>, 443, <<"/path">>, <<>>)),
-       ?assertEqual(<<"wss://localhost:8443/path">>,
-               hixie76_location(ssl, <<"localhost">>, 8443, <<"/path">>, <<>>)),
-       ?assertEqual(<<"wss://localhost:8443/path?dummy=2785">>,
-               hixie76_location(ssl, <<"localhost">>, 8443, <<"/path">>, <<"dummy=2785">>)),
-       ok.
-
--endif.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket_handler.erl
deleted file mode 100644 (file)
index 2ea0a46..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Handler for HTTP WebSocket requests.
-%%
-%% WebSocket handlers must implement four callbacks: <em>websocket_init/3</em>,
-%% <em>websocket_handle/3</em>, <em>websocket_info/3</em> and
-%% <em>websocket_terminate/3</em>. These callbacks will only be called if the
-%% connection is upgraded to WebSocket in the HTTP handler's <em>init/3</em>
-%% callback. They are then called in that order, although
-%% <em>websocket_handle/3</em> will be called for each packet received,
-%% and <em>websocket_info</em> for each message received.
-%%
-%% <em>websocket_init/3</em> is meant for initialization. It receives
-%% information about the transport and protocol used, along with the handler
-%% options from the dispatch list. You can define a request-wide state here.
-%% If you are going to want to compact the request, you should probably do it
-%% here.
-%%
-%% <em>websocket_handle/3</em> receives the data from the socket. It can reply
-%% something, do nothing or close the connection.
-%%
-%% <em>websocket_info/3</em> receives messages sent to the process. It has
-%% the same reply format as <em>websocket_handle/3</em> described above. Note
-%% that unlike in a <em>gen_server</em>, when <em>websocket_info/3</em>
-%% replies something, it is always to the socket, not to the process that
-%% originated the message.
-%%
-%% <em>websocket_terminate/3</em> is meant for cleaning up. It also receives
-%% the request and the state previously defined, along with a reason for
-%% termination.
-%%
-%% All of <em>websocket_init/3</em>, <em>websocket_handle/3</em> and
-%% <em>websocket_info/3</em> can decide to hibernate the process by adding
-%% an extra element to the returned tuple, containing the atom
-%% <em>hibernate</em>. Doing so helps save memory and improve CPU usage.
--module(cowboy_http_websocket_handler).
-
--export([behaviour_info/1]).
-
-%% @private
--spec behaviour_info(_)
-       -> undefined | [{websocket_handle, 3} | {websocket_info, 3}
-               | {websocket_init, 3} | {websocket_terminate, 3}, ...].
-behaviour_info(callbacks) ->
-       [{websocket_init, 3}, {websocket_handle, 3},
-        {websocket_info, 3}, {websocket_terminate, 3}];
-behaviour_info(_Other) ->
-       undefined.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener.erl
deleted file mode 100644 (file)
index 4565b31..0000000
+++ /dev/null
@@ -1,174 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Public API for managing listeners.
--module(cowboy_listener).
--behaviour(gen_server).
-
--export([start_link/0, stop/1,
-       add_connection/3, move_connection/3, remove_connection/2, wait/3]). %% API.
--export([init/1, handle_call/3, handle_cast/2,
-       handle_info/2, terminate/2, code_change/3]). %% gen_server.
-
--record(state, {
-       req_pools = [] :: [{atom(), non_neg_integer()}],
-       reqs_table,
-       queue = []
-}).
-
-%% API.
-
-%% @private
-%%
-%% We set the process priority to high because cowboy_listener is the central
-%% gen_server in Cowboy and is used to manage all the incoming connections.
-%% Setting the process priority to high ensures the connection-related code
-%% will always be executed when a connection needs it, allowing Cowboy to
-%% scale far beyond what it would with a normal priority.
--spec start_link() -> {ok, pid()}.
-start_link() ->
-       gen_server:start_link(?MODULE, [], [{spawn_opt, [{priority, high}]}]).
-
-%% @private
--spec stop(pid()) -> stopped.
-stop(ServerPid) ->
-       gen_server:call(ServerPid, stop).
-
-%% @doc Add a connection to the given pool in the listener.
-%%
-%% Pools of connections are used to restrict the maximum number of connections
-%% depending on their type. By default, Cowboy add all connections to the
-%% pool <em>default</em>. It also checks for the maximum number of connections
-%% in that pool before accepting again.
-%%
-%% When a process managing a connection dies, the process is removed from the
-%% pool. If the socket has been sent to another process, it is up to the
-%% protocol code to inform the listener of the new <em>ConnPid</em> by removing
-%% the previous and adding the new one.
--spec add_connection(pid(), atom(), pid()) -> {ok, non_neg_integer()}.
-add_connection(ServerPid, Pool, ConnPid) ->
-       gen_server:call(ServerPid, {add_connection, Pool, ConnPid}).
-
-%% @doc Move a connection from one pool to another.
--spec move_connection(pid(), atom(), pid()) -> ok.
-move_connection(ServerPid, DestPool, ConnPid) ->
-       gen_server:cast(ServerPid, {move_connection, DestPool, ConnPid}).
-
-%% @doc Remove the given connection from its pool.
--spec remove_connection(pid(), pid()) -> ok.
-remove_connection(ServerPid, ConnPid) ->
-       gen_server:cast(ServerPid, {remove_connection, ConnPid}).
-
-%% @doc Wait until the number of connections in the given pool gets below
-%% the given threshold.
-%%
-%% This function will not return until the number of connections in the pool
-%% gets below <em>MaxConns</em>. It makes use of <em>gen_server:reply/2</em>
-%% to make the process wait for a reply indefinitely.
--spec wait(pid(), atom(), non_neg_integer()) -> ok.
-wait(ServerPid, Pool, MaxConns) ->
-       gen_server:call(ServerPid, {wait, Pool, MaxConns}, infinity).
-
-%% gen_server.
-
-%% @private
--spec init([]) -> {ok, #state{}}.
-init([]) ->
-       ReqsTablePid = ets:new(requests_table, [set, private]),
-       {ok, #state{reqs_table=ReqsTablePid}}.
-
-%% @private
--spec handle_call(_, _, State)
-       -> {reply, ignored, State} | {stop, normal, stopped, State}.
-handle_call({add_connection, Pool, ConnPid}, _From, State=#state{
-               req_pools=Pools, reqs_table=ReqsTable}) ->
-       MonitorRef = erlang:monitor(process, ConnPid),
-       {NbConnsRet, Pools2} = case lists:keyfind(Pool, 1, Pools) of
-               false ->
-                       {1, [{Pool, 1}|Pools]};
-               {Pool, NbConns} ->
-                       NbConns2 = NbConns + 1,
-                       {NbConns2, [{Pool, NbConns2}|lists:keydelete(Pool, 1, Pools)]}
-       end,
-       ets:insert(ReqsTable, {ConnPid, {MonitorRef, Pool}}),
-       {reply, {ok, NbConnsRet}, State#state{req_pools=Pools2}};
-handle_call({wait, Pool, MaxConns}, From, State=#state{
-               req_pools=Pools, queue=Queue}) ->
-       case lists:keyfind(Pool, 1, Pools) of
-               {Pool, NbConns} when NbConns > MaxConns ->
-                       {noreply, State#state{queue=[From|Queue]}};
-               _Any ->
-                       {reply, ok, State}
-       end;
-handle_call(stop, _From, State) ->
-       {stop, normal, stopped, State};
-handle_call(_Request, _From, State) ->
-       {reply, ignored, State}.
-
-%% @private
--spec handle_cast(_, State) -> {noreply, State}.
-handle_cast({move_connection, DestPool, ConnPid}, State=#state{
-               req_pools=Pools, reqs_table=ReqsTable}) ->
-       {MonitorRef, SrcPool} = ets:lookup_element(ReqsTable, ConnPid, 2),
-       ets:insert(ReqsTable, {ConnPid, {MonitorRef, DestPool}}),
-       {SrcPool, SrcNbConns} = lists:keyfind(SrcPool, 1, Pools),
-       DestNbConns = case lists:keyfind(DestPool, 1, Pools) of
-               false -> 1;
-               {DestPool, NbConns} -> NbConns + 1
-       end,
-       Pools2 = lists:keydelete(SrcPool, 1, lists:keydelete(DestPool, 1, Pools)),
-       Pools3 = [{SrcPool, SrcNbConns - 1}, {DestPool, DestNbConns}|Pools2],
-       {noreply, State#state{req_pools=Pools3}};
-handle_cast({remove_connection, ConnPid}, State) ->
-       State2 = remove_pid(ConnPid, State),
-       {noreply, State2};
-handle_cast(_Msg, State) ->
-       {noreply, State}.
-
-%% @private
--spec handle_info(_, State) -> {noreply, State}.
-handle_info({'DOWN', _Ref, process, Pid, _Info}, State) ->
-       State2 = remove_pid(Pid, State),
-       {noreply, State2};
-handle_info(_Info, State) ->
-       {noreply, State}.
-
-%% @private
--spec terminate(_, _) -> ok.
-terminate(_Reason, _State) ->
-       ok.
-
-%% @private
--spec code_change(_, State, _) -> {ok, State}.
-code_change(_OldVsn, State, _Extra) ->
-       {ok, State}.
-
-%% Internal.
-
-%% @private
--spec remove_pid(pid(), State) -> State.
-remove_pid(Pid, State=#state{
-               req_pools=Pools, reqs_table=ReqsTable, queue=Queue}) ->
-       {MonitorRef, Pool} = ets:lookup_element(ReqsTable, Pid, 2),
-       erlang:demonitor(MonitorRef, [flush]),
-       {Pool, NbConns} = lists:keyfind(Pool, 1, Pools),
-       Pools2 = [{Pool, NbConns - 1}|lists:keydelete(Pool, 1, Pools)],
-       ets:delete(ReqsTable, Pid),
-       case Queue of
-               [] ->
-                       State#state{req_pools=Pools2};
-               [Client|Queue2] ->
-                       gen_server:reply(Client, ok),
-                       State#state{req_pools=Pools2, queue=Queue2}
-       end.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener_sup.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener_sup.erl
deleted file mode 100644 (file)
index aca2b0b..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @private
--module(cowboy_listener_sup).
--behaviour(supervisor).
-
--export([start_link/5]). %% API.
--export([init/1]). %% supervisor.
-
-%% API.
-
--spec start_link(non_neg_integer(), module(), any(), module(), any())
-       -> {ok, pid()}.
-start_link(NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts) ->
-       {ok, SupPid} = supervisor:start_link(?MODULE, []),
-       {ok, ListenerPid} = supervisor:start_child(SupPid,
-               {cowboy_listener, {cowboy_listener, start_link, []},
-                permanent, 5000, worker, [cowboy_listener]}),
-       {ok, ReqsPid} = supervisor:start_child(SupPid,
-               {cowboy_requests_sup, {cowboy_requests_sup, start_link, []},
-                permanent, 5000, supervisor, [cowboy_requests_sup]}),
-       {ok, _PoolPid} = supervisor:start_child(SupPid,
-               {cowboy_acceptors_sup, {cowboy_acceptors_sup, start_link, [
-                       NbAcceptors, Transport, TransOpts,
-                       Protocol, ProtoOpts, ListenerPid, ReqsPid
-               ]}, permanent, 5000, supervisor, [cowboy_acceptors_sup]}),
-       {ok, SupPid}.
-
-%% supervisor.
-
--spec init([]) -> {ok, {{one_for_all, 10, 10}, []}}.
-init([]) ->
-       {ok, {{one_for_all, 10, 10}, []}}.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_multipart.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_multipart.erl
deleted file mode 100644 (file)
index 0bd123a..0000000
+++ /dev/null
@@ -1,249 +0,0 @@
-%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Multipart parser.
--module(cowboy_multipart).
-
--type part_parser() :: any().
--type parser(T) :: fun((binary()) -> T).
--type more(T) :: T | {more, parser(T)}.
--type part_result() :: any().
--type headers() :: any().
--type http_headers() :: [{atom() | binary(), binary()}].
--type body_cont() :: any().
--type cont(T) :: fun(() -> T).
--type body_result() :: any().
--type end_of_part() :: {end_of_part, cont(more(part_result()))}.
--type disposition() :: {binary(), [{binary(), binary()}]}.
-
--export([parser/1, content_disposition/1]).
-
--include_lib("eunit/include/eunit.hrl").
-
-%% API.
-
-%% @doc Return a multipart parser for the given boundary.
--spec parser(binary()) -> part_parser().
-parser(Boundary) when is_binary(Boundary) ->
-       fun (Bin) when is_binary(Bin) -> parse(Bin, Boundary) end.
-
-%% @doc Parse a content disposition.
-%% @todo Parse the MIME header instead of the HTTP one.
--spec content_disposition(binary()) -> disposition().
-content_disposition(Data) ->
-       cowboy_http:token_ci(Data,
-               fun (_Rest, <<>>) -> {error, badarg};
-                       (Rest, Disposition) ->
-                               cowboy_http:content_type_params(Rest,
-                                       fun (Params) -> {Disposition, Params} end, [])
-               end).
-
-%% Internal.
-
-%% @doc Entry point of the multipart parser, skips over the preamble if any.
--spec parse(binary(), binary()) -> more(part_result()).
-parse(Bin, Boundary) when byte_size(Bin) >= byte_size(Boundary) + 2 ->
-       BoundarySize = byte_size(Boundary),
-       Pattern = pattern(Boundary),
-       case Bin of
-               <<"--", Boundary:BoundarySize/binary, Rest/binary>> ->
-                       % Data starts with initial boundary, skip preamble parsing.
-                       parse_boundary_tail(Rest, Pattern);
-               _ ->
-                       % Parse preamble.
-                       skip(Bin, Pattern)
-       end;
-parse(Bin, Boundary) ->
-       % Not enough data to know if the data begins with a boundary.
-       more(Bin, fun (NewBin) -> parse(NewBin, Boundary) end).
-
--type pattern() :: {binary:cp(), non_neg_integer()}.
-
-%% @doc Return a compiled binary pattern with its size in bytes.
-%% The pattern is the boundary prepended with "\r\n--".
--spec pattern(binary()) -> pattern().
-pattern(Boundary) ->
-       MatchPattern = <<"\r\n--", Boundary/binary>>,
-       {binary:compile_pattern(MatchPattern), byte_size(MatchPattern)}.
-
-%% @doc Parse remaining characters of a line beginning with the boundary.
-%% If followed by "--", <em>eof</em> is returned and parsing is finished.
--spec parse_boundary_tail(binary(), pattern()) -> more(part_result()).
-parse_boundary_tail(Bin, Pattern) when byte_size(Bin) >= 2 ->
-       case Bin of
-               <<"--", _Rest/binary>> ->
-                       % Boundary is followed by "--", end parsing.
-                       eof;
-               _ ->
-                       % No dash after boundary, proceed with unknown chars and lwsp
-                       % removal.
-                       parse_boundary_eol(Bin, Pattern)
-       end;
-parse_boundary_tail(Bin, Pattern) ->
-       % Boundary may be followed by "--", need more data.
-       more(Bin, fun (NewBin) -> parse_boundary_tail(NewBin, Pattern) end).
-
-%% @doc Skip whitespace and unknown chars until CRLF.
--spec parse_boundary_eol(binary(), pattern()) -> more(part_result()).
-parse_boundary_eol(Bin, Pattern) ->
-       case binary:match(Bin, <<"\r\n">>) of
-               {CrlfStart, _Length} ->
-                       % End of line found, remove optional whitespace.
-                       <<_:CrlfStart/binary, Rest/binary>> = Bin,
-                       Fun = fun (Rest2) -> parse_boundary_crlf(Rest2, Pattern) end,
-                       cowboy_http:whitespace(Rest, Fun);
-               nomatch ->
-                       % CRLF not found in the given binary.
-                       RestStart = lists:max([byte_size(Bin) - 1, 0]),
-                       <<_:RestStart/binary, Rest/binary>> = Bin,
-                       more(Rest, fun (NewBin) -> parse_boundary_eol(NewBin, Pattern) end)
-       end.
-
--spec parse_boundary_crlf(binary(), pattern()) -> more(part_result()).
-parse_boundary_crlf(<<"\r\n", Rest/binary>>, Pattern) ->
-       % The binary is at least 2 bytes long as this function is only called by
-       % parse_boundary_eol/3 when CRLF has been found so a more tuple will never
-       % be returned from here.
-       parse_headers(Rest, Pattern);
-parse_boundary_crlf(Bin, Pattern) ->
-       % Unspecified behaviour here: RFC 2046 doesn't say what to do when LWSP is
-       % not followed directly by a new line. In this implementation it is
-       % considered part of the boundary so EOL needs to be searched again.
-       parse_boundary_eol(Bin, Pattern).
-
--spec parse_headers(binary(), pattern()) -> more(part_result()).
-parse_headers(Bin, Pattern) ->
-  parse_headers(Bin, Pattern, []).
-
--spec parse_headers(binary(), pattern(), http_headers()) -> more(part_result()).
-parse_headers(Bin, Pattern, Acc) ->
-       case erlang:decode_packet(httph_bin, Bin, []) of
-               {ok, {http_header, _, Name, _, Value}, Rest} ->
-                       parse_headers(Rest, Pattern, [{Name, Value} | Acc]);
-               {ok, http_eoh, Rest} ->
-                       Headers = lists:reverse(Acc),
-                       {headers, Headers, fun () -> parse_body(Rest, Pattern) end};
-               {ok, {http_error, _}, _} ->
-                       % Skip malformed parts.
-                       skip(Bin, Pattern);
-               {more, _} ->
-                       more(Bin, fun (NewBin) -> parse_headers(NewBin, Pattern, Acc) end)
-       end.
-
--spec parse_body(binary(), pattern()) -> more(body_result()).
-parse_body(Bin, Pattern = {P, PSize}) when byte_size(Bin) >= PSize ->
-       case binary:match(Bin, P) of
-               {0, _Length} ->
-                       <<_:PSize/binary, Rest/binary>> = Bin,
-                       end_of_part(Rest, Pattern);
-               {BoundaryStart, _Length} ->
-                       % Boundary found, this is the latest partial body that will be
-                       % returned for this part.
-                       <<PBody:BoundaryStart/binary, _:PSize/binary, Rest/binary>> = Bin,
-                       FResult = end_of_part(Rest, Pattern),
-                       {body, PBody, fun () -> FResult end};
-               nomatch ->
-                       PartialLength = byte_size(Bin) - PSize + 1,
-                       <<PBody:PartialLength/binary, Rest/binary>> = Bin,
-                       {body, PBody, fun () -> parse_body(Rest, Pattern) end}
-       end;
-parse_body(Bin, Pattern) ->
-       more(Bin, fun (NewBin) -> parse_body(NewBin, Pattern) end).
-
--spec end_of_part(binary(), pattern()) -> end_of_part().
-end_of_part(Bin, Pattern) ->
-       {end_of_part, fun () -> parse_boundary_tail(Bin, Pattern) end}.
-
--spec skip(binary(), pattern()) -> more(part_result()).
-skip(Bin, Pattern = {P, PSize}) ->
-       case binary:match(Bin, P) of
-               {BoundaryStart, _Length} ->
-                       % Boundary found, proceed with parsing of the next part.
-                       RestStart = BoundaryStart + PSize,
-                       <<_:RestStart/binary, Rest/binary>> = Bin,
-                       parse_boundary_tail(Rest, Pattern);
-               nomatch ->
-                       % Boundary not found, need more data.
-                       RestStart = lists:max([byte_size(Bin) - PSize + 1, 0]),
-                       <<_:RestStart/binary, Rest/binary>> = Bin,
-                       more(Rest, fun (NewBin) -> skip(NewBin, Pattern) end)
-       end.
-
--spec more(binary(), parser(T)) -> {more, parser(T)}.
-more(<<>>, F) ->
-       {more, F};
-more(Bin, InnerF) ->
-       F = fun (NewData) when is_binary(NewData) ->
-                               InnerF(<<Bin/binary, NewData/binary>>)
-               end,
-       {more, F}.
-
-%% Tests.
-
--ifdef(TEST).
-
-multipart_test_() ->
-       %% {Body, Result}
-       Tests = [
-               {<<"--boundary--">>, []},
-               {<<"preamble\r\n--boundary--">>, []},
-               {<<"--boundary--\r\nepilogue">>, []},
-               {<<"\r\n--boundary\r\nA:b\r\nC:d\r\n\r\n\r\n--boundary--">>,
-                       [{[{<<"A">>, <<"b">>}, {<<"C">>, <<"d">>}], <<>>}]},
-               {
-                       <<
-                               "--boundary\r\nX-Name:answer\r\n\r\n42"
-                               "\r\n--boundary\r\nServer:Cowboy\r\n\r\nIt rocks!\r\n"
-                               "\r\n--boundary--"
-                       >>,
-                       [
-                               {[{<<"X-Name">>, <<"answer">>}], <<"42">>},
-                               {[{'Server', <<"Cowboy">>}], <<"It rocks!\r\n">>}
-                       ]
-               }
-       ],
-       [{title(V), fun () -> R = acc_multipart(V) end} || {V, R} <- Tests].
-
-acc_multipart(V) ->
-       acc_multipart((parser(<<"boundary">>))(V), []).
-
-acc_multipart({headers, Headers, Cont}, Acc) ->
-       acc_multipart(Cont(), [{Headers, []}|Acc]);
-acc_multipart({body, Body, Cont}, [{Headers, BodyAcc}|Acc]) ->
-       acc_multipart(Cont(), [{Headers, [Body|BodyAcc]}|Acc]);
-acc_multipart({end_of_part, Cont}, [{Headers, BodyAcc}|Acc]) ->
-       Body = list_to_binary(lists:reverse(BodyAcc)),
-       acc_multipart(Cont(), [{Headers, Body}|Acc]);
-acc_multipart(eof, Acc) ->
-       lists:reverse(Acc).
-
-content_disposition_test_() ->
-       %% {Disposition, Result}
-       Tests = [
-               {<<"form-data; name=id">>, {<<"form-data">>, [{<<"name">>, <<"id">>}]}},
-               {<<"inline">>, {<<"inline">>, []}},
-               {<<"attachment; \tfilename=brackets-slides.pdf">>,
-                       {<<"attachment">>, [{<<"filename">>, <<"brackets-slides.pdf">>}]}}
-       ],
-       [{title(V), fun () -> R = content_disposition(V) end} || {V, R} <- Tests].
-
-title(Bin) ->
-       Title = lists:foldl(
-               fun ({T, R}, V) -> re:replace(V, T, R, [global]) end,
-               Bin,
-               [{"\t", "\\\\t"}, {"\r", "\\\\r"}, {"\n", "\\\\n"}]
-       ),
-       iolist_to_binary(Title).
-
--endif.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_protocol.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_protocol.erl
deleted file mode 100644 (file)
index 34bb1a1..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%% Copyright (c) 2011, Michiel Hakvoort <michiel@hakvoort.it>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc Cowboy protocol.
-%%
-%% A Cowboy protocol must implement one callback: <em>start_link/4</em>.
-%%
-%% <em>start_link/4</em> is meant for the initialization of the
-%% protocol process.
-%% It receives the pid to the listener's gen_server, the client socket,
-%% the module name of the chosen transport and the options defined when
-%% starting the listener. The <em>start_link/4</em> function must follow
-%% the supervisor start function specification.
-%%
-%% After initializing your protocol, it is recommended to call the
-%% function cowboy:accept_ack/1 with the ListenerPid as argument,
-%% as it will ensure Cowboy has been able to fully initialize the socket.
-%% Anything you do past this point is up to you!
-%%
-%% If you need to change some socket options, like enabling raw mode
-%% for example, you can call the <em>Transport:setopts/2</em> function.
-%% It is the protocol's responsability to manage the socket usage,
-%% there should be no need for an user to specify that kind of options
-%% while starting a listener.
-%%
-%% You should definitely look at the cowboy_http_protocol module for
-%% a great example of fast request handling if you need to.
-%% Otherwise it's probably safe to use <code>{active, once}</code> mode
-%% and handle everything as it comes.
-%%
-%% Note that while you technically can run a protocol handler directly
-%% as a gen_server or a gen_fsm, it's probably not a good idea,
-%% as the only call you'll ever receive from Cowboy is the
-%% <em>start_link/4</em> call. On the other hand, feel free to write
-%% a very basic protocol handler which then forwards requests to a
-%% gen_server or gen_fsm. By doing so however you must take care to
-%% supervise their processes as Cowboy only knows about the protocol
-%% handler itself.
--module(cowboy_protocol).
-
--export([behaviour_info/1]).
-
-%% @private
--spec behaviour_info(_)
-       -> undefined | [{start_link, 4}, ...].
-behaviour_info(callbacks) ->
-       [{start_link, 4}];
-behaviour_info(_Other) ->
-       undefined.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_requests_sup.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_requests_sup.erl
deleted file mode 100644 (file)
index 87d5352..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @private
--module(cowboy_requests_sup).
--behaviour(supervisor).
-
--export([start_link/0, start_request/5]). %% API.
--export([init/1]). %% supervisor.
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
-       supervisor:start_link(?MODULE, []).
-
--spec start_request(pid(), inet:socket(), module(), module(), any())
-       -> {ok, pid()}.
-start_request(ListenerPid, Socket, Transport, Protocol, Opts) ->
-       Protocol:start_link(ListenerPid, Socket, Transport, Opts).
-
-%% supervisor.
-
--spec init([]) -> {ok, {{simple_one_for_one, 0, 1}, [{_, _, _, _, _, _}, ...]}}.
-init([]) ->
-       {ok, {{simple_one_for_one, 0, 1}, [{?MODULE, {?MODULE, start_request, []},
-               temporary, brutal_kill, worker, [?MODULE]}]}}.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_ssl_transport.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_ssl_transport.erl
deleted file mode 100644 (file)
index bf8b1fb..0000000
+++ /dev/null
@@ -1,164 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc SSL transport API.
-%%
-%% Wrapper around <em>ssl</em> implementing the Cowboy transport API.
-%%
-%% This transport requires the <em>crypto</em>, <em>public_key</em>
-%% and <em>ssl</em> applications to be started. If they aren't started,
-%% it will try to start them itself before opening a port to listen.
-%% Applications aren't stopped when the listening socket is closed, though.
-%%
-%% @see ssl
--module(cowboy_ssl_transport).
--export([name/0, messages/0, listen/1, accept/2, recv/3, send/2, setopts/2,
-       controlling_process/2, peername/1, close/1]).
-
-%% @doc Name of this transport API, <em>ssl</em>.
--spec name() -> ssl.
-name() -> ssl.
-
-%% @doc Atoms used in the process messages sent by this API.
-%%
-%% They identify incoming data, closed connection and errors when receiving
-%% data in active mode.
--spec messages() -> {ssl, ssl_closed, ssl_error}.
-messages() -> {ssl, ssl_closed, ssl_error}.
-
-%% @doc Setup a socket to listen on the given port on the local host.
-%%
-%% The available options are:
-%% <dl>
-%%  <dt>port</dt><dd>Mandatory. TCP port number to open.</dd>
-%%  <dt>backlog</dt><dd>Maximum length of the pending connections queue.
-%%   Defaults to 1024.</dd>
-%%  <dt>ip</dt><dd>Interface to listen on. Listen on all interfaces
-%%   by default.</dd>
-%%  <dt>certfile</dt><dd>Mandatory. Path to a file containing the user's
-%%   certificate.</dd>
-%%  <dt>keyfile</dt><dd>Mandatory. Path to the file containing the user's
-%%   private PEM encoded key.</dd>
-%%  <dt>cacertfile</dt><dd>Optional. Path to file containing PEM encoded
-%%   CA certificates (trusted certificates used for verifying a peer
-%%   certificate).</dd>
-%%  <dt>password</dt><dd>Mandatory. String containing the user's password.
-%%   All private keyfiles must be password protected currently.</dd>
-%% </dl>
-%%
-%% @see ssl:listen/2
-%% @todo The password option shouldn't be mandatory.
--spec listen([{port, inet:ip_port()} | {certfile, string()}
-       | {keyfile, string()} | {password, string()}
-       | {cacertfile, string()} | {ip, inet:ip_address()}])
-       -> {ok, ssl:sslsocket()} | {error, atom()}.
-listen(Opts) ->
-       require([crypto, public_key, ssl]),
-       {port, Port} = lists:keyfind(port, 1, Opts),
-       Backlog = proplists:get_value(backlog, Opts, 1024),
-       {certfile, CertFile} = lists:keyfind(certfile, 1, Opts),
-       {keyfile, KeyFile} = lists:keyfind(keyfile, 1, Opts),
-       {password, Password} = lists:keyfind(password, 1, Opts),
-       ListenOpts0 = [binary, {active, false},
-               {backlog, Backlog}, {packet, raw}, {reuseaddr, true},
-               {certfile, CertFile}, {keyfile, KeyFile}, {password, Password}],
-       ListenOpts1 =
-               case lists:keyfind(ip, 1, Opts) of
-                       false -> ListenOpts0;
-                       Ip -> [Ip|ListenOpts0]
-               end,
-       ListenOpts =
-               case lists:keyfind(cacertfile, 1, Opts) of
-                       false -> ListenOpts1;
-                       CACertFile -> [CACertFile|ListenOpts1]
-               end,
-       ssl:listen(Port, ListenOpts).
-
-%% @doc Accept an incoming connection on a listen socket.
-%%
-%% Note that this function does both the transport accept and
-%% the SSL handshake.
-%%
-%% @see ssl:transport_accept/2
-%% @see ssl:ssl_accept/2
--spec accept(ssl:sslsocket(), timeout())
-       -> {ok, ssl:sslsocket()} | {error, closed | timeout | atom()}.
-accept(LSocket, Timeout) ->
-       case ssl:transport_accept(LSocket, Timeout) of
-               {ok, CSocket} ->
-                       ssl_accept(CSocket, Timeout);
-               {error, Reason} ->
-                       {error, Reason}
-       end.
-
-%% @doc Receive a packet from a socket in passive mode.
-%% @see ssl:recv/3
--spec recv(ssl:sslsocket(), non_neg_integer(), timeout())
-       -> {ok, any()} | {error, closed | atom()}.
-recv(Socket, Length, Timeout) ->
-       ssl:recv(Socket, Length, Timeout).
-
-%% @doc Send a packet on a socket.
-%% @see ssl:send/2
--spec send(ssl:sslsocket(), iolist()) -> ok | {error, atom()}.
-send(Socket, Packet) ->
-       ssl:send(Socket, Packet).
-
-%% @doc Set one or more options for a socket.
-%% @see ssl:setopts/2
--spec setopts(ssl:sslsocket(), list()) -> ok | {error, atom()}.
-setopts(Socket, Opts) ->
-       ssl:setopts(Socket, Opts).
-
-%% @doc Assign a new controlling process <em>Pid</em> to <em>Socket</em>.
-%% @see ssl:controlling_process/2
--spec controlling_process(ssl:sslsocket(), pid())
-       -> ok | {error, closed | not_owner | atom()}.
-controlling_process(Socket, Pid) ->
-       ssl:controlling_process(Socket, Pid).
-
-%% @doc Return the address and port for the other end of a connection.
-%% @see ssl:peername/1
--spec peername(ssl:sslsocket())
-       -> {ok, {inet:ip_address(), inet:ip_port()}} | {error, atom()}.
-peername(Socket) ->
-       ssl:peername(Socket).
-
-%% @doc Close a TCP socket.
-%% @see ssl:close/1
--spec close(ssl:sslsocket()) -> ok.
-close(Socket) ->
-       ssl:close(Socket).
-
-%% Internal.
-
--spec require(list(module())) -> ok.
-require([]) ->
-       ok;
-require([App|Tail]) ->
-       case application:start(App) of
-               ok -> ok;
-               {error, {already_started, App}} -> ok
-       end,
-       require(Tail).
-
--spec ssl_accept(ssl:sslsocket(), timeout())
-       -> {ok, ssl:sslsocket()} | {error, closed | timeout | atom()}.
-ssl_accept(Socket, Timeout) ->
-       case ssl:ssl_accept(Socket, Timeout) of
-               ok ->
-                       {ok, Socket};
-               {error, Reason} ->
-                       {error, Reason}
-       end.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_tcp_transport.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_tcp_transport.erl
deleted file mode 100644 (file)
index c1dad62..0000000
+++ /dev/null
@@ -1,106 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-%% @doc TCP transport API.
-%%
-%% Wrapper around <em>gen_tcp</em> implementing the Cowboy transport API.
-%%
-%% @see gen_tcp
--module(cowboy_tcp_transport).
-
--export([name/0, messages/0, listen/1, accept/2, recv/3, send/2, setopts/2,
-       controlling_process/2, peername/1, close/1]).
-
-%% @doc Name of this transport API, <em>tcp</em>.
--spec name() -> tcp.
-name() -> tcp.
-
-%% @doc Atoms used in the process messages sent by this API.
-%%
-%% They identify incoming data, closed connection and errors when receiving
-%% data in active mode.
--spec messages() -> {tcp, tcp_closed, tcp_error}.
-messages() -> {tcp, tcp_closed, tcp_error}.
-
-%% @doc Setup a socket to listen on the given port on the local host.
-%%
-%% The available options are:
-%% <dl>
-%%  <dt>port</dt><dd>Mandatory. TCP port number to open.</dd>
-%%  <dt>backlog</dt><dd>Maximum length of the pending connections queue.
-%%   Defaults to 1024.</dd>
-%%  <dt>ip</dt><dd>Interface to listen on. Listen on all interfaces
-%%   by default.</dd>
-%% </dl>
-%%
-%% @see gen_tcp:listen/2
--spec listen([{port, inet:ip_port()} | {ip, inet:ip_address()}])
-       -> {ok, inet:socket()} | {error, atom()}.
-listen(Opts) ->
-       {port, Port} = lists:keyfind(port, 1, Opts),
-       Backlog = proplists:get_value(backlog, Opts, 1024),
-       ListenOpts0 = [binary, {active, false},
-               {backlog, Backlog}, {packet, raw}, {reuseaddr, true}],
-       ListenOpts =
-               case lists:keyfind(ip, 1, Opts) of
-                       false -> ListenOpts0;
-                       Ip -> [Ip|ListenOpts0]
-               end,
-       gen_tcp:listen(Port, ListenOpts).
-
-%% @doc Accept an incoming connection on a listen socket.
-%% @see gen_tcp:accept/2
--spec accept(inet:socket(), timeout())
-       -> {ok, inet:socket()} | {error, closed | timeout | atom()}.
-accept(LSocket, Timeout) ->
-       gen_tcp:accept(LSocket, Timeout).
-
-%% @doc Receive a packet from a socket in passive mode.
-%% @see gen_tcp:recv/3
--spec recv(inet:socket(), non_neg_integer(), timeout())
-       -> {ok, any()} | {error, closed | atom()}.
-recv(Socket, Length, Timeout) ->
-       gen_tcp:recv(Socket, Length, Timeout).
-
-%% @doc Send a packet on a socket.
-%% @see gen_tcp:send/2
--spec send(inet:socket(), iolist()) -> ok | {error, atom()}.
-send(Socket, Packet) ->
-       gen_tcp:send(Socket, Packet).
-
-%% @doc Set one or more options for a socket.
-%% @see inet:setopts/2
--spec setopts(inet:socket(), list()) -> ok | {error, atom()}.
-setopts(Socket, Opts) ->
-       inet:setopts(Socket, Opts).
-
-%% @doc Assign a new controlling process <em>Pid</em> to <em>Socket</em>.
-%% @see gen_tcp:controlling_process/2
--spec controlling_process(inet:socket(), pid())
-       -> ok | {error, closed | not_owner | atom()}.
-controlling_process(Socket, Pid) ->
-       gen_tcp:controlling_process(Socket, Pid).
-
-%% @doc Return the address and port for the other end of a connection.
-%% @see inet:peername/1
--spec peername(inet:socket())
-       -> {ok, {inet:ip_address(), inet:ip_port()}} | {error, atom()}.
-peername(Socket) ->
-       inet:peername(Socket).
-
-%% @doc Close a TCP socket.
-%% @see gen_tcp:close/1
--spec close(inet:socket()) -> ok.
-close(Socket) ->
-       gen_tcp:close(Socket).
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/chunked_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/chunked_handler.erl
deleted file mode 100644 (file)
index d246d51..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(chunked_handler).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, terminate/2]).
-
-init({_Transport, http}, Req, _Opts) ->
-       {ok, Req, undefined}.
-
-handle(Req, State) ->
-       {ok, Req2} = cowboy_http_req:chunked_reply(200, Req),
-       cowboy_http_req:chunk("chunked_handler\r\n", Req2),
-       cowboy_http_req:chunk("works fine!", Req2),
-       {ok, Req2, State}.
-
-terminate(_Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/dispatcher_prop.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/dispatcher_prop.erl
deleted file mode 100644 (file)
index b6a1c92..0000000
+++ /dev/null
@@ -1,68 +0,0 @@
-%% Copyright (c) 2011, Magnus Klaar <magnus.klaar@gmail.com>
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--module(dispatcher_prop).
--include_lib("proper/include/proper.hrl").
-
-%% Generators.
-
-hostname_head_char() ->
-       oneof([choose($a, $z), choose($A, $Z), choose($0, $9)]).
-
-hostname_char() ->
-       oneof([choose($a, $z), choose($A, $Z), choose($0, $9), $-]).
-
-hostname_label() ->
-       ?SUCHTHAT(Label, [hostname_head_char()|list(hostname_char())],
-               length(Label) < 64).
-
-hostname() ->
-       ?SUCHTHAT(Hostname,
-               ?LET(Labels, list(hostname_label()), string:join(Labels, ".")),
-               length(Hostname) > 0 andalso length(Hostname) =< 255).
-
-port_number() ->
-       choose(1, 16#ffff).
-
-port_str() ->
-       oneof(["", ?LET(Port, port_number(), ":" ++ integer_to_list(Port))]).
-
-server() ->
-       ?LET({Hostname, PortStr}, {hostname(), port_str()},
-               list_to_binary(Hostname ++ PortStr)).
-
-%% Properties.
-
-prop_split_host_symmetric() ->
-       ?FORALL(Server, server(),
-       begin case cowboy_dispatcher:split_host(Server) of
-                       {Tokens, RawHost, undefined} ->
-                               (Server == RawHost) and (Server == binary_join(Tokens, "."));
-                       {Tokens, RawHost, Port} ->
-                               PortBin = (list_to_binary(":" ++ integer_to_list(Port))),
-                               (Server == << RawHost/binary, PortBin/binary >>)
-                               and (Server == << (binary_join(Tokens, "."))/binary,
-                                       PortBin/binary >>)
-       end end).
-
-%% Internal.
-
-%% Contributed by MononcQc on #erlounge.
-binary_join(Flowers, Leaf) ->
-       case Flowers of
-               [] -> <<>>;
-               [Petal|Pot] -> iolist_to_binary(
-                       [Petal | [[Leaf | Pollen] || Pollen <- Pot]])
-       end.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE.erl
deleted file mode 100644 (file)
index bad91a8..0000000
+++ /dev/null
@@ -1,613 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%% Copyright (c) 2011, Anthony Ramine <nox@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--module(http_SUITE).
-
--include_lib("common_test/include/ct.hrl").
-
--export([all/0, groups/0, init_per_suite/1, end_per_suite/1,
-       init_per_group/2, end_per_group/2]). %% ct.
--export([chunked_response/1, headers_dupe/1, headers_huge/1,
-       keepalive_nl/1, max_keepalive/1, nc_rand/1, nc_zero/1,
-       pipeline/1, raw/1, set_resp_header/1, set_resp_overwrite/1,
-       set_resp_body/1, stream_body_set_resp/1, response_as_req/1,
-       static_mimetypes_function/1, static_attribute_etag/1,
-       static_function_etag/1, multipart/1]). %% http.
--export([http_200/1, http_404/1, handler_errors/1,
-       file_200/1, file_403/1, dir_403/1, file_404/1,
-       file_400/1]). %% http and https.
--export([http_10_hostless/1]). %% misc.
--export([rest_simple/1, rest_keepalive/1, rest_keepalive_post/1]). %% rest.
-
-%% ct.
-
-all() ->
-       [{group, http}, {group, https}, {group, misc}, {group, rest}].
-
-groups() ->
-       BaseTests = [http_200, http_404, handler_errors,
-               file_200, file_403, dir_403, file_404, file_400],
-       [{http, [], [chunked_response, headers_dupe, headers_huge,
-               keepalive_nl, max_keepalive, nc_rand, nc_zero, pipeline, raw,
-               set_resp_header, set_resp_overwrite,
-               set_resp_body, response_as_req, stream_body_set_resp,
-               static_mimetypes_function, static_attribute_etag,
-               static_function_etag, multipart] ++ BaseTests},
-       {https, [], BaseTests},
-       {misc, [], [http_10_hostless]},
-       {rest, [], [rest_simple, rest_keepalive, rest_keepalive_post]}].
-
-init_per_suite(Config) ->
-       application:start(inets),
-       application:start(cowboy),
-       Config.
-
-end_per_suite(_Config) ->
-       application:stop(cowboy),
-       application:stop(inets),
-       ok.
-
-init_per_group(http, Config) ->
-       Port = 33080,
-       Config1 = init_static_dir(Config),
-       cowboy:start_listener(http, 100,
-               cowboy_tcp_transport, [{port, Port}],
-               cowboy_http_protocol, [{max_keepalive, 50},
-                       {dispatch, init_http_dispatch(Config1)}]
-       ),
-       [{scheme, "http"}, {port, Port}|Config1];
-init_per_group(https, Config) ->
-       Port = 33081,
-       Config1 = init_static_dir(Config),
-       application:start(crypto),
-       application:start(public_key),
-       application:start(ssl),
-       DataDir = ?config(data_dir, Config),
-       cowboy:start_listener(https, 100,
-               cowboy_ssl_transport, [
-                       {port, Port}, {certfile, DataDir ++ "cert.pem"},
-                       {keyfile, DataDir ++ "key.pem"}, {password, "cowboy"}],
-               cowboy_http_protocol, [{dispatch, init_https_dispatch(Config1)}]
-       ),
-       [{scheme, "https"}, {port, Port}|Config1];
-init_per_group(misc, Config) ->
-       Port = 33082,
-       cowboy:start_listener(misc, 100,
-               cowboy_tcp_transport, [{port, Port}],
-               cowboy_http_protocol, [{dispatch, [{'_', [
-                       {[], http_handler, []}
-       ]}]}]),
-       [{port, Port}|Config];
-init_per_group(rest, Config) ->
-       Port = 33083,
-       cowboy:start_listener(reset, 100,
-               cowboy_tcp_transport, [{port, Port}],
-               cowboy_http_protocol, [{dispatch, [{'_', [
-                       {[<<"simple">>], rest_simple_resource, []},
-                       {[<<"forbidden_post">>], rest_forbidden_resource, [true]},
-                       {[<<"simple_post">>], rest_forbidden_resource, [false]}
-       ]}]}]),
-       [{port, Port}|Config].
-
-end_per_group(https, Config) ->
-       cowboy:stop_listener(https),
-       application:stop(ssl),
-       application:stop(public_key),
-       application:stop(crypto),
-       end_static_dir(Config),
-       ok;
-end_per_group(http, Config) ->
-       cowboy:stop_listener(http),
-       end_static_dir(Config);
-end_per_group(Listener, _Config) ->
-       cowboy:stop_listener(Listener),
-       ok.
-
-%% Dispatch configuration.
-
-init_http_dispatch(Config) ->
-       [
-               {[<<"localhost">>], [
-                       {[<<"chunked_response">>], chunked_handler, []},
-                       {[<<"init_shutdown">>], http_handler_init_shutdown, []},
-                       {[<<"long_polling">>], http_handler_long_polling, []},
-                       {[<<"headers">>, <<"dupe">>], http_handler,
-                               [{headers, [{<<"Connection">>, <<"close">>}]}]},
-                       {[<<"set_resp">>, <<"header">>], http_handler_set_resp,
-                               [{headers, [{<<"Vary">>, <<"Accept">>}]}]},
-                       {[<<"set_resp">>, <<"overwrite">>], http_handler_set_resp,
-                               [{headers, [{<<"Server">>, <<"DesireDrive/1.0">>}]}]},
-                       {[<<"set_resp">>, <<"body">>], http_handler_set_resp,
-                               [{body, <<"A flameless dance does not equal a cycle">>}]},
-                       {[<<"stream_body">>, <<"set_resp">>], http_handler_stream_body,
-                               [{reply, set_resp}, {body, <<"stream_body_set_resp">>}]},
-                       {[<<"static">>, '...'], cowboy_http_static,
-                               [{directory, ?config(static_dir, Config)},
-                                {mimetypes, [{<<".css">>, [<<"text/css">>]}]}]},
-                       {[<<"static_mimetypes_function">>, '...'], cowboy_http_static,
-                               [{directory, ?config(static_dir, Config)},
-                                {mimetypes, {fun(Path, data) when is_binary(Path) ->
-                                       [<<"text/html">>] end, data}}]},
-                       {[<<"handler_errors">>], http_handler_errors, []},
-                       {[<<"static_attribute_etag">>, '...'], cowboy_http_static,
-                               [{directory, ?config(static_dir, Config)},
-                                {etag, {attributes, [filepath, filesize, inode, mtime]}}]},
-                       {[<<"static_function_etag">>, '...'], cowboy_http_static,
-                               [{directory, ?config(static_dir, Config)},
-                                {etag, {fun static_function_etag/2, etag_data}}]},
-                       {[<<"multipart">>], http_handler_multipart, []},
-                       {[], http_handler, []}
-               ]}
-       ].
-
-init_https_dispatch(Config) ->
-       init_http_dispatch(Config).
-
-
-init_static_dir(Config) ->
-       Dir = filename:join(?config(priv_dir, Config), "static"),
-       Level1 = fun(Name) -> filename:join(Dir, Name) end,
-       ok = file:make_dir(Dir),
-       ok = file:write_file(Level1("test_file"), "test_file\n"),
-       ok = file:write_file(Level1("test_file.css"), "test_file.css\n"),
-       ok = file:write_file(Level1("test_noread"), "test_noread\n"),
-       ok = file:change_mode(Level1("test_noread"), 8#0333),
-       ok = file:write_file(Level1("test.html"), "test.html\n"),
-       ok = file:make_dir(Level1("test_dir")),
-       [{static_dir, Dir}|Config].
-
-end_static_dir(Config) ->
-       Dir = ?config(static_dir, Config),
-       Level1 = fun(Name) -> filename:join(Dir, Name) end,
-       ok = file:delete(Level1("test_file")),
-       ok = file:delete(Level1("test_file.css")),
-       ok = file:delete(Level1("test_noread")),
-       ok = file:delete(Level1("test.html")),
-       ok = file:del_dir(Level1("test_dir")),
-       ok = file:del_dir(Dir),
-       Config.
-
-%% http.
-
-chunked_response(Config) ->
-       {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, "chunked_handler\r\nworks fine!"}} =
-               httpc:request(build_url("/chunked_response", Config)).
-
-headers_dupe(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, "GET /headers/dupe HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: keep-alive\r\n\r\n"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {_Start, _Length} = binary:match(Data, <<"Connection: close">>),
-       nomatch = binary:match(Data, <<"Connection: keep-alive">>),
-       {error, closed} = gen_tcp:recv(Socket, 0, 1000).
-
-headers_huge(Config) ->
-       Cookie = lists:flatten(["whatever_man_biiiiiiiiiiiig_cookie_me_want_77="
-               "Wed Apr 06 2011 10:38:52 GMT-0500 (CDT)" || _N <- lists:seq(1, 40)]),
-       {_Packet, 200} = raw_req(["GET / HTTP/1.0\r\nHost: localhost\r\n"
-               "Set-Cookie: ", Cookie, "\r\n\r\n"], Config).
-
-keepalive_nl(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = keepalive_nl_loop(Socket, 10),
-       ok = gen_tcp:close(Socket).
-
-keepalive_nl_loop(_Socket, 0) ->
-       ok;
-keepalive_nl_loop(Socket, N) ->
-       ok = gen_tcp:send(Socket, "GET / HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: keep-alive\r\n\r\n"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {0, 12} = binary:match(Data, <<"HTTP/1.1 200">>),
-       nomatch = binary:match(Data, <<"Connection: close">>),
-       ok = gen_tcp:send(Socket, "\r\n"), %% extra nl
-       keepalive_nl_loop(Socket, N - 1).
-
-max_keepalive(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = max_keepalive_loop(Socket, 50),
-       {error, closed} = gen_tcp:recv(Socket, 0, 1000).
-
-max_keepalive_loop(_Socket, 0) ->
-       ok;
-max_keepalive_loop(Socket, N) ->
-       ok = gen_tcp:send(Socket, "GET / HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: keep-alive\r\n\r\n"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {0, 12} = binary:match(Data, <<"HTTP/1.1 200">>),
-       case N of
-               1 -> {_, _} = binary:match(Data, <<"Connection: close">>);
-               N -> nomatch = binary:match(Data, <<"Connection: close">>)
-       end,
-       keepalive_nl_loop(Socket, N - 1).
-
-multipart(Config) ->
-       Url = build_url("/multipart", Config),
-       Body = <<
-               "This is a preamble."
-               "\r\n--OHai\r\nX-Name:answer\r\n\r\n42"
-               "\r\n--OHai\r\nServer:Cowboy\r\n\r\nIt rocks!\r\n"
-               "\r\n--OHai--"
-               "This is an epiloque."
-       >>,
-       Request = {Url, [], "multipart/x-makes-no-sense; boundary=OHai", Body},
-       {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, Response}} =
-               httpc:request(post, Request, [], [{body_format, binary}]),
-       Parts = binary_to_term(Response),
-       Parts = [
-               {[{<<"X-Name">>, <<"answer">>}], <<"42">>},
-               {[{'Server', <<"Cowboy">>}], <<"It rocks!\r\n">>}
-       ].
-
-nc_rand(Config) ->
-       nc_reqs(Config, "/dev/urandom").
-
-nc_zero(Config) ->
-       nc_reqs(Config, "/dev/zero").
-
-nc_reqs(Config, Input) ->
-       Cat = os:find_executable("cat"),
-       Nc = os:find_executable("nc"),
-       case {Cat, Nc} of
-               {false, _} ->
-                       {skip, {notfound, cat}};
-               {_, false} ->
-                       {skip, {notfound, nc}};
-               _Good ->
-                       %% Throw garbage at the server then check if it's still up.
-                       {port, Port} = lists:keyfind(port, 1, Config),
-                       [nc_run_req(Port, Input) || _N <- lists:seq(1, 100)],
-                       Packet = "GET / HTTP/1.0\r\nHost: localhost\r\n\r\n",
-                       {Packet, 200} = raw_req(Packet, Config)
-       end.
-
-nc_run_req(Port, Input) ->
-       os:cmd("cat " ++ Input ++ " | nc localhost " ++ integer_to_list(Port)).
-
-pipeline(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket,
-               "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n"
-               "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n"
-               "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n"
-               "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n"
-               "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n"),
-       Data = pipeline_recv(Socket, <<>>),
-       Reqs = binary:split(Data, << "\r\n\r\nhttp_handler" >>, [global, trim]),
-       5 = length(Reqs),
-       pipeline_check(Reqs).
-
-pipeline_check([]) ->
-       ok;
-pipeline_check([Req|Tail]) ->
-       << "HTTP/1.1 200", _Rest/bits >> = Req,
-       pipeline_check(Tail).
-
-pipeline_recv(Socket, SoFar) ->
-       case gen_tcp:recv(Socket, 0, 6000) of
-               {ok, Data} ->
-                       pipeline_recv(Socket, << SoFar/binary, Data/binary >>);
-               {error, closed} ->
-                       ok = gen_tcp:close(Socket),
-                       SoFar
-       end.
-
-raw_req(Packet, Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, Packet),
-       Res = case gen_tcp:recv(Socket, 0, 6000) of
-               {ok, << "HTTP/1.1 ", Str:24/bits, _Rest/bits >>} ->
-                       list_to_integer(binary_to_list(Str));
-               {error, Reason} ->
-                       Reason
-       end,
-       gen_tcp:close(Socket),
-       {Packet, Res}.
-
-%% Send a raw request. Return the response code and the full response.
-raw_resp(Request, Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       Transport = case ?config(scheme, Config) of
-               "http" -> gen_tcp;
-               "https" -> ssl
-       end,
-       {ok, Socket} = Transport:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = Transport:send(Socket, Request),
-       {StatusCode,  Response} = case recv_loop(Transport, Socket, <<>>) of
-               {ok, << "HTTP/1.1 ", Str:24/bits, _Rest/bits >> = Bin} ->
-                       {list_to_integer(binary_to_list(Str)), Bin};
-               {ok, Bin} ->
-                       {badresp, Bin};
-               {error, Reason} ->
-                       {Reason, <<>>}
-       end,
-       Transport:close(Socket),
-       {Response, StatusCode}.
-
-recv_loop(Transport, Socket, Acc) ->
-       case Transport:recv(Socket, 0, 6000) of
-               {ok, Data} ->
-                       recv_loop(Transport, Socket, <<Acc/binary, Data/binary>>);
-               {error, closed} ->
-                       ok = Transport:close(Socket),
-                       {ok, Acc};
-               {error, Reason} ->
-                       {error, Reason}
-       end.
-
-
-
-raw(Config) ->
-       Huge = [$0 || _N <- lists:seq(1, 5000)],
-       Tests = [
-               {"\r\n\r\n\r\n\r\n\r\nGET / HTTP/1.1\r\nHost: localhost\r\n\r\n", 200},
-               {"\n", 400},
-               {"Garbage\r\n\r\n", 400},
-               {"\r\n\r\n\r\n\r\n\r\n\r\n", 400},
-               {"GET / HTTP/1.1\r\nHost: dev-extend.eu\r\n\r\n", 400},
-               {"", closed},
-               {"\r\n", closed},
-               {"\r\n\r\n", closed},
-               {"GET / HTTP/1.1", closed},
-               {"GET / HTTP/1.1\r\n", 408},
-               {"GET / HTTP/1.1\r\nHost: localhost", 408},
-               {"GET / HTTP/1.1\r\nHost: localhost\r\n", 408},
-               {"GET / HTTP/1.1\r\nHost: localhost\r\n\r", 408},
-               {"GET http://localhost/ HTTP/1.1\r\n\r\n", 501},
-               {"GET / HTTP/1.2\r\nHost: localhost\r\n\r\n", 505},
-               {"GET /init_shutdown HTTP/1.1\r\nHost: localhost\r\n\r\n", 666},
-               {"GET /long_polling HTTP/1.1\r\nHost: localhost\r\n\r\n", 102},
-               {Huge, 413},
-               {"GET / HTTP/1.1\r\n" ++ Huge, 413}
-       ],
-       [{Packet, StatusCode} = raw_req(Packet, Config)
-               || {Packet, StatusCode} <- Tests].
-
-set_resp_header(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, "GET /set_resp/header HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: close\r\n\r\n"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {_, _} = binary:match(Data, <<"Vary: Accept">>),
-       {_, _} = binary:match(Data, <<"Set-Cookie: ">>).
-
-set_resp_overwrite(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, "GET /set_resp/overwrite HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: close\r\n\r\n"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {_Start, _Length} = binary:match(Data, <<"Server: DesireDrive/1.0">>).
-
-set_resp_body(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, "GET /set_resp/body HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: close\r\n\r\n"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {_Start, _Length} = binary:match(Data, <<"\r\n\r\n"
-               "A flameless dance does not equal a cycle">>).
-
-response_as_req(Config) ->
-       Packet =
-"HTTP/1.0 302 Found
-Location: http://www.google.co.il/
-Cache-Control: private
-Content-Type: text/html; charset=UTF-8
-Set-Cookie: PREF=ID=568f67013d4a7afa:FF=0:TM=1323014101:LM=1323014101:S=XqctDWC65MzKT0zC; expires=Tue, 03-Dec-2013 15:55:01 GMT; path=/; domain=.google.com
-Date: Sun, 04 Dec 2011 15:55:01 GMT
-Server: gws
-Content-Length: 221
-X-XSS-Protection: 1; mode=block
-X-Frame-Options: SAMEORIGIN
-
-<HTML><HEAD><meta http-equiv=\"content-type\" content=\"text/html;charset=utf-8\">
-<TITLE>302 Moved</TITLE></HEAD><BODY>
-<H1>302 Moved</H1>
-The document has moved
-<A HREF=\"http://www.google.co.il/\">here</A>.
-</BODY></HTML>",
-       {Packet, 400} = raw_req(Packet, Config).
-
-stream_body_set_resp(Config) ->
-       {Packet, 200} = raw_resp(
-               "GET /stream_body/set_resp HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: close\r\n\r\n", Config),
-       {_Start, _Length} = binary:match(Packet, <<"stream_body_set_resp">>).
-
-static_mimetypes_function(Config) ->
-       TestURL = build_url("/static_mimetypes_function/test.html", Config),
-       {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test.html\n"}} =
-               httpc:request(TestURL),
-       "text/html" = ?config("content-type", Headers1).
-
-handler_errors(Config) ->
-       Request = fun(Case) ->
-               raw_resp(["GET /handler_errors?case=", Case, " HTTP/1.1\r\n",
-                "Host: localhost\r\n\r\n"], Config) end,
-
-       {_Packet1, 500} = Request("init_before_reply"),
-
-       {Packet2, 200} = Request("init_after_reply"),
-       nomatch = binary:match(Packet2, <<"HTTP/1.1 500">>),
-
-       {Packet3, 200} = Request("init_reply_handle_error"),
-       nomatch = binary:match(Packet3, <<"HTTP/1.1 500">>),
-
-       {_Packet4, 500} = Request("handle_before_reply"),
-
-       {Packet5, 200} = Request("handle_after_reply"),
-       nomatch = binary:match(Packet5, <<"HTTP/1.1 500">>),
-
-       {Packet6, 200} = raw_resp([
-               "GET / HTTP/1.1\r\n",
-               "Host: localhost\r\n",
-               "Connection: keep-alive\r\n\r\n",
-               "GET /handler_errors?case=handle_after_reply\r\n",
-               "Host: localhost\r\n\r\n"], Config),
-       nomatch = binary:match(Packet6, <<"HTTP/1.1 500">>),
-
-       {Packet7, 200} = raw_resp([
-               "GET / HTTP/1.1\r\n",
-               "Host: localhost\r\n",
-               "Connection: keep-alive\r\n\r\n",
-               "GET /handler_errors?case=handle_before_reply HTTP/1.1\r\n",
-               "Host: localhost\r\n\r\n"], Config),
-       {{_, _}, _} = {binary:match(Packet7, <<"HTTP/1.1 500">>), Packet7},
-
-       done.
-
-static_attribute_etag(Config) ->
-       TestURL = build_url("/static_attribute_etag/test.html", Config),
-       {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test.html\n"}} =
-               httpc:request(TestURL),
-       false = ?config("etag", Headers1) =:= undefined,
-       {ok, {{"HTTP/1.1", 200, "OK"}, Headers2, "test.html\n"}} =
-               httpc:request(TestURL),
-       true = ?config("etag", Headers1) =:= ?config("etag", Headers2).
-
-static_function_etag(Config) ->
-       TestURL = build_url("/static_function_etag/test.html", Config),
-       {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test.html\n"}} =
-               httpc:request(TestURL),
-       false = ?config("etag", Headers1) =:= undefined,
-       {ok, {{"HTTP/1.1", 200, "OK"}, Headers2, "test.html\n"}} =
-               httpc:request(TestURL),
-       true = ?config("etag", Headers1) =:= ?config("etag", Headers2).
-
-static_function_etag(Arguments, etag_data) ->
-       {_, Filepath} = lists:keyfind(filepath, 1, Arguments),
-       {_, _Filesize} = lists:keyfind(filesize, 1, Arguments),
-       {_, _INode} = lists:keyfind(inode, 1, Arguments),
-       {_, _Modified} = lists:keyfind(mtime, 1, Arguments),
-       ChecksumCommand = lists:flatten(io_lib:format("sha1sum ~s", [Filepath])),
-       [Checksum|_] = string:tokens(os:cmd(ChecksumCommand), " "),
-       iolist_to_binary(Checksum).
-
-%% http and https.
-
-build_url(Path, Config) ->
-       {scheme, Scheme} = lists:keyfind(scheme, 1, Config),
-       {port, Port} = lists:keyfind(port, 1, Config),
-       Scheme ++ "://localhost:" ++ integer_to_list(Port) ++ Path.
-
-http_200(Config) ->
-       {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, "http_handler"}} =
-               httpc:request(build_url("/", Config)).
-
-http_404(Config) ->
-       {ok, {{"HTTP/1.1", 404, "Not Found"}, _Headers, _Body}} =
-               httpc:request(build_url("/not/found", Config)).
-
-file_200(Config) ->
-       {ok, {{"HTTP/1.1", 200, "OK"}, Headers, "test_file\n"}} =
-               httpc:request(build_url("/static/test_file", Config)),
-       "application/octet-stream" = ?config("content-type", Headers),
-
-       {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test_file.css\n"}} =
-               httpc:request(build_url("/static/test_file.css", Config)),
-       "text/css" = ?config("content-type", Headers1).
-
-file_403(Config) ->
-       {ok, {{"HTTP/1.1", 403, "Forbidden"}, _Headers, _Body}} =
-               httpc:request(build_url("/static/test_noread", Config)).
-
-dir_403(Config) ->
-       {ok, {{"HTTP/1.1", 403, "Forbidden"}, _Headers, _Body}} =
-               httpc:request(build_url("/static/test_dir", Config)),
-       {ok, {{"HTTP/1.1", 403, "Forbidden"}, _Headers, _Body}} =
-               httpc:request(build_url("/static/test_dir/", Config)).
-
-file_404(Config) ->
-       {ok, {{"HTTP/1.1", 404, "Not Found"}, _Headers, _Body}} =
-               httpc:request(build_url("/static/not_found", Config)).
-
-file_400(Config) ->
-       {ok, {{"HTTP/1.1", 400, "Bad Request"}, _Headers, _Body}} =
-               httpc:request(build_url("/static/%2f", Config)),
-       {ok, {{"HTTP/1.1", 400, "Bad Request"}, _Headers1, _Body1}} =
-               httpc:request(build_url("/static/%2e", Config)),
-       {ok, {{"HTTP/1.1", 400, "Bad Request"}, _Headers2, _Body2}} =
-               httpc:request(build_url("/static/%2e%2e", Config)).
-%% misc.
-
-http_10_hostless(Config) ->
-       Packet = "GET / HTTP/1.0\r\n\r\n",
-       {Packet, 200} = raw_req(Packet, Config).
-
-%% rest.
-
-rest_simple(Config) ->
-       Packet = "GET /simple HTTP/1.1\r\nHost: localhost\r\n\r\n",
-       {Packet, 200} = raw_req(Packet, Config).
-
-rest_keepalive(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = rest_keepalive_loop(Socket, 100),
-       ok = gen_tcp:close(Socket).
-
-rest_keepalive_loop(_Socket, 0) ->
-       ok;
-rest_keepalive_loop(Socket, N) ->
-       ok = gen_tcp:send(Socket, "GET /simple HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: keep-alive\r\n\r\n"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {0, 12} = binary:match(Data, <<"HTTP/1.1 200">>),
-       nomatch = binary:match(Data, <<"Connection: close">>),
-       rest_keepalive_loop(Socket, N - 1).
-
-rest_keepalive_post(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = rest_keepalive_post_loop(Socket, 10, forbidden_post),
-       ok = gen_tcp:close(Socket).
-
-rest_keepalive_post_loop(_Socket, 0, _) ->
-       ok;
-rest_keepalive_post_loop(Socket, N, simple_post) ->
-       ok = gen_tcp:send(Socket, "POST /simple_post HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: keep-alive\r\n"
-               "Content-Length: 5\r\nContent-Type: text/plain\r\n\r\n12345"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {0, 12} = binary:match(Data, <<"HTTP/1.1 303">>),
-       nomatch = binary:match(Data, <<"Connection: close">>),
-       rest_keepalive_post_loop(Socket, N - 1, forbidden_post);
-rest_keepalive_post_loop(Socket, N, forbidden_post) ->
-       ok = gen_tcp:send(Socket, "POST /forbidden_post HTTP/1.1\r\n"
-               "Host: localhost\r\nConnection: keep-alive\r\n"
-               "Content-Length: 5\r\nContent-Type: text/plain\r\n\r\n12345"),
-       {ok, Data} = gen_tcp:recv(Socket, 0, 6000),
-       {0, 12} = binary:match(Data, <<"HTTP/1.1 403">>),
-       nomatch = binary:match(Data, <<"Connection: close">>),
-       rest_keepalive_post_loop(Socket, N - 1, simple_post).
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/cert.pem b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/cert.pem
deleted file mode 100644 (file)
index a772007..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
------BEGIN CERTIFICATE-----
-MIICKTCCAZICCQCl9gdHk5NqUjANBgkqhkiG9w0BAQUFADBZMQswCQYDVQQGEwJB
-VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
-cyBQdHkgTHRkMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTEwNDA4MTMxNTE3WhcN
-MTEwNTA4MTMxNTE3WjBZMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0
-ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRIwEAYDVQQDDAls
-b2NhbGhvc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAOjgFPS0dP4d8F1e
-bNJPB+kAjM2FyTZGmkFCLUYONTPrdGOUIHL/UOGtU22BQzlskE+a6/j2Kg72tm8x
-4X7yf+6s7CdRe086idNx9+GymZ64ZTnly33rD3AJffbBeWHwT2e9fuBeFk9WGC8v
-kqECFZyqf7+znS0o48oBNcx3ePB5AgMBAAEwDQYJKoZIhvcNAQEFBQADgYEASTkv
-oHuZyO8DgT8bIE6W3yM2fvlNshkhh7Thgpf32qQoVOxRU9EF0KpuJCCAHQHQNQlI
-nf9Zc4UzOrLhxZBGocNhkkn4WLw2ysto/7+/+9xHah0M0l4auHLQagVLCoOsHUn2
-JX+A2NrbvuX5wnUrZGOdgY70tvMBeU/xLtp3af8=
------END CERTIFICATE-----
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/key.pem b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/key.pem
deleted file mode 100644 (file)
index 0b699cc..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-Proc-Type: 4,ENCRYPTED
-DEK-Info: DES-EDE3-CBC,F11262DB77BB804C
-
-jOJ+ft/dihIxz7CTuuK47fCTGdX7xMLANmA7mRg8y9OYhNZQiCz5GjcWLqe0NNl5
-qXPW0uvT/9B5O9o21Y2i/CKU1BqRLuXHXDsjHg7RGaSH6wIavWt+lR+I1sjieFbX
-VByK1KHXjEU704DEILKJIA9gVzoYAgMzo+FTw2e/2jusXntxk8HXyF5zKTzjHBtI
-NQGweJqTmfZjX3SgPP4Co/ShrA6fUG0uTp1HwbByJnwtAeT3xWJrAD4QSn7+qrlv
-3qmEIqVXsvLrfZRY1WZ4uIsbLK8wkvxboSIoIK55VV9R2zRbwQULon6QJwKYujAr
-J2WUYkHHQOMpaAzUmalaT+8GUt8/A1oSK4BdiSZywsMMm46/hDadXBzFg+dPL5g2
-Td+7/L0S6tUVWq4+YBp5EalZH6VQ4cqPYDJZUZ9xt6+yY7V5748lSdA7cHCROnbG
-bKbSW9WbF7MPDHCjvCAfq+s1dafHJgyIOlMg2bm7V8eHWAA0xKQ/o7i5EyEyaKYR
-UXGeAf+KfXcclEZ77v2RCXZvd6ceWkifm59qWv/3TCYaHiS2Aa3lVToMKTwYzzXQ
-p5X5os6wv3IAi2nGyAIOoSDisdHmFteZNXNQsw0n3XCAYfsNMk+r5/r5YqDffURH
-c8SMOCP4BIPoZ/abi/gnEntGqsx1YALg0aosHwHGDJ/l+QJC6u6PZk310YzRw4GL
-K9+wscFgEub2OO+R83Vkfesj4tYzgOjab7+92a/soHdW0zhGejlvehODOgNZ6NUG
-MPQlT+qpF9Jh5IThYXupXXFzJzQe3O/qVXy89m69JGa+AWRvbu+M/A==
------END RSA PRIVATE KEY-----
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler.erl
deleted file mode 100644 (file)
index 76a85d4..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(http_handler).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, terminate/2]).
-
--record(state, {headers, body}).
-
-init({_Transport, http}, Req, Opts) ->
-       Headers = proplists:get_value(headers, Opts, []),
-       Body = proplists:get_value(body, Opts, "http_handler"),
-       {ok, Req, #state{headers=Headers, body=Body}}.
-
-handle(Req, State=#state{headers=Headers, body=Body}) ->
-       {ok, Req2} = cowboy_http_req:reply(200, Headers, Body, Req),
-       {ok, Req2, State}.
-
-terminate(_Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_errors.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_errors.erl
deleted file mode 100644 (file)
index 1c23207..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(http_handler_errors).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, terminate/2]).
-
-init({_Transport, http}, Req, _Opts) ->
-    {Case, Req1} = cowboy_http_req:qs_val(<<"case">>, Req),
-    case_init(Case, Req1).
-
-case_init(<<"init_before_reply">> = Case, _Req) ->
-    erlang:error(Case);
-
-case_init(<<"init_after_reply">> = Case, Req) ->
-    {ok, _Req1} = cowboy_http_req:reply(200, [], "http_handler_crashes", Req),
-    erlang:error(Case);
-
-case_init(<<"init_reply_handle_error">> = Case, Req) ->
-    {ok, Req1} = cowboy_http_req:reply(200, [], "http_handler_crashes", Req),
-    {ok, Req1, Case};
-
-case_init(<<"handle_before_reply">> = Case, Req) ->
-    {ok, Req, Case};
-
-case_init(<<"handle_after_reply">> = Case, Req) ->
-    {ok, Req, Case}.
-
-
-handle(_Req, <<"init_reply_handle_error">> = Case) ->
-    erlang:error(Case);
-
-handle(_Req, <<"handle_before_reply">> = Case) ->
-    erlang:error(Case);
-
-handle(Req, <<"handle_after_reply">> = Case) ->
-    {ok, _Req1} = cowboy_http_req:reply(200, [], "http_handler_crashes", Req),
-    erlang:error(Case).
-
-terminate(_Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_init_shutdown.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_init_shutdown.erl
deleted file mode 100644 (file)
index ac63b44..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(http_handler_init_shutdown).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, terminate/2]).
-
-init({_Transport, http}, Req, _Opts) ->
-       {ok, Req2} = cowboy_http_req:reply(<<"666 Init Shutdown Testing">>,
-               [{'Connection', <<"close">>}], Req),
-       {shutdown, Req2, undefined}.
-
-handle(Req, State) ->
-       {ok, Req2} = cowboy_http_req:reply(200, [], "Hello world!", Req),
-       {ok, Req2, State}.
-
-terminate(_Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_long_polling.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_long_polling.erl
deleted file mode 100644 (file)
index e838619..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(http_handler_long_polling).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, info/3, terminate/2]).
-
-init({_Transport, http}, Req, _Opts) ->
-       erlang:send_after(500, self(), timeout),
-       {loop, Req, 9, 5000, hibernate}.
-
-handle(_Req, _State) ->
-       exit(badarg).
-
-info(timeout, Req, 0) ->
-       {ok, Req2} = cowboy_http_req:reply(102, Req),
-       {ok, Req2, 0};
-info(timeout, Req, State) ->
-       erlang:send_after(500, self(), timeout),
-       {loop, Req, State - 1, hibernate}.
-
-terminate(_Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_multipart.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_multipart.erl
deleted file mode 100644 (file)
index f5f7919..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(http_handler_multipart).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, terminate/2]).
-
-init({_Transport, http}, Req, []) ->
-       {ok, Req, {}}.
-
-handle(Req, State) ->
-       {Result, Req2} = acc_multipart(Req, []),
-       {ok, Req3} = cowboy_http_req:reply(200, [], term_to_binary(Result), Req2),
-       {ok, Req3, State}.
-
-terminate(_Req, _State) ->
-       ok.
-
-acc_multipart(Req, Acc) ->
-       {Result, Req2} = cowboy_http_req:multipart_data(Req),
-       acc_multipart(Req2, Acc, Result).
-
-acc_multipart(Req, Acc, {headers, Headers}) ->
-       acc_multipart(Req, [{Headers, []}|Acc]);
-acc_multipart(Req, [{Headers, BodyAcc}|Acc], {body, Data}) ->
-       acc_multipart(Req, [{Headers, [Data|BodyAcc]}|Acc]);
-acc_multipart(Req, [{Headers, BodyAcc}|Acc], end_of_part) ->
-       acc_multipart(Req, [{Headers, list_to_binary(lists:reverse(BodyAcc))}|Acc]);
-acc_multipart(Req, Acc, eof) ->
-       {lists:reverse(Acc), Req}.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_set_resp.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_set_resp.erl
deleted file mode 100644 (file)
index 83d48c0..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(http_handler_set_resp).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, terminate/2]).
-
-init({_Transport, http}, Req, Opts) ->
-       Headers = proplists:get_value(headers, Opts, []),
-       Body = proplists:get_value(body, Opts, <<"http_handler_set_resp">>),
-       {ok, Req2} = lists:foldl(fun({Name, Value}, {ok, R}) ->
-               cowboy_http_req:set_resp_header(Name, Value, R)
-       end, {ok, Req}, Headers),
-       {ok, Req3} = cowboy_http_req:set_resp_body(Body, Req2),
-       {ok, Req4} = cowboy_http_req:set_resp_header(
-               <<"X-Cowboy-Test">>, <<"ok">>, Req3),
-       {ok, Req5} = cowboy_http_req:set_resp_cookie(
-               <<"cake">>, <<"lie">>, [], Req4),
-       {ok, Req5, undefined}.
-
-handle(Req, State) ->
-       case cowboy_http_req:has_resp_header(<<"X-Cowboy-Test">>, Req) of
-               false -> {ok, Req, State};
-               true ->
-                       case cowboy_http_req:has_resp_body(Req) of
-                               false -> {ok, Req, State};
-                               true ->
-                                       {ok, Req2} = cowboy_http_req:reply(200, Req),
-                                       {ok, Req2, State}
-                       end
-       end.
-
-terminate(_Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_stream_body.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_stream_body.erl
deleted file mode 100644 (file)
index c90f746..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(http_handler_stream_body).
--behaviour(cowboy_http_handler).
--export([init/3, handle/2, terminate/2]).
-
--record(state, {headers, body, reply}).
-
-init({_Transport, http}, Req, Opts) ->
-       Headers = proplists:get_value(headers, Opts, []),
-       Body = proplists:get_value(body, Opts, "http_handler_stream_body"),
-       Reply = proplists:get_value(reply, Opts),
-       {ok, Req, #state{headers=Headers, body=Body, reply=Reply}}.
-
-handle(Req, State=#state{headers=_Headers, body=Body, reply=set_resp}) ->
-       {ok, Transport, Socket} = cowboy_http_req:transport(Req),
-       SFun = fun() -> Transport:send(Socket, Body), sent end,
-       SLen = iolist_size(Body),
-       {ok, Req2} = cowboy_http_req:set_resp_body_fun(SLen, SFun, Req),
-       {ok, Req3} = cowboy_http_req:reply(200, Req2),
-       {ok, Req3, State}.
-
-terminate(_Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_forbidden_resource.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_forbidden_resource.erl
deleted file mode 100644 (file)
index 90dee84..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
--module(rest_forbidden_resource).
--export([init/3, rest_init/2, allowed_methods/2, forbidden/2,
-               content_types_provided/2, content_types_accepted/2,
-               post_is_create/2, create_path/2, to_text/2, from_text/2]).
-
-init(_Transport, _Req, _Opts) ->
-       {upgrade, protocol, cowboy_http_rest}.
-
-rest_init(Req, [Forbidden]) ->
-       {ok, Req, Forbidden}.
-
-allowed_methods(Req, State) ->
-       {['GET', 'HEAD', 'POST'], Req, State}.
-
-forbidden(Req, State=true) ->
-       {true, Req, State};
-forbidden(Req, State=false) ->
-       {false, Req, State}.
-
-content_types_provided(Req, State) ->
-       {[{{<<"text">>, <<"plain">>, []}, to_text}], Req, State}.
-
-content_types_accepted(Req, State) ->
-       {[{{<<"text">>, <<"plain">>, []}, from_text}], Req, State}.
-
-post_is_create(Req, State) ->
-       {true, Req, State}.
-
-create_path(Req, State) ->
-       {Path, Req2} = cowboy_http_req:raw_path(Req),
-       {Path, Req2, State}.
-
-to_text(Req, State) ->
-       {<<"This is REST!">>, Req, State}.
-
-from_text(Req, State) ->
-       {true, Req, State}.
-
-
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_simple_resource.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_simple_resource.erl
deleted file mode 100644 (file)
index e2c573c..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
--module(rest_simple_resource).
--export([init/3, content_types_provided/2, get_text_plain/2]).
-
-init(_Transport, _Req, _Opts) ->
-       {upgrade, protocol, cowboy_http_rest}.
-
-content_types_provided(Req, State) ->
-       {[{{<<"text">>, <<"plain">>, []}, get_text_plain}], Req, State}.
-
-get_text_plain(Req, State) ->
-       {<<"This is REST!">>, Req, State}.
-
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler.erl
deleted file mode 100644 (file)
index abb4967..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(websocket_handler).
--behaviour(cowboy_http_handler).
--behaviour(cowboy_http_websocket_handler).
--export([init/3, handle/2, terminate/2]).
--export([websocket_init/3, websocket_handle/3,
-       websocket_info/3, websocket_terminate/3]).
-
-init(_Any, _Req, _Opts) ->
-       {upgrade, protocol, cowboy_http_websocket}.
-
-handle(_Req, _State) ->
-       exit(badarg).
-
-terminate(_Req, _State) ->
-       exit(badarg).
-
-websocket_init(_TransportName, Req, _Opts) ->
-       erlang:start_timer(1000, self(), <<"websocket_init">>),
-       Req2 = cowboy_http_req:compact(Req),
-       {ok, Req2, undefined}.
-
-websocket_handle({text, Data}, Req, State) ->
-       {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
-       {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
-       {ok, Req, State}.
-
-websocket_info({timeout, _Ref, Msg}, Req, State) ->
-       erlang:start_timer(1000, self(), <<"websocket_handle">>),
-       {reply, {text, Msg}, Req, State};
-websocket_info(_Info, Req, State) ->
-       {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler_init_shutdown.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler_init_shutdown.erl
deleted file mode 100644 (file)
index aa9e056..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(websocket_handler_init_shutdown).
--behaviour(cowboy_http_handler).
--behaviour(cowboy_http_websocket_handler).
--export([init/3, handle/2, terminate/2]).
--export([websocket_init/3, websocket_handle/3,
-       websocket_info/3, websocket_terminate/3]).
-
-init(_Any, _Req, _Opts) ->
-       {upgrade, protocol, cowboy_http_websocket}.
-
-handle(_Req, _State) ->
-       exit(badarg).
-
-terminate(_Req, _State) ->
-       exit(badarg).
-
-websocket_init(_TransportName, Req, _Opts) ->
-       {ok, Req2} = cowboy_http_req:reply(403, Req),
-       {shutdown, Req2}.
-
-websocket_handle(_Frame, _Req, _State) ->
-       exit(badarg).
-
-websocket_info(_Info, _Req, _State) ->
-       exit(badarg).
-
-websocket_terminate(_Reason, _Req, _State) ->
-       exit(badarg).
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_SUITE.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_SUITE.erl
deleted file mode 100644 (file)
index 136833f..0000000
+++ /dev/null
@@ -1,318 +0,0 @@
-%% Copyright (c) 2011, Loïc Hoguin <essen@dev-extend.eu>
-%%
-%% Permission to use, copy, modify, and/or distribute this software for any
-%% purpose with or without fee is hereby granted, provided that the above
-%% copyright notice and this permission notice appear in all copies.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--module(ws_SUITE).
-
--include_lib("common_test/include/ct.hrl").
-
--export([all/0, groups/0, init_per_suite/1, end_per_suite/1,
-       init_per_group/2, end_per_group/2]). %% ct.
--export([ws0/1, ws8/1, ws8_single_bytes/1, ws8_init_shutdown/1,
-       ws13/1, ws_timeout_hibernate/1]). %% ws.
-
-%% ct.
-
-all() ->
-       [{group, ws}].
-
-groups() ->
-       BaseTests = [ws0, ws8, ws8_single_bytes, ws8_init_shutdown, ws13,
-               ws_timeout_hibernate],
-       [{ws, [], BaseTests}].
-
-init_per_suite(Config) ->
-       application:start(inets),
-       application:start(cowboy),
-       Config.
-
-end_per_suite(_Config) ->
-       application:stop(cowboy),
-       application:stop(inets),
-       ok.
-
-init_per_group(ws, Config) ->
-       Port = 33080,
-       cowboy:start_listener(ws, 100,
-               cowboy_tcp_transport, [{port, Port}],
-               cowboy_http_protocol, [{dispatch, init_dispatch()}]
-       ),
-       [{port, Port}|Config].
-
-end_per_group(Listener, _Config) ->
-       cowboy:stop_listener(Listener),
-       ok.
-
-%% Dispatch configuration.
-
-init_dispatch() ->
-       [
-               {[<<"localhost">>], [
-                       {[<<"websocket">>], websocket_handler, []},
-                       {[<<"ws_timeout_hibernate">>], ws_timeout_hibernate_handler, []},
-                       {[<<"ws_init_shutdown">>], websocket_handler_init_shutdown, []}
-               ]}
-       ].
-
-%% ws and wss.
-
-%% This test makes sure the code works even if we wait for a reply
-%% before sending the third challenge key in the GET body.
-%%
-%% This ensures that Cowboy will work fine with proxies on hixie.
-ws0(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket,
-               "GET /websocket HTTP/1.1\r\n"
-               "Host: localhost\r\n"
-               "Connection: Upgrade\r\n"
-               "Upgrade: WebSocket\r\n"
-               "Origin: http://localhost\r\n"
-               "Sec-Websocket-Key1: Y\" 4 1Lj!957b8@0H756!i\r\n"
-               "Sec-Websocket-Key2: 1711 M;4\\74  80<6\r\n"
-               "\r\n"),
-       {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, {http_response, {1, 1}, 101, "WebSocket Protocol Handshake"}, Rest}
-               = erlang:decode_packet(http, Handshake, []),
-       [Headers, <<>>] = websocket_headers(
-               erlang:decode_packet(httph, Rest, []), []),
-       {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers),
-       {'Upgrade', "WebSocket"} = lists:keyfind('Upgrade', 1, Headers),
-       {"sec-websocket-location", "ws://localhost/websocket"}
-               = lists:keyfind("sec-websocket-location", 1, Headers),
-       {"sec-websocket-origin", "http://localhost"}
-               = lists:keyfind("sec-websocket-origin", 1, Headers),
-       ok = gen_tcp:send(Socket, <<15,245,8,18,2,204,133,33>>),
-       {ok, Body} = gen_tcp:recv(Socket, 0, 6000),
-       <<169,244,191,103,146,33,149,59,74,104,67,5,99,118,171,236>> = Body,
-       ok = gen_tcp:send(Socket, << 0, "client_msg", 255 >>),
-       {ok, << 0, "client_msg", 255 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 0, "websocket_init", 255 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 0, "websocket_handle", 255 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 0, "websocket_handle", 255 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 0, "websocket_handle", 255 >>} = gen_tcp:recv(Socket, 0, 6000),
-       %% We try to send another HTTP request to make sure
-       %% the server closed the request.
-       ok = gen_tcp:send(Socket, [
-               << 255, 0 >>, %% Close websocket command.
-               "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n" %% Server should ignore it.
-       ]),
-       {ok, << 255, 0 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {error, closed} = gen_tcp:recv(Socket, 0, 6000),
-       ok.
-
-ws8(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, [
-               "GET /websocket HTTP/1.1\r\n"
-               "Host: localhost\r\n"
-               "Connection: Upgrade\r\n"
-               "Upgrade: websocket\r\n"
-               "Sec-WebSocket-Origin: http://localhost\r\n"
-               "Sec-WebSocket-Version: 8\r\n"
-               "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
-               "\r\n"]),
-       {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest}
-               = erlang:decode_packet(http, Handshake, []),
-       [Headers, <<>>] = websocket_headers(
-               erlang:decode_packet(httph, Rest, []), []),
-       {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers),
-       {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers),
-       {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}
-               = lists:keyfind("sec-websocket-accept", 1, Headers),
-       ok = gen_tcp:send(Socket, << 16#81, 16#85, 16#37, 16#fa, 16#21, 16#3d,
-               16#7f, 16#9f, 16#4d, 16#51, 16#58 >>),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 0:8 >>), %% ping
-       {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong
-       ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>), %% close
-       {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {error, closed} = gen_tcp:recv(Socket, 0, 6000),
-       ok.
-
-ws8_single_bytes(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, [
-               "GET /websocket HTTP/1.1\r\n"
-               "Host: localhost\r\n"
-               "Connection: Upgrade\r\n"
-               "Upgrade: websocket\r\n"
-               "Sec-WebSocket-Origin: http://localhost\r\n"
-               "Sec-WebSocket-Version: 8\r\n"
-               "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
-               "\r\n"]),
-       {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest}
-               = erlang:decode_packet(http, Handshake, []),
-       [Headers, <<>>] = websocket_headers(
-               erlang:decode_packet(httph, Rest, []), []),
-       {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers),
-       {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers),
-       {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}
-               = lists:keyfind("sec-websocket-accept", 1, Headers),
-       ok = gen_tcp:send(Socket, << 16#81 >>), %% send one byte
-       ok = timer:sleep(100), %% sleep for a period
-       ok = gen_tcp:send(Socket, << 16#85 >>), %% send another and so on
-        ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#37 >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#fa >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#21 >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#3d >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#7f >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#9f >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#4d >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#51 >>),
-       ok = timer:sleep(100),
-       ok = gen_tcp:send(Socket, << 16#58 >>),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 0:8 >>), %% ping
-       {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong
-       ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>), %% close
-       {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {error, closed} = gen_tcp:recv(Socket, 0, 6000),
-       ok.
-
-ws_timeout_hibernate(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, [
-               "GET /ws_timeout_hibernate HTTP/1.1\r\n"
-               "Host: localhost\r\n"
-               "Connection: Upgrade\r\n"
-               "Upgrade: websocket\r\n"
-               "Sec-WebSocket-Origin: http://localhost\r\n"
-               "Sec-WebSocket-Version: 8\r\n"
-               "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
-               "\r\n"]),
-       {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest}
-               = erlang:decode_packet(http, Handshake, []),
-       [Headers, <<>>] = websocket_headers(
-               erlang:decode_packet(httph, Rest, []), []),
-       {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers),
-       {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers),
-       {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}
-               = lists:keyfind("sec-websocket-accept", 1, Headers),
-       {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {error, closed} = gen_tcp:recv(Socket, 0, 6000),
-       ok.
-
-ws8_init_shutdown(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, [
-               "GET /ws_init_shutdown HTTP/1.1\r\n"
-               "Host: localhost\r\n"
-               "Connection: Upgrade\r\n"
-               "Upgrade: websocket\r\n"
-               "Sec-WebSocket-Origin: http://localhost\r\n"
-               "Sec-WebSocket-Version: 8\r\n"
-               "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
-               "\r\n"]),
-       {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, {http_response, {1, 1}, 403, "Forbidden"}, _Rest}
-               = erlang:decode_packet(http, Handshake, []),
-       {error, closed} = gen_tcp:recv(Socket, 0, 6000),
-       ok.
-
-ws13(Config) ->
-       {port, Port} = lists:keyfind(port, 1, Config),
-       {ok, Socket} = gen_tcp:connect("localhost", Port,
-               [binary, {active, false}, {packet, raw}]),
-       ok = gen_tcp:send(Socket, [
-               "GET /websocket HTTP/1.1\r\n"
-               "Host: localhost\r\n"
-               "Connection: Upgrade\r\n"
-               "Origin: http://localhost\r\n"
-               "Sec-WebSocket-Version: 13\r\n"
-               "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
-               "Upgrade: websocket\r\n"
-               "\r\n"]),
-       {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000),
-       {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest}
-               = erlang:decode_packet(http, Handshake, []),
-       [Headers, <<>>] = websocket_headers(
-               erlang:decode_packet(httph, Rest, []), []),
-       {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers),
-       {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers),
-       {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}
-               = lists:keyfind("sec-websocket-accept", 1, Headers),
-       %% text
-       ok = gen_tcp:send(Socket, << 16#81, 16#85, 16#37, 16#fa, 16#21, 16#3d,
-               16#7f, 16#9f, 16#4d, 16#51, 16#58 >>),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       %% binary (empty)
-       ok = gen_tcp:send(Socket, << 1:1, 0:3, 2:4, 0:8 >>),
-       {ok, << 1:1, 0:3, 2:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000),
-       %% binary
-       ok = gen_tcp:send(Socket, << 16#82, 16#85, 16#37, 16#fa, 16#21, 16#3d,
-               16#7f, 16#9f, 16#4d, 16#51, 16#58 >>),
-       {ok, << 1:1, 0:3, 2:4, 0:1, 5:7, "Hello" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       %% Receives.
-       {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>}
-               = gen_tcp:recv(Socket, 0, 6000),
-       ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 0:8 >>), %% ping
-       {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong
-       ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>), %% close
-       {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000),
-       {error, closed} = gen_tcp:recv(Socket, 0, 6000),
-       ok.
-
-websocket_headers({ok, http_eoh, Rest}, Acc) ->
-       [Acc, Rest];
-websocket_headers({ok, {http_header, _I, Key, _R, Value}, Rest}, Acc) ->
-       F = fun(S) when is_atom(S) -> S; (S) -> string:to_lower(S) end,
-       websocket_headers(erlang:decode_packet(httph, Rest, []),
-               [{F(Key), Value}|Acc]).
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_timeout_hibernate_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_timeout_hibernate_handler.erl
deleted file mode 100644 (file)
index 777948a..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-%% Feel free to use, reuse and abuse the code in this file.
-
--module(ws_timeout_hibernate_handler).
--behaviour(cowboy_http_handler).
--behaviour(cowboy_http_websocket_handler).
--export([init/3, handle/2, terminate/2]).
--export([websocket_init/3, websocket_handle/3,
-       websocket_info/3, websocket_terminate/3]).
-
-init(_Any, _Req, _Opts) ->
-       {upgrade, protocol, cowboy_http_websocket}.
-
-handle(_Req, _State) ->
-       exit(badarg).
-
-terminate(_Req, _State) ->
-       exit(badarg).
-
-websocket_init(_TransportName, Req, _Opts) ->
-       {ok, Req, undefined, 1000, hibernate}.
-
-websocket_handle(_Frame, Req, State) ->
-       {ok, Req, State, hibernate}.
-
-websocket_info(_Info, Req, State) ->
-       {ok, Req, State, hibernate}.
-
-websocket_terminate(_Reason, _Req, _State) ->
-       ok.
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/hash.mk b/rabbitmq-server/plugins-src/cowboy-wrapper/hash.mk
deleted file mode 100644 (file)
index 5071907..0000000
+++ /dev/null
@@ -1 +0,0 @@
-UPSTREAM_SHORT_HASH:=4b93c2d
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/package.mk b/rabbitmq-server/plugins-src/cowboy-wrapper/package.mk
deleted file mode 100644 (file)
index fd29da9..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-APP_NAME:=cowboy
-
-UPSTREAM_GIT:=https://github.com/rabbitmq/cowboy.git
-UPSTREAM_REVISION:=4b93c2d19a10e5d9cee
-RETAIN_ORIGINAL_VERSION:=true
-WRAPPER_PATCHES:=\
-       0001-R12-fake-iodata-type.patch \
-       0002-R12-drop-all-references-to-boolean-type.patch \
-       0003-R12-drop-all-references-to-reference-type.patch \
-       0004-R12-drop-references-to-iodata-type.patch \
-       0005-R12-drop-references-to-Default-any-type.patch \
-       0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch \
-       0007-R12-type-definitions-must-be-ordered.patch \
-       0008-sec-websocket-protocol.patch
-
-# Path include/http.hrl is needed during compilation
-INCLUDE_DIRS+=$(CLONE_DIR)
-
-ORIGINAL_APP_FILE:=$(CLONE_DIR)/src/$(APP_NAME).app.src
-DO_NOT_GENERATE_APP_FILE=true
-
-define construct_app_commands
-       cp $(CLONE_DIR)/LICENSE $(APP_DIR)/LICENSE-ISC-Cowboy
-endef
diff --git a/rabbitmq-server/plugins-src/do-package.mk b/rabbitmq-server/plugins-src/do-package.mk
deleted file mode 100644 (file)
index b76c9a5..0000000
+++ /dev/null
@@ -1,578 +0,0 @@
-# This file produces the makefile fragment associated with a package.
-# It includes the package's package.mk, interprets all of the
-# variables that package.mk might have set, and then visits any
-# dependencies of the package that have not already been visited.
-#
-# PACKAGE_DIR should be set to the canonical path of the package.
-
-# Mark that this package has been visited, so we can avoid doing it again
-DONE_$(PACKAGE_DIR):=true
-
-# Declare the standard per-package targets
-.PHONY: $(PACKAGE_DIR)+dist $(PACKAGE_DIR)+clean $(PACKAGE_DIR)+clean-recursive
-
-$(PACKAGE_DIR)+dist:: $(PACKAGE_DIR)/dist/.done
-
-$(PACKAGE_DIR)+srcdist:: $(PACKAGE_DIR)/srcdist/.done
-
-$(PACKAGE_DIR)+clean::
-
-$(PACKAGE_DIR)+clean-with-deps:: $(PACKAGE_DIR)+clean
-
-# Hook into the "all package" targets used by the main public-umbrella
-# makefile
-all-packages:: $(PACKAGE_DIR)/dist/.done
-clean-all-packages:: $(PACKAGE_DIR)+clean
-
-ifndef NON_INTEGRATED_$(PACKAGE_DIR)
-
-PACKAGE_NAME=$(notdir $(abspath $(PACKAGE_DIR)))
-
-# Set all the per-package vars to their default values
-
-# The packages upon which this package depends
-DEPS:=
-
-# The name of the erlang application produced by the package
-APP_NAME=$(call package_to_app_name,$(PACKAGE_NAME))
-
-# The location of the .app file which is used as the basis for the
-# .app file which goes into the .ez
-ORIGINAL_APP_FILE=$(EBIN_DIR)/$(APP_NAME).app
-
-# The location of the source for that file (before the modules list is
-# generated). Ignored if DO_NOT_GENERATE_APP_FILE is set.
-ORIGINAL_APP_SOURCE=$(PACKAGE_DIR)/src/$(APP_NAME).app.src
-
-# Set to prevent generation of the app file.
-DO_NOT_GENERATE_APP_FILE:=
-
-# Should the .ez files for this package, its dependencies, and its
-# source distribution be included in RabbitMQ releases, and should we test
-# this plugin when invoking "make test" in the umbrella?
-RELEASABLE:=
-
-# The options to pass to erlc when compiling .erl files in this
-# package
-PACKAGE_ERLC_OPTS=$(ERLC_OPTS)
-
-# The directories containing Erlang source files
-SOURCE_DIRS:=$(PACKAGE_DIR)/src
-
-# The Erlang source files to compile and include in the package .ez file
-SOURCE_ERLS=$(strip $(foreach D,$(SOURCE_DIRS),$(wildcard $(D)/*.erl)))
-
-# The directories containing Erlang *.hrl files to include in the
-# package .ez file.
-INCLUDE_DIRS:=$(PACKAGE_DIR)/include
-
-# The Erlang .hrl files to include in the package .ez file.
-INCLUDE_HRLS=$(strip $(foreach D,$(INCLUDE_DIRS),$(wildcard $(D)/*.hrl)))
-
-# The location of the directory containing the .app file.  This is
-# also where the .beam files produced by compiling SOURCE_ERLS will
-# go.
-EBIN_DIR:=$(PACKAGE_DIR)/ebin
-
-# The .beam files for the application.
-EBIN_BEAMS=$(patsubst %,$(EBIN_DIR)/%.beam,$(notdir $(basename $(SOURCE_ERLS))))
-
-# Erlang expressions which will be invoked during testing (not in the
-# broker).
-STANDALONE_TEST_COMMANDS:=
-
-# Erlang expressions which will be invoked within the broker during
-# testing.
-WITH_BROKER_TEST_COMMANDS:=
-
-# Config file to give to the test broker.
-WITH_BROKER_TEST_CONFIG:=
-
-# Test scripts which should be invokedduring testing
-STANDALONE_TEST_SCRIPTS:=
-
-# Test scripts which should be invoked alongside a running broker
-# during testing
-WITH_BROKER_TEST_SCRIPTS:=
-
-# Test scripts which should be invoked to configure the broker before testing
-WITH_BROKER_SETUP_SCRIPTS:=
-
-# When cleaning, should we also remove the cloned directory for
-# wrappers?
-PRESERVE_CLONE_DIR?=
-
-# The directory within the package that contains tests
-TEST_DIR=$(PACKAGE_DIR)/test
-
-# The directories containing .erl files for tests
-TEST_SOURCE_DIRS=$(TEST_DIR)/src
-
-# The .erl files for tests
-TEST_SOURCE_ERLS=$(strip $(foreach D,$(TEST_SOURCE_DIRS),$(wildcard $(D)/*.erl)))
-
-# Where to put .beam files produced by compiling TEST_SOURCE_ERLS
-TEST_EBIN_DIR=$(TEST_DIR)/ebin
-
-# The .beam files produced by compiling TEST_SOURCE_ERLS
-TEST_EBIN_BEAMS=$(patsubst %,$(TEST_EBIN_DIR)/%.beam,$(notdir $(basename $(TEST_SOURCE_ERLS))))
-
-# Wrapper package variables
-
-# The git URL to clone from.  Setting this variable marks the package
-# as a wrapper package.
-UPSTREAM_GIT:=
-
-# The Mercurial URL to clone from.  Setting this variable marks the
-# package as a wrapper package.
-UPSTREAM_HG:=
-
-UPSTREAM_TYPE=$(if $(UPSTREAM_GIT),git)$(if $(UPSTREAM_HG),hg)
-
-# The upstream revision to clone.  Leave empty for default or master
-UPSTREAM_REVISION:=
-
-# Where to clone the upstream repository to
-CLONE_DIR=$(PACKAGE_DIR)/$(patsubst %-wrapper,%,$(PACKAGE_NAME))-$(UPSTREAM_TYPE)
-
-# The source directories contained in the cloned repositories.  These
-# are appended to SOURCE_DIRS.
-UPSTREAM_SOURCE_DIRS=$(CLONE_DIR)/src
-
-# The include directories contained in the cloned repositories.  These
-# are appended to INCLUDE_DIRS.
-UPSTREAM_INCLUDE_DIRS=$(CLONE_DIR)/include
-
-# Patches to apply to the upstream codebase after cloning, if any
-WRAPPER_PATCHES:=
-
-# The version number to assign to the build artifacts
-PACKAGE_VERSION=$(VERSION)
-
-# Should the app version incorporate the version from the original
-# .app file?
-RETAIN_ORIGINAL_VERSION:=
-
-# The original version that should be incorporated into the package
-# version if RETAIN_ORIGINAL_VERSION is set.  If empty, the original
-# version will be extracted from ORIGINAL_APP_FILE.
-ORIGINAL_VERSION:=
-
-# For customising construction of the build application directory.
-CONSTRUCT_APP_PREREQS:=
-construct_app_commands=
-
-package_rules=
-
-# Now let the package makefile fragment do its stuff
-include $(PACKAGE_DIR)/package.mk
-
-# package_rules provides a convenient way to force prompt expansion
-# of variables, including expansion in commands that would otherwise
-# be deferred.
-#
-# If package_rules is defined by the package makefile, we expand it
-# and eval it.  The point here is to get around the fact that make
-# defers expansion of commands.  But if we use package variables in
-# targets, as we naturally want to do, deferred expansion doesn't
-# work: They might have been trampled on by a later package.  Because
-# we expand package_rules here, references to package varialbes will
-# get expanded with the values we expect.
-#
-# The downside is that any variable references for which expansion
-# really should be deferred need to be protected by doulbing up the
-# dollar.  E.g., inside package_rules, you should write $$@, not $@.
-#
-# We use the same trick again below.
-ifdef package_rules
-$(eval $(package_rules))
-endif
-
-# Some variables used for brevity below.  Packages can't set these.
-APP_FILE=$(PACKAGE_DIR)/build/$(APP_NAME).app.$(PACKAGE_VERSION)
-APP_DONE=$(PACKAGE_DIR)/build/app/.done.$(PACKAGE_VERSION)
-APP_DIR=$(PACKAGE_DIR)/build/app/$(APP_NAME)-$(PACKAGE_VERSION)
-EZ_FILE=$(PACKAGE_DIR)/dist/$(APP_NAME)-$(PACKAGE_VERSION).ez
-DEPS_FILE=$(PACKAGE_DIR)/build/deps.mk
-
-
-# Convert the DEPS package names to canonical paths
-DEP_PATHS:=$(foreach DEP,$(DEPS),$(call package_to_path,$(DEP)))
-
-# Handle RETAIN_ORIGINAL_VERSION / ORIGINAL_VERSION
-ifdef RETAIN_ORIGINAL_VERSION
-
-# Automatically acquire ORIGINAL_VERSION from ORIGINAL_APP_FILE
-ifndef ORIGINAL_VERSION
-
-# The generated ORIGINAL_VERSION setting goes in build/version.mk
-$(eval $(call safe_include,$(PACKAGE_DIR)/build/version.mk))
-
-$(PACKAGE_DIR)/build/version.mk: $(ORIGINAL_APP_FILE)
-       sed -n -e 's|^.*{vsn, *"\([^"]*\)".*$$|ORIGINAL_VERSION:=\1|p' <$< >$@
-
-$(APP_FILE): $(PACKAGE_DIR)/build/version.mk
-
-endif # ifndef ORIGINAL_VERSION
-
-PACKAGE_VERSION:=$(ORIGINAL_VERSION)-rmq$(VERSION)
-
-endif # ifdef RETAIN_ORIGINAL_VERSION
-
-# Handle wrapper packages
-ifneq ($(UPSTREAM_TYPE),)
-
-SOURCE_DIRS+=$(UPSTREAM_SOURCE_DIRS)
-INCLUDE_DIRS+=$(UPSTREAM_INCLUDE_DIRS)
-
-define package_rules
-
-# We use --no-backup-if-mismatch to prevent .orig files ending up in
-# source builds and causing warnings on Debian if the patches have
-# fuzz.
-ifdef UPSTREAM_GIT
-$(CLONE_DIR)/.done:
-       rm -rf $(CLONE_DIR)
-       git clone $(UPSTREAM_GIT) $(CLONE_DIR)
-       # Work around weird github breakage (bug 25264)
-       cd $(CLONE_DIR) && git pull
-       $(if $(UPSTREAM_REVISION),cd $(CLONE_DIR) && git checkout $(UPSTREAM_REVISION))
-       $(if $(WRAPPER_PATCHES),$(foreach F,$(WRAPPER_PATCHES),patch -E -z .umbrella-orig -d $(CLONE_DIR) -p1 <$(PACKAGE_DIR)/$(F) &&) :)
-       find $(CLONE_DIR) -name "*.umbrella-orig" -delete
-       touch $$@
-endif # UPSTREAM_GIT
-
-ifdef UPSTREAM_HG
-$(CLONE_DIR)/.done:
-       rm -rf $(CLONE_DIR)
-       hg clone -r $(or $(UPSTREAM_REVISION),default) $(UPSTREAM_HG) $(CLONE_DIR)
-       $(if $(WRAPPER_PATCHES),$(foreach F,$(WRAPPER_PATCHES),patch -E -z .umbrella-orig -d $(CLONE_DIR) -p1 <$(PACKAGE_DIR)/$(F) &&) :)
-       find $(CLONE_DIR) -name "*.umbrella-orig" -delete
-       touch $$@
-endif # UPSTREAM_HG
-
-# When we clone, we need to remake anything derived from the app file
-# (e.g. build/version.mk).
-$(ORIGINAL_APP_FILE): $(CLONE_DIR)/.done
-
-# We include the commit hash into the package version, via hash.mk
-# (not in build/ because we want it to survive
-#   make PRESERVE_CLONE_DIR=true clean
-# for obvious reasons)
-$(eval $(call safe_include,$(PACKAGE_DIR)/hash.mk))
-
-$(PACKAGE_DIR)/hash.mk: $(CLONE_DIR)/.done
-       @mkdir -p $$(@D)
-ifdef UPSTREAM_GIT
-       echo UPSTREAM_SHORT_HASH:=`git --git-dir=$(CLONE_DIR)/.git log -n 1 HEAD | grep commit | cut -b 8-14` >$$@
-endif
-ifdef UPSTREAM_HG
-       echo UPSTREAM_SHORT_HASH:=`hg id -R $(CLONE_DIR) -i | cut -c -7` >$$@
-endif
-
-$(APP_FILE): $(PACKAGE_DIR)/hash.mk
-
-PACKAGE_VERSION:=$(PACKAGE_VERSION)-$(UPSTREAM_TYPE)$(UPSTREAM_SHORT_HASH)
-
-$(PACKAGE_DIR)+clean::
-       [ "x" != "x$(PRESERVE_CLONE_DIR)" ] || rm -rf $(CLONE_DIR) hash.mk
-endef # package_rules
-$(eval $(package_rules))
-
-endif # UPSTREAM_TYPE
-
-# Generate a rule to compile .erl files from the directory $(1) into
-# directory $(2), taking extra erlc options from $(3)
-define package_source_dir_targets
-$(2)/%.beam: $(1)/%.erl $(PACKAGE_DIR)/build/dep-apps/.done | $(DEPS_FILE)
-       @mkdir -p $$(@D)
-       ERL_LIBS=$(PACKAGE_DIR)/build/dep-apps $(ERLC) $(PACKAGE_ERLC_OPTS) $(foreach D,$(INCLUDE_DIRS),-I $(D)) -pa $$(@D) -o $$(@D) $(3) $$<
-
-endef
-
-$(eval $(foreach D,$(SOURCE_DIRS),$(call package_source_dir_targets,$(D),$(EBIN_DIR),)))
-$(eval $(foreach D,$(TEST_SOURCE_DIRS),$(call package_source_dir_targets,$(D),$(TEST_EBIN_DIR),-pa $(EBIN_DIR))))
-
-# Commands to run the broker for tests
-#
-# $(1): The value for RABBITMQ_SERVER_START_ARGS
-# $(2): Extra env var settings when invoking the rabbitmq-server script
-# $(3): Extra .ezs to copy into the plugins dir
-define run_broker
-       rm -rf $(TEST_TMPDIR)
-       mkdir -p $(foreach D,log plugins $(NODENAME),$(TEST_TMPDIR)/$(D))
-       cp -p $(PACKAGE_DIR)/dist/*.ez $(TEST_TMPDIR)/plugins
-       $(call copy,$(3),$(TEST_TMPDIR)/plugins)
-       rm -f $(TEST_TMPDIR)/plugins/rabbit_common*.ez
-       RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \
-           RABBITMQ_ENABLED_PLUGINS_FILE=$(TEST_TMPDIR)/enabled_plugins \
-           $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmq-plugins \
-           set --offline $$$$(RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \
-            RABBITMQ_ENABLED_PLUGINS_FILE=$(TEST_TMPDIR)/enabled_plugins \
-           $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmq-plugins list -m | tr '\n' ' ')
-       MAKE="$(MAKE)" \
-         RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \
-         RABBITMQ_ENABLED_PLUGINS_FILE=$(TEST_TMPDIR)/enabled_plugins \
-         RABBITMQ_LOG_BASE=$(TEST_TMPDIR)/log \
-         RABBITMQ_MNESIA_BASE=$(TEST_TMPDIR)/$(NODENAME) \
-         RABBITMQ_PID_FILE=$(TEST_TMPDIR)/$(NODENAME).pid \
-         RABBITMQ_NODENAME=$(NODENAME) \
-         RABBITMQ_SERVER_START_ARGS=$(1) \
-         $(2) $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmq-server
-endef
-
-# Commands to run the package's test suite
-#
-# $(1): Extra .ezs to copy into the plugins dir
-define run_with_broker_tests
-$(if $(WITH_BROKER_TEST_COMMANDS)$(WITH_BROKER_TEST_SCRIPTS),$(call run_with_broker_tests_aux,$1))
-endef
-
-define run_with_broker_tests_aux
-       $(call run_broker,'-pa $(TEST_EBIN_DIR) -coverage directories ["$(EBIN_DIR)"$(COMMA)"$(TEST_EBIN_DIR)"]',RABBITMQ_CONFIG_FILE=$(WITH_BROKER_TEST_CONFIG),$(1)) &
-       $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmqctl -n $(NODENAME) wait $(TEST_TMPDIR)/$(NODENAME).pid
-       echo > $(TEST_TMPDIR)/rabbit-test-output && \
-       if $(foreach SCRIPT,$(WITH_BROKER_SETUP_SCRIPTS),$(SCRIPT) &&) \
-           $(foreach CMD,$(WITH_BROKER_TEST_COMMANDS), \
-            echo >> $(TEST_TMPDIR)/rabbit-test-output && \
-            echo "$(CMD)." \
-               | tee -a $(TEST_TMPDIR)/rabbit-test-output \
-               | $(ERL_CALL) $(ERL_CALL_OPTS) \
-               | tee -a $(TEST_TMPDIR)/rabbit-test-output \
-               | egrep "{ok, (ok|passed)}" >/dev/null &&) \
-           MAKE="$(MAKE)" RABBITMQ_NODENAME="$(NODENAME)" \
-             $(foreach SCRIPT,$(WITH_BROKER_TEST_SCRIPTS),$(SCRIPT) &&) : ; \
-        then \
-         touch $(TEST_TMPDIR)/.passed ; \
-         printf "\nPASSED\n" ; \
-       else \
-         cat $(TEST_TMPDIR)/rabbit-test-output ; \
-         printf "\n\nFAILED\n" ; \
-       fi
-       sleep 1
-       echo "rabbit_misc:report_cover(), init:stop()." | $(ERL_CALL) $(ERL_CALL_OPTS)
-       sleep 1
-       test -f $(TEST_TMPDIR)/.passed
-endef
-
-# The targets common to all integrated packages
-define package_rules
-
-# Put all relevant ezs into the dist dir for this package, including
-# the main ez file produced by this package
-#
-# When the package version changes, our .ez filename will change, and
-# we need to regenerate the dist directory.  So the dependency needs
-# to go via a stamp file that incorporates the version in its name.
-# But we need a target with a fixed name for other packages to depend
-# on.  And it can't be a phony, as a phony will always get rebuilt.
-# Hence the need for two stamp files here.
-$(PACKAGE_DIR)/dist/.done: $(PACKAGE_DIR)/dist/.done.$(PACKAGE_VERSION)
-       touch $$@
-
-$(PACKAGE_DIR)/dist/.done.$(PACKAGE_VERSION): $(PACKAGE_DIR)/build/dep-ezs/.done $(APP_DONE)
-       rm -rf $$(@D)
-       mkdir -p $$(@D)
-       cd $(dir $(APP_DIR)) && zip -q -r $$(abspath $(EZ_FILE)) $(notdir $(APP_DIR))
-       $$(call copy,$$(wildcard $$(<D)/*.ez),$(PACKAGE_DIR)/dist)
-       touch $$@
-
-# Gather all the ezs from dependency packages
-$(PACKAGE_DIR)/build/dep-ezs/.done: $(foreach P,$(DEP_PATHS),$(P)/dist/.done)
-       rm -rf $$(@D)
-       mkdir -p $$(@D)
-       @echo [elided] copy dependent ezs
-       @$(if $(DEP_PATHS),$(foreach P,$(DEP_PATHS),$$(call copy,$$(wildcard $(P)/dist/*.ez),$$(@D),&&)) :)
-       touch $$@
-
-# Put together the main app tree for this package
-$(APP_DONE): $(EBIN_BEAMS) $(INCLUDE_HRLS) $(APP_FILE) $(CONSTRUCT_APP_PREREQS)
-       rm -rf $$(@D)
-       mkdir -p $(APP_DIR)/ebin $(APP_DIR)/include
-       @echo [elided] copy beams to ebin
-       @$(call copy,$(EBIN_BEAMS),$(APP_DIR)/ebin)
-       cp -p $(APP_FILE) $(APP_DIR)/ebin/$(APP_NAME).app
-       $(call copy,$(INCLUDE_HRLS),$(APP_DIR)/include)
-       $(construct_app_commands)
-       touch $$@
-
-# Copy the .app file into place, set its version number
-$(APP_FILE): $(ORIGINAL_APP_FILE)
-       @mkdir -p $$(@D)
-       sed -e 's|{vsn, *\"[^\"]*\"|{vsn,\"$(PACKAGE_VERSION)\"|' <$$< >$$@
-
-ifndef DO_NOT_GENERATE_APP_FILE
-
-# Generate the .app file. Note that this is a separate step from above
-# so that the plugin still works correctly when symlinked as a directory
-$(ORIGINAL_APP_FILE): $(ORIGINAL_APP_SOURCE) $(SOURCE_ERLS) $(UMBRELLA_BASE_DIR)/generate_app
-       @mkdir -p $$(@D)
-       escript $(UMBRELLA_BASE_DIR)/generate_app $$< $$@ $(SOURCE_DIRS)
-
-$(PACKAGE_DIR)+clean::
-       rm -f $(ORIGINAL_APP_FILE)
-
-endif
-
-# Unpack the ezs from dependency packages, so that their contents are
-# accessible to erlc
-$(PACKAGE_DIR)/build/dep-apps/.done: $(PACKAGE_DIR)/build/dep-ezs/.done
-       rm -rf $$(@D)
-       mkdir -p $$(@D)
-       @echo [elided] unzip ezs
-       @cd $$(@D) && $$(foreach EZ,$$(wildcard $(PACKAGE_DIR)/build/dep-ezs/*.ez),unzip -q $$(abspath $$(EZ)) &&) :
-       touch $$@
-
-# Dependency autogeneration.  This is complicated slightly by the need
-# to generate a dependency file which is path-independent.
-$(DEPS_FILE): $(SOURCE_ERLS) $(INCLUDE_HRLS) $(TEST_SOURCE_ERLS)
-       @mkdir -p $$(@D)
-       @echo [elided] generate deps
-       @$$(if $$^,echo $$(subst : ,:,$$(foreach F,$$^,$$(abspath $$(F)):)) | escript $(abspath $(UMBRELLA_BASE_DIR)/generate_deps) $$@ '$$$$(EBIN_DIR)',echo >$$@)
-       @echo [elided] fix test deps
-       @$$(foreach F,$(TEST_EBIN_BEAMS),sed -e 's|^$$$$(EBIN_DIR)/$$(notdir $$(F)):|$$$$(TEST_EBIN_DIR)/$$(notdir $$(F)):|' $$@ > $$@.tmp && mv $$@.tmp $$@ && ) :
-       sed -e 's|$$@|$$$$(DEPS_FILE)|' $$@ > $$@.tmp && mv $$@.tmp $$@
-
-$(eval $(call safe_include,$(DEPS_FILE)))
-
-$(PACKAGE_DIR)/srcdist/.done: $(PACKAGE_DIR)/srcdist/.done.$(PACKAGE_VERSION)
-       touch $$@
-
-$(PACKAGE_DIR)/srcdist/.done.$(PACKAGE_VERSION):
-       mkdir -p $(PACKAGE_DIR)/build/srcdist/
-       rsync -a --exclude '.hg*' --exclude '.git*' --exclude 'build' $(PACKAGE_DIR) $(PACKAGE_DIR)/build/srcdist/$(APP_NAME)-$(PACKAGE_VERSION)
-       mkdir -p $(PACKAGE_DIR)/srcdist/
-       tar cjf $(PACKAGE_DIR)/srcdist/$(APP_NAME)-$(PACKAGE_VERSION)-src.tar.bz2 -C $(PACKAGE_DIR)/build/srcdist/ $(APP_NAME)-$(PACKAGE_VERSION)
-       touch $$@
-
-$(PACKAGE_DIR)+clean::
-       rm -rf $(EBIN_DIR)/*.beam $(TEST_EBIN_DIR)/*.beam $(PACKAGE_DIR)/dist $(PACKAGE_DIR)/srcdist $(PACKAGE_DIR)/build $(PACKAGE_DIR)/erl_crash.dump
-
-$(PACKAGE_DIR)+clean-with-deps:: $(foreach P,$(DEP_PATHS),$(P)+clean-with-deps)
-
-ifdef RELEASABLE
-all-releasable:: $(PACKAGE_DIR)/dist/.done
-
-copy-releasable:: $(PACKAGE_DIR)/dist/.done
-       cp $(PACKAGE_DIR)/dist/*.ez $(PLUGINS_DIST_DIR)
-
-copy-srcdist:: $(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done
-
-endif
-
-$(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done:: $(ORIGINAL_APP_FILE) $(foreach P,$(DEP_PATHS),$(PLUGINS_SRC_DIST_DIR)/$(P)/.srcdist_done)
-       rsync -a --exclude '.hg*' --exclude '.git*' $(PACKAGE_DIR) $(PLUGINS_SRC_DIST_DIR)/
-       [ -f $(PACKAGE_DIR)/license_info ] && cp $(PACKAGE_DIR)/license_info $(PLUGINS_SRC_DIST_DIR)/licensing/license_info_$(PACKAGE_NAME) || true
-       find $(PACKAGE_DIR) -maxdepth 1 -name 'LICENSE-*' -exec cp '{}' $(PLUGINS_SRC_DIST_DIR)/licensing/ \;
-       touch $(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done
-
-# A hook to allow packages to verify that prerequisites are satisfied
-# before running.
-.PHONY: $(PACKAGE_DIR)+pre-run
-$(PACKAGE_DIR)+pre-run::
-
-# Run erlang with the package, its tests, and all its dependencies
-# available.
-.PHONY: $(PACKAGE_DIR)+run
-$(PACKAGE_DIR)+run: $(PACKAGE_DIR)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-run
-       ERL_LIBS=$(PACKAGE_DIR)/dist $(ERL) $(ERL_OPTS) -pa $(TEST_EBIN_DIR)
-
-# Run the broker with the package, its tests, and all its dependencies
-# available.
-.PHONY: $(PACKAGE_DIR)+run-in-broker
-$(PACKAGE_DIR)+run-in-broker: $(PACKAGE_DIR)/dist/.done $(RABBITMQ_SERVER_PATH)/dist/.done $(TEST_EBIN_BEAMS)
-       $(call run_broker,'-pa $(TEST_EBIN_DIR)',RABBITMQ_ALLOW_INPUT=true)
-
-# A hook to allow packages to verify that prerequisites are satisfied
-# before running tests.
-.PHONY: $(PACKAGE_DIR)+pre-test
-$(PACKAGE_DIR)+pre-test::
-
-# Runs the package's tests that operate within (or in conjuction with)
-# a running broker.
-.PHONY: $(PACKAGE_DIR)+in-broker-test
-$(PACKAGE_DIR)+in-broker-test: $(PACKAGE_DIR)/dist/.done $(RABBITMQ_SERVER_PATH)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-test $(PACKAGE_DIR)+standalone-test $(if $(RELEASABLE),$(call chain_test,$(PACKAGE_DIR)+in-broker-test))
-       $(call run_with_broker_tests)
-
-# Running the coverage tests requires Erlang/OTP R14. Note that
-# coverage only covers the in-broker tests.
-.PHONY: $(PACKAGE_DIR)+coverage
-$(PACKAGE_DIR)+coverage: $(PACKAGE_DIR)/dist/.done $(COVERAGE_PATH)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-test
-       $(call run_with_broker_tests,$(COVERAGE_PATH)/dist/*.ez)
-
-# Runs the package's tests that don't need a running broker
-.PHONY: $(PACKAGE_DIR)+standalone-test
-$(PACKAGE_DIR)+standalone-test: $(PACKAGE_DIR)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-test $(if $(RELEASABLE),$(call chain_test,$(PACKAGE_DIR)+standalone-test))
-       $$(if $(STANDALONE_TEST_COMMANDS),\
-         $$(foreach CMD,$(STANDALONE_TEST_COMMANDS),\
-           ERL_LIBS=$(PACKAGE_DIR)/dist $(ERL) -noinput $(ERL_OPTS) -pa $(TEST_EBIN_DIR) -sname standalone_test -eval "init:stop(case $$(CMD) of ok -> 0; passed -> 0; _Else -> 1 end)" &&\
-         )\
-       :)
-       $$(if $(STANDALONE_TEST_SCRIPTS),$$(foreach SCRIPT,$(STANDALONE_TEST_SCRIPTS),$$(SCRIPT) &&) :)
-
-# Run all the package's tests
-.PHONY: $(PACKAGE_DIR)+test
-$(PACKAGE_DIR)+test:: $(PACKAGE_DIR)+standalone-test $(PACKAGE_DIR)+in-broker-test
-
-.PHONY: $(PACKAGE_DIR)+check-xref
-$(PACKAGE_DIR)+check-xref: $(PACKAGE_DIR)/dist/.done
-       UNPACKDIR=$$$$(mktemp -d $(TMPDIR)/tmp.XXXXXXXXXX) && \
-       for ez in $$$$(find $(PACKAGE_DIR)/dist -type f -name "*.ez"); do \
-         unzip -q $$$${ez} -d $$$${UNPACKDIR}; \
-       done && \
-       rm -rf $$$${UNPACKDIR}/rabbit_common-* && \
-       ln -sf $$$$(pwd)/$(RABBITMQ_SERVER_PATH)/ebin $$$${UNPACKDIR} && \
-       OK=true && \
-       { $(UMBRELLA_BASE_DIR)/check_xref $(PACKAGE_DIR) $$$${UNPACKDIR} || OK=false; } && \
-       rm -rf $$$${UNPACKDIR} && \
-       $$$${OK}
-
-check-xref-packages:: $(PACKAGE_DIR)+check-xref
-
-endef
-$(eval $(package_rules))
-
-# Recursing into dependency packages has to be the last thing we do
-# because it will trample all over the per-package variables.
-
-# Recurse into dependency packages
-$(foreach DEP_PATH,$(DEP_PATHS),$(eval $(call do_package,$(DEP_PATH))))
-
-else # NON_INTEGRATED_$(PACKAGE_DIR)
-
-define package_rules
-
-# When the package version changes, our .ez filename will change, and
-# we need to regenerate the dist directory.  So the dependency needs
-# to go via a stamp file that incorporates the version in its name.
-# But we need a target with a fixed name for other packages to depend
-# on.  And it can't be a phony, as a phony will always get rebuilt.
-# Hence the need for two stamp files here.
-$(PACKAGE_DIR)/dist/.done: $(PACKAGE_DIR)/dist/.done.$(VERSION)
-       touch $$@
-
-# Non-integrated packages (rabbitmq-server and rabbitmq-erlang-client)
-# present a dilemma.  We could re-make the package every time we need
-# it.  But that will cause a huge amount of unnecessary rebuilding.
-# Or we could not worry about rebuilding non-integrated packages.
-# That's good for those developing plugins, but not for those who want
-# to work on the broker and erlang client in the context of the
-# plugins.  So instead, we use a conservative approximation to the
-# dependency structure within the package, to tell when to re-run the
-# makefile.
-$(PACKAGE_DIR)/dist/.done.$(VERSION): $(PACKAGE_DIR)/Makefile $(wildcard $(PACKAGE_DIR)/*.mk) $(wildcard $(PACKAGE_DIR)/src/*.erl) $(wildcard $(PACKAGE_DIR)/include/*.hrl) $(wildcard $(PACKAGE_DIR)/*.py) $(foreach DEP,$(NON_INTEGRATED_DEPS_$(PACKAGE_DIR)),$(call package_to_path,$(DEP))/dist/.done)
-       rm -rf $$(@D)
-       $$(MAKE) -C $(PACKAGE_DIR)
-       mkdir -p $$(@D)
-       touch $$@
-
-# When building plugins-src we want to "make clean", but some
-# non-integrated packages will not be there. Don't fall over in that case.
-$(PACKAGE_DIR)+clean::
-       if [ -d $(PACKAGE_DIR) ] ; then $$(MAKE) -C $(PACKAGE_DIR) clean ; fi
-       rm -rf $(PACKAGE_DIR)/dist
-
-endef
-$(eval $(package_rules))
-
-endif # NON_INTEGRATED_$(PACKAGE_DIR)
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/eldap-wrapper/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/Makefile b/rabbitmq-server/plugins-src/eldap-wrapper/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-appify.patch b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-appify.patch
deleted file mode 100644 (file)
index 90ad3d2..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-diff -Naur eldap.orig/ebin/eldap.app eldap/ebin/eldap.app
---- eldap.orig/ebin/eldap.app  1970-01-01 01:00:00.000000000 +0100
-+++ eldap/ebin/eldap.app       2011-01-20 12:47:04.377399296 +0000
-@@ -0,0 +1,10 @@
-+{application, eldap,
-+ [{description, "LDAP Client Library"},
-+  {vsn, "0.01"},
-+  {modules, [
-+    eldap,
-+    'ELDAPv3'
-+  ]},
-+  {registered, []},
-+  {applications, [kernel, stdlib]}
-+ ]}.
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/.done b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/.done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/Makefile b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/Makefile
deleted file mode 100644 (file)
index f5ecba4..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-
-all:
-       (cd src;$(MAKE))
-
-clean:
-       (cd src;$(MAKE) clean)
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/README b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/README
deleted file mode 100644 (file)
index e1bde9d..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-Hi, 
-
-This is 'eldap', the Erlang LDAP library.
-
-It exports an API that can do all possible operations
-you may want to do against an LDAP server. The code has
-been tested to work at some point, but only the bind
-and search operations are running daily in our products,
-so there may be bugs lurking in some parts of the code.
-
-To just use eldap for doing authentication, do like in:
-
- {ok,X} = eldap:open(["ldap.mycorp.com"], []).
- eldap:simple_bind(X, "uid=tobbe,ou=People,dc=mycorp,dc=com", "passwd").
-
-In the doc/README.example you'll find a trace from a
-Erlang shell session as an example on how to setup a
-connection, authenticate (bind) and perform a search.
-Note that by using the option {ssl, true}, you should
-be able to setup an SSL tunnel (LDAPS) if your Erlang
-system has been configured with SSL.
-
-In the test directory there are some hints and examples
-on how to test the code and how to setup and populate 
-an OpenLDAP server. The 'eldap' code has been tested
-agains OpenLDAP, IPlanet and ActiveDirectory servers.
-
-If you plan to incorporate this code into your system
-I suggest that you build a server/supervisor harnesk
-that uses 'eldap' (as we have done in our products).
-
-Good luck ! 
-/Tobbe
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/README.example b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/README.example
deleted file mode 100644 (file)
index b96d5ef..0000000
+++ /dev/null
@@ -1,44 +0,0 @@
-1> {_,S} = eldap:open(["192.168.128.47"], []).
-{ok,<0.30.0>}
-2> eldap:simple_bind(S,"cn=Torbjorn Tornkvist,cn=Users,dc=bluetail,dc=com","qwe123").
-ok
-3> Base = {base, "dc=bluetail,dc=com"}.
-{base,"dc=bluetail,dc=com"}
-4> Scope = {scope, eldap:wholeSubtree()}.
-{scope,wholeSubtree}
-5> Filter = {filter, eldap:equalityMatch("sAMAccountName", "tobbe")}.
-{filter,{equalityMatch,{'AttributeValueAssertion',"sAMAccountName","tobbe"}}}
-6> Search = [Base, Scope, Filter].
-[{base,"dc=bluetail,dc=com"},
- {scope,wholeSubtree},
- {filter,{equalityMatch,{'AttributeValueAssertion',"sAMAccountName","tobbe"}}}]
-7> eldap:search(S, Search).
-{ok,{eldap_search_result,[{eldap_entry,
-                              "CN=Torbjorn Tornkvist,CN=Users,DC=bluetail,DC=com",
-                              [{"manager",
-                                ["CN=Tord Larsson,CN=Users,DC=bluetail,DC=com"]},
-                               {"memberOf",
-                                ["CN=TestGroup2,CN=Users,DC=bluetail,DC=com",
-                                 "CN=TestGroup,CN=Users,DC=bluetail,DC=com",
-                                 "CN=Pre-Windows 2000 Compatible Access,CN=Builtin,DC=bluetail,DC=com",
-                                 "CN=Server Operators,CN=Builtin,DC=bluetail,DC=com"]},
-                               {"accountExpires",["0"]},
-                               {"adminCount",["1"]},
-                               {"badPasswordTime",["127119104851642448"]},
-                               {"badPwdCount",["0"]},
-                               {"codePage",["0"]},
-                               {"cn",["Torbjorn Tornkvist"]},
-                               {"company",["Alteon Web Systems"]},
-                               {"countryCode",["0"]},
-                               {"department",["Bluetail"]},
-                               {"displayName",["Torbjorn Tornkvist"]},
-                               {"mail",["tobbe@bluetail.com"]},
-                               {"givenName",["Torbjorn"]},
-                               {"instanceType",["4"]},
-                               {"lastLogoff",["0"]},
-                               {"lastLogon",["127119109376267104"]},
-                               {"logonCount",[...]},
-                               {"msNPAllowDialin"|...},
-                               {...}|...]}],
-                         [["ldap://bluetail.com/CN=Configuration,DC=bluetail,DC=com"]]}}
-8> 
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/short-desc b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/short-desc
deleted file mode 100644 (file)
index e236da3..0000000
+++ /dev/null
@@ -1 +0,0 @@
-This is 'eldap', the Erlang LDAP library. 
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/ebin/eldap.app b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/ebin/eldap.app
deleted file mode 100644 (file)
index 3c4e87e..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{application, eldap,
- [{description, "LDAP Client Library"},
-  {vsn, "0.01"},
-  {modules, [
-    eldap,
-    'ELDAPv3'
-  ]},
-  {registered, []},
-  {applications, [kernel, stdlib]}
- ]}.
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/include/eldap.hrl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/include/eldap.hrl
deleted file mode 100644 (file)
index ee5ad2f..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
--ifndef( _ELDAP_HRL ).
--define( _ELDAP_HRL , 1 ).
-
-%%%
-%%% Search input parameters
-%%%
--record(eldap_search, {
-         base = [],             % Baseobject
-         filter = [],           % Search conditions
-         scope,                 % Search scope
-         attributes = [],       % Attributes to be returned
-         types_only = false,    % Return types+values or types
-         timeout = 0            % Timelimit for search
-        }).
-
-%%%
-%%% Returned search result
-%%%
--record(eldap_search_result, {
-         entries = [],          % List of #eldap_entry{} records
-         referrals = []         % List of referrals
-         }).
-
-%%%
-%%% LDAP entry
-%%%
--record(eldap_entry, {
-         object_name = "",      % The DN for the entry
-         attributes = []        % List of {Attribute, Value} pairs
-        }).
-
--endif.
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/ELDAPv3.asn b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/ELDAPv3.asn
deleted file mode 100644 (file)
index 0cfac48..0000000
+++ /dev/null
@@ -1,291 +0,0 @@
--- LDAPv3 ASN.1 specification, taken from RFC 2251
-
--- Lightweight-Directory-Access-Protocol-V3 DEFINITIONS
-ELDAPv3 DEFINITIONS
-IMPLICIT TAGS ::=
-
-BEGIN
-
-LDAPMessage ::= SEQUENCE {
-        messageID       MessageID,
-        protocolOp      CHOICE {
-                bindRequest     BindRequest,
-                bindResponse    BindResponse,
-                unbindRequest   UnbindRequest,
-                searchRequest   SearchRequest,
-                searchResEntry  SearchResultEntry,
-                searchResDone   SearchResultDone,
-                searchResRef    SearchResultReference,
-                modifyRequest   ModifyRequest,
-                modifyResponse  ModifyResponse,
-                addRequest      AddRequest,
-                addResponse     AddResponse,
-                delRequest      DelRequest,
-                delResponse     DelResponse,
-                modDNRequest    ModifyDNRequest,
-                modDNResponse   ModifyDNResponse,
-                compareRequest  CompareRequest,
-                compareResponse CompareResponse,
-                abandonRequest  AbandonRequest,
-                extendedReq     ExtendedRequest,
-                extendedResp    ExtendedResponse },
-         controls       [0] Controls OPTIONAL }
-
-MessageID ::= INTEGER (0 .. maxInt)
-
-maxInt INTEGER ::= 2147483647 -- (2^^31 - 1) --
-
-LDAPString ::= OCTET STRING
-
-LDAPOID ::= OCTET STRING
-
-LDAPDN ::= LDAPString
-
-RelativeLDAPDN ::= LDAPString
-
-AttributeType ::= LDAPString
-
-AttributeDescription ::= LDAPString
-
-
-
-
--- Wahl, et. al.               Standards Track                    [Page 44]
--- \f
--- RFC 2251                         LDAPv3                    December 1997
-
-
-AttributeDescriptionList ::= SEQUENCE OF
-        AttributeDescription
-
-AttributeValue ::= OCTET STRING
-
-AttributeValueAssertion ::= SEQUENCE {
-        attributeDesc   AttributeDescription,
-        assertionValue  AssertionValue }
-
-AssertionValue ::= OCTET STRING
-
-Attribute ::= SEQUENCE {
-        type    AttributeDescription,
-        vals    SET OF AttributeValue }
-
-MatchingRuleId ::= LDAPString
-
-LDAPResult ::= SEQUENCE {
-        resultCode      ENUMERATED {
-                     success                      (0),
-                     operationsError              (1),
-                     protocolError                (2),
-                     timeLimitExceeded            (3),
-                     sizeLimitExceeded            (4),
-                     compareFalse                 (5),
-                     compareTrue                  (6),
-                     authMethodNotSupported       (7),
-                     strongAuthRequired           (8),
-                                -- 9 reserved --
-                     referral                     (10),  -- new
-                     adminLimitExceeded           (11),  -- new
-                     unavailableCriticalExtension (12),  -- new
-                     confidentialityRequired      (13),  -- new
-                     saslBindInProgress           (14),  -- new
-                     noSuchAttribute              (16),
-                     undefinedAttributeType       (17),
-                     inappropriateMatching        (18),
-                     constraintViolation          (19),
-                     attributeOrValueExists       (20),
-                     invalidAttributeSyntax       (21),
-                                -- 22-31 unused --
-                     noSuchObject                 (32),
-                     aliasProblem                 (33),
-                     invalidDNSyntax              (34),
-                     -- 35 reserved for undefined isLeaf --
-                     aliasDereferencingProblem    (36),
-                                -- 37-47 unused --
-                     inappropriateAuthentication  (48),
-
--- Wahl, et. al.               Standards Track                    [Page 45]
--- \f
--- RFC 2251                         LDAPv3                    December 1997
-
-
-                     invalidCredentials           (49),
-                     insufficientAccessRights     (50),
-                     busy                         (51),
-                     unavailable                  (52),
-                     unwillingToPerform           (53),
-                     loopDetect                   (54),
-                                -- 55-63 unused --
-                     namingViolation              (64),
-                     objectClassViolation         (65),
-                     notAllowedOnNonLeaf          (66),
-                     notAllowedOnRDN              (67),
-                     entryAlreadyExists           (68),
-                     objectClassModsProhibited    (69),
-                                -- 70 reserved for CLDAP --
-                     affectsMultipleDSAs          (71), -- new
-                                -- 72-79 unused --
-                     other                        (80) },
-                     -- 81-90 reserved for APIs --
-        matchedDN       LDAPDN,
-        errorMessage    LDAPString,
-        referral        [3] Referral OPTIONAL }
-
-Referral ::= SEQUENCE OF LDAPURL
-
-LDAPURL ::= LDAPString -- limited to characters permitted in URLs
-
-Controls ::= SEQUENCE OF Control
-
-Control ::= SEQUENCE {
-        controlType             LDAPOID,
-        criticality             BOOLEAN DEFAULT FALSE,
-        controlValue            OCTET STRING OPTIONAL }
-
-BindRequest ::= [APPLICATION 0] SEQUENCE {
-        version                 INTEGER (1 .. 127),
-        name                    LDAPDN,
-        authentication          AuthenticationChoice }
-
-AuthenticationChoice ::= CHOICE {
-        simple                  [0] OCTET STRING,
-                                 -- 1 and 2 reserved
-        sasl                    [3] SaslCredentials }
-
-SaslCredentials ::= SEQUENCE {
-        mechanism               LDAPString,
-        credentials             OCTET STRING OPTIONAL }
-
-BindResponse ::= [APPLICATION 1] SEQUENCE {
-
--- Wahl, et. al.               Standards Track                    [Page 46]
--- \f
--- RFC 2251                         LDAPv3                    December 1997
-
-
-     COMPONENTS OF LDAPResult,
-     serverSaslCreds    [7] OCTET STRING OPTIONAL }
-
-UnbindRequest ::= [APPLICATION 2] NULL
-
-SearchRequest ::= [APPLICATION 3] SEQUENCE {
-        baseObject      LDAPDN,
-        scope           ENUMERATED {
-                baseObject              (0),
-                singleLevel             (1),
-                wholeSubtree            (2) },
-        derefAliases    ENUMERATED {
-                neverDerefAliases       (0),
-                derefInSearching        (1),
-                derefFindingBaseObj     (2),
-                derefAlways             (3) },
-        sizeLimit       INTEGER (0 .. maxInt),
-        timeLimit       INTEGER (0 .. maxInt),
-        typesOnly       BOOLEAN,
-        filter          Filter,
-        attributes      AttributeDescriptionList }
-
-Filter ::= CHOICE {
-        and             [0] SET OF Filter,
-        or              [1] SET OF Filter,
-        not             [2] Filter,
-        equalityMatch   [3] AttributeValueAssertion,
-        substrings      [4] SubstringFilter,
-        greaterOrEqual  [5] AttributeValueAssertion,
-        lessOrEqual     [6] AttributeValueAssertion,
-        present         [7] AttributeDescription,
-        approxMatch     [8] AttributeValueAssertion,
-        extensibleMatch [9] MatchingRuleAssertion }
-
-SubstringFilter ::= SEQUENCE {
-        type            AttributeDescription,
-        -- at least one must be present
-        substrings      SEQUENCE OF CHOICE {
-                initial [0] LDAPString,
-                any     [1] LDAPString,
-                final   [2] LDAPString } }
-
-MatchingRuleAssertion ::= SEQUENCE {
-        matchingRule    [1] MatchingRuleId OPTIONAL,
-        type            [2] AttributeDescription OPTIONAL,
-        matchValue      [3] AssertionValue,
-        dnAttributes    [4] BOOLEAN DEFAULT FALSE }
-
--- Wahl, et. al.               Standards Track                    [Page 47]
--- \f
--- RFC 2251                         LDAPv3                    December 1997
-
-SearchResultEntry ::= [APPLICATION 4] SEQUENCE {
-        objectName      LDAPDN,
-        attributes      PartialAttributeList }
-
-PartialAttributeList ::= SEQUENCE OF SEQUENCE {
-        type    AttributeDescription,
-        vals    SET OF AttributeValue }
-
-SearchResultReference ::= [APPLICATION 19] SEQUENCE OF LDAPURL
-
-SearchResultDone ::= [APPLICATION 5] LDAPResult
-
-ModifyRequest ::= [APPLICATION 6] SEQUENCE {
-        object          LDAPDN,
-        modification    SEQUENCE OF SEQUENCE {
-                operation       ENUMERATED {
-                                        add     (0),
-                                        delete  (1),
-                                        replace (2) },
-                modification    AttributeTypeAndValues } }
-
-AttributeTypeAndValues ::= SEQUENCE {
-        type    AttributeDescription,
-        vals    SET OF AttributeValue }
-
-ModifyResponse ::= [APPLICATION 7] LDAPResult
-
-AddRequest ::= [APPLICATION 8] SEQUENCE {
-        entry           LDAPDN,
-        attributes      AttributeList }
-
-AttributeList ::= SEQUENCE OF SEQUENCE {
-        type    AttributeDescription,
-        vals    SET OF AttributeValue }
-
-AddResponse ::= [APPLICATION 9] LDAPResult
-
-DelRequest ::= [APPLICATION 10] LDAPDN
-
-DelResponse ::= [APPLICATION 11] LDAPResult
-
-ModifyDNRequest ::= [APPLICATION 12] SEQUENCE {
-        entry           LDAPDN,
-        newrdn          RelativeLDAPDN,
-        deleteoldrdn    BOOLEAN,
-        newSuperior     [0] LDAPDN OPTIONAL }
-
-ModifyDNResponse ::= [APPLICATION 13] LDAPResult
-
--- Wahl, et. al.               Standards Track                    [Page 48]
--- \f
--- RFC 2251                         LDAPv3                    December 1997
-
-
-CompareRequest ::= [APPLICATION 14] SEQUENCE {
-        entry           LDAPDN,
-        ava             AttributeValueAssertion }
-
-CompareResponse ::= [APPLICATION 15] LDAPResult
-
-AbandonRequest ::= [APPLICATION 16] MessageID
-
-ExtendedRequest ::= [APPLICATION 23] SEQUENCE {
-        requestName      [0] LDAPOID,
-        requestValue     [1] OCTET STRING OPTIONAL }
-
-ExtendedResponse ::= [APPLICATION 24] SEQUENCE {
-        COMPONENTS OF LDAPResult,
-        responseName     [10] LDAPOID OPTIONAL,
-        response         [11] OCTET STRING OPTIONAL }
-
-END
-
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/Makefile b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/Makefile
deleted file mode 100644 (file)
index dc15604..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-
-ERLC = erlc
-EBIN_DIR = ../ebin
-ERLC_FLAGS += -I ./src -I ../include +debug_info
-ERL_OBJECTS := ${EBIN_DIR}/eldap.beam ${EBIN_DIR}/ELDAPv3.beam ${EBIN_DIR}/eldap_fsm.beam 
-
-.SUFFIXES: .asn .erl .beam
-
-$(EBIN_DIR)/%.beam: %.erl
-       $(ERLC) $(ERLC_FLAGS) -o $(EBIN_DIR) $<
-
-.PHONY: all
-all: asn $(ERL_OBJECTS)
-
-.PHONY: asn
-asn: ELDAPv3.erl ../ebin/ELDAPv3.beam
-
-ELDAPv3.erl: ELDAPv3.asn
-       ${ERLC} ELDAPv3.asn
-       mv ELDAPv3.beam ${EBIN_DIR}
-
-.PHONY: clean
-clean:
-       -rm $(ERL_OBJECTS) ELDAPv3.erl ELDAPv3.asn1db ELDAPv3.hrl
-
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/eldap.erl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/eldap.erl
deleted file mode 100644 (file)
index b8422f2..0000000
+++ /dev/null
@@ -1,1078 +0,0 @@
--module(eldap).
-%%% --------------------------------------------------------------------
-%%% Created:  12 Oct 2000 by Tobbe <tnt@home.se>
-%%% Function: Erlang client LDAP implementation according RFC 2251,2253
-%%%           and 2255. The interface is based on RFC 1823, and
-%%%           draft-ietf-asid-ldap-c-api-00.txt
-%%%
-%%% Copyright (c) 2010 Torbjorn Tornkvist
-%%% See MIT-LICENSE at the top dir for licensing information.
-%%% --------------------------------------------------------------------
--vc('$Id$ ').
--export([open/1,open/2,simple_bind/3,controlling_process/2,
-        baseObject/0,singleLevel/0,wholeSubtree/0,close/1,
-        equalityMatch/2,greaterOrEqual/2,lessOrEqual/2,
-        approxMatch/2,search/2,substrings/2,present/1,
-        'and'/1,'or'/1,'not'/1,modify/3, mod_add/2, mod_delete/2,
-        mod_replace/2, add/3, delete/2, modify_dn/5,parse_dn/1,
-        parse_ldap_url/1]).
-
--import(lists,[concat/1]).
-
--include("ELDAPv3.hrl").
--include("eldap.hrl").
-
--define(LDAP_VERSION, 3).
--define(LDAP_PORT, 389).
--define(LDAPS_PORT, 636).
-
--record(eldap, {version = ?LDAP_VERSION,
-               host,                % Host running LDAP server
-               port = ?LDAP_PORT,   % The LDAP server port
-               fd,                  % Socket filedescriptor.
-               binddn = "",         % Name of the entry to bind as
-               passwd,              % Password for (above) entry
-               id = 0,              % LDAP Request ID 
-               log,                 % User provided log function
-               timeout = infinity,  % Request timeout
-               anon_auth = false,   % Allow anonymous authentication
-               use_tls = false      % LDAP/LDAPS
-              }).
-
-%%% For debug purposes
-%%-define(PRINT(S, A), io:fwrite("~w(~w): " ++ S, [?MODULE,?LINE|A])).
--define(PRINT(S, A), true).
-
--define(elog(S, A), error_logger:info_msg("~w(~w): "++S,[?MODULE,?LINE|A])).
-
-%%% ====================================================================
-%%% Exported interface
-%%% ====================================================================
-
-%%% --------------------------------------------------------------------
-%%% open(Hosts [,Opts] )
-%%% --------------------
-%%% Setup a connection to on of the Hosts in the argument
-%%% list. Stop at the first successful connection attempt.
-%%% Valid Opts are:      Where:
-%%%
-%%%    {port, Port}        - Port is the port number 
-%%%    {log, F}            - F(LogLevel, FormatString, ListOfArgs)
-%%%    {timeout, milliSec} - request timeout
-%%%
-%%% --------------------------------------------------------------------
-open(Hosts) -> 
-    open(Hosts, []).
-
-open(Hosts, Opts) when list(Hosts), list(Opts) ->
-    Self = self(),
-    Pid = spawn_link(fun() -> init(Hosts, Opts, Self) end),
-    recv(Pid).
-
-%%% --------------------------------------------------------------------
-%%% Shutdown connection (and process) asynchronous.
-%%% --------------------------------------------------------------------
-
-close(Handle) when pid(Handle) ->
-    send(Handle, close).
-
-%%% --------------------------------------------------------------------
-%%% Set who we should link ourselves to
-%%% --------------------------------------------------------------------
-
-controlling_process(Handle, Pid) when pid(Handle),pid(Pid)  ->
-    link(Pid),
-    send(Handle, {cnt_proc, Pid}),
-    recv(Handle).
-
-%%% --------------------------------------------------------------------
-%%% Authenticate ourselves to the Directory 
-%%% using simple authentication.
-%%%
-%%%  Dn      -  The name of the entry to bind as
-%%%  Passwd  -  The password to be used
-%%%
-%%%  Returns: ok | {error, Error}
-%%% --------------------------------------------------------------------
-simple_bind(Handle, Dn, Passwd) when pid(Handle)  ->
-    send(Handle, {simple_bind, Dn, Passwd}),
-    recv(Handle).
-
-%%% --------------------------------------------------------------------
-%%% Add an entry. The entry field MUST NOT exist for the AddRequest
-%%% to succeed. The parent of the entry MUST exist.
-%%% Example:
-%%%
-%%%  add(Handle, 
-%%%         "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-%%%         [{"objectclass", ["person"]},
-%%%          {"cn", ["Bill Valentine"]},
-%%%          {"sn", ["Valentine"]},
-%%%          {"telephoneNumber", ["545 555 00"]}]
-%%%     )
-%%% --------------------------------------------------------------------
-add(Handle, Entry, Attributes) when pid(Handle),list(Entry),list(Attributes) ->
-    send(Handle, {add, Entry, add_attrs(Attributes)}),
-    recv(Handle).
-
-%%% Do sanity check !
-add_attrs(Attrs) ->
-    F = fun({Type,Vals}) when list(Type),list(Vals) -> 
-               %% Confused ? Me too... :-/
-               {'AddRequest_attributes',Type, Vals} 
-       end,
-    case catch lists:map(F, Attrs) of
-       {'EXIT', _} -> throw({error, attribute_values});
-       Else        -> Else
-    end.
-
-%%% --------------------------------------------------------------------
-%%% Delete an entry. The entry consists of the DN of 
-%%% the entry to be deleted.
-%%% Example:
-%%%
-%%%  delete(Handle, 
-%%%         "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com"
-%%%        )
-%%% --------------------------------------------------------------------
-delete(Handle, Entry) when pid(Handle), list(Entry) ->
-    send(Handle, {delete, Entry}),
-    recv(Handle).
-
-%%% --------------------------------------------------------------------
-%%% Modify an entry. Given an entry a number of modification
-%%% operations can be performed as one atomic operation.
-%%% Example:
-%%%
-%%%  modify(Handle, 
-%%%         "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-%%%         [replace("telephoneNumber", ["555 555 00"]),
-%%%          add("description", ["LDAP hacker"])] 
-%%%        )
-%%% --------------------------------------------------------------------
-modify(Handle, Object, Mods) when pid(Handle), list(Object), list(Mods) ->
-    send(Handle, {modify, Object, Mods}),
-    recv(Handle).
-
-%%%
-%%% Modification operations. 
-%%% Example:
-%%%            replace("telephoneNumber", ["555 555 00"])
-%%%
-mod_add(Type, Values) when list(Type), list(Values)     -> m(add, Type, Values).
-mod_delete(Type, Values) when list(Type), list(Values)  -> m(delete, Type, Values).
-mod_replace(Type, Values) when list(Type), list(Values) -> m(replace, Type, Values).
-
-m(Operation, Type, Values) ->
-    #'ModifyRequest_modification_SEQOF'{
-       operation = Operation,
-       modification = #'AttributeTypeAndValues'{
-        type = Type,
-        vals = Values}}.
-
-%%% --------------------------------------------------------------------
-%%% Modify an entry. Given an entry a number of modification
-%%% operations can be performed as one atomic operation.
-%%% Example:
-%%%
-%%%  modify_dn(Handle, 
-%%%    "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-%%%    "cn=Ben Emerson",
-%%%    true,
-%%%    ""
-%%%        )
-%%% --------------------------------------------------------------------
-modify_dn(Handle, Entry, NewRDN, DelOldRDN, NewSup) 
-  when pid(Handle),list(Entry),list(NewRDN),atom(DelOldRDN),list(NewSup) ->
-    send(Handle, {modify_dn, Entry, NewRDN, 
-                 bool_p(DelOldRDN), optional(NewSup)}),
-    recv(Handle).
-
-%%% Sanity checks !
-
-bool_p(Bool) when Bool==true;Bool==false -> Bool.
-
-optional([])    -> asn1_NOVALUE;
-optional(Value) -> Value.
-
-%%% --------------------------------------------------------------------
-%%% Synchronous search of the Directory returning a 
-%%% requested set of attributes.
-%%%
-%%%  Example:
-%%%
-%%%    Filter = eldap:substrings("sn", [{any,"o"}]),
-%%%    eldap:search(S, [{base, "dc=bluetail, dc=com"},
-%%%                     {filter, Filter},
-%%%                     {attributes,["cn"]}])),
-%%%
-%%% Returned result:  {ok, #eldap_search_result{}}
-%%%
-%%% Example:
-%%%
-%%%  {ok,{eldap_search_result,
-%%%        [{eldap_entry,
-%%%           "cn=Magnus Froberg, dc=bluetail, dc=com",
-%%%           [{"cn",["Magnus Froberg"]}]},
-%%%         {eldap_entry,
-%%%           "cn=Torbjorn Tornkvist, dc=bluetail, dc=com",
-%%%           [{"cn",["Torbjorn Tornkvist"]}]}],
-%%%        []}}
-%%%
-%%% --------------------------------------------------------------------
-search(Handle, A) when pid(Handle), record(A, eldap_search) ->
-    call_search(Handle, A);
-search(Handle, L) when pid(Handle), list(L) ->
-    case catch parse_search_args(L) of
-       {error, Emsg}                  -> {error, Emsg};
-       A when record(A, eldap_search) -> call_search(Handle, A)
-    end.
-
-call_search(Handle, A) ->
-    send(Handle, {search, A}),
-    recv(Handle).
-
-parse_search_args(Args) ->
-    parse_search_args(Args, #eldap_search{scope = wholeSubtree}).
-    
-parse_search_args([{base, Base}|T],A) ->
-    parse_search_args(T,A#eldap_search{base = Base});
-parse_search_args([{filter, Filter}|T],A) ->
-    parse_search_args(T,A#eldap_search{filter = Filter});
-parse_search_args([{scope, Scope}|T],A) ->
-    parse_search_args(T,A#eldap_search{scope = Scope});
-parse_search_args([{attributes, Attrs}|T],A) ->
-    parse_search_args(T,A#eldap_search{attributes = Attrs});
-parse_search_args([{types_only, TypesOnly}|T],A) ->
-    parse_search_args(T,A#eldap_search{types_only = TypesOnly});
-parse_search_args([{timeout, Timeout}|T],A) when integer(Timeout) ->
-    parse_search_args(T,A#eldap_search{timeout = Timeout});
-parse_search_args([H|_],_) ->
-    throw({error,{unknown_arg, H}});
-parse_search_args([],A) ->
-    A.
-
-%%%
-%%% The Scope parameter
-%%%
-baseObject()   -> baseObject.
-singleLevel()  -> singleLevel.
-wholeSubtree() -> wholeSubtree.
-
-%%%
-%%% Boolean filter operations
-%%%
-'and'(ListOfFilters) when list(ListOfFilters) -> {'and',ListOfFilters}.
-'or'(ListOfFilters)  when list(ListOfFilters) -> {'or', ListOfFilters}.
-'not'(Filter)        when tuple(Filter)       -> {'not',Filter}.
-
-%%%
-%%% The following Filter parameters consist of an attribute
-%%% and an attribute value. Example: F("uid","tobbe")
-%%%
-equalityMatch(Desc, Value)   -> {equalityMatch, av_assert(Desc, Value)}.
-greaterOrEqual(Desc, Value)  -> {greaterOrEqual, av_assert(Desc, Value)}.
-lessOrEqual(Desc, Value)     -> {lessOrEqual, av_assert(Desc, Value)}.
-approxMatch(Desc, Value)     -> {approxMatch, av_assert(Desc, Value)}.
-
-av_assert(Desc, Value) ->
-    #'AttributeValueAssertion'{attributeDesc  = Desc,
-                              assertionValue = Value}.
-
-%%%
-%%% Filter to check for the presence of an attribute
-%%%
-present(Attribute) when list(Attribute) -> 
-    {present, Attribute}.
-
-
-%%%
-%%% A substring filter seem to be based on a pattern:
-%%%
-%%%   InitValue*AnyValue*FinalValue
-%%%
-%%% where all three parts seem to be optional (at least when
-%%% talking with an OpenLDAP server). Thus, the arguments
-%%% to substrings/2 looks like this:
-%%%
-%%% Type   ::= string( <attribute> )
-%%% SubStr ::= listof( {initial,Value} | {any,Value}, {final,Value})
-%%%
-%%% Example: substrings("sn",[{initial,"To"},{any,"kv"},{final,"st"}])
-%%% will match entries containing:  'sn: Tornkvist'
-%%%
-substrings(Type, SubStr) when list(Type), list(SubStr) -> 
-    Ss = {'SubstringFilter_substrings',v_substr(SubStr)},
-    {substrings,#'SubstringFilter'{type = Type,
-                                  substrings = Ss}}.
-    
-%%% --------------------------------------------------------------------
-%%% Worker process. We keep track of a controlling process to
-%%% be able to terminate together with it.
-%%% --------------------------------------------------------------------
-
-init(Hosts, Opts, Cpid) ->
-    Data = parse_args(Opts, Cpid, #eldap{}),
-    case try_connect(Hosts, Data) of
-       {ok,Data2} ->
-           send(Cpid, {ok,self()}),
-           put(req_timeout, Data#eldap.timeout), % kludge...
-           loop(Cpid, Data2);
-       Else ->
-           send(Cpid, Else),
-           unlink(Cpid),
-           exit(Else)
-    end.
-
-parse_args([{port, Port}|T], Cpid, Data) when integer(Port) ->
-    parse_args(T, Cpid, Data#eldap{port = Port});
-parse_args([{timeout, Timeout}|T], Cpid, Data) when integer(Timeout),Timeout>0 ->
-    parse_args(T, Cpid, Data#eldap{timeout = Timeout});
-parse_args([{anon_auth, true}|T], Cpid, Data) ->
-    parse_args(T, Cpid, Data#eldap{anon_auth = false});
-parse_args([{anon_auth, _}|T], Cpid, Data) ->
-    parse_args(T, Cpid, Data);
-parse_args([{ssl, true}|T], Cpid, Data) ->
-    parse_args(T, Cpid, Data#eldap{use_tls = true});
-parse_args([{ssl, _}|T], Cpid, Data) ->
-    parse_args(T, Cpid, Data);
-parse_args([{log, F}|T], Cpid, Data) when function(F) ->
-    parse_args(T, Cpid, Data#eldap{log = F});
-parse_args([{log, _}|T], Cpid, Data) ->
-    parse_args(T, Cpid, Data);
-parse_args([H|_], Cpid, _) ->
-    send(Cpid, {error,{wrong_option,H}}),
-    exit(wrong_option);
-parse_args([], _, Data) ->
-    Data.
-                 
-%%% Try to connect to the hosts in the listed order,
-%%% and stop with the first one to which a successful
-%%% connection is made.
-
-try_connect([Host|Hosts], Data) ->
-    TcpOpts = [{packet, asn1}, {active,false}],
-    case do_connect(Host, Data, TcpOpts) of
-       {ok,Fd} -> {ok,Data#eldap{host = Host, fd   = Fd}};
-       _       -> try_connect(Hosts, Data)
-    end;
-try_connect([],_) ->
-    {error,"connect failed"}.
-
-do_connect(Host, Data, Opts) when Data#eldap.use_tls == false ->
-    gen_tcp:connect(Host, Data#eldap.port, Opts, Data#eldap.timeout);
-do_connect(Host, Data, Opts) when Data#eldap.use_tls == true ->
-    ssl:connect(Host, Data#eldap.port, [{verify,0}|Opts]).
-
-
-loop(Cpid, Data) ->
-    receive
-
-       {From, {search, A}} ->
-           {Res,NewData} = do_search(Data, A),
-           send(From,Res),
-           loop(Cpid, NewData);
-
-       {From, {modify, Obj, Mod}} ->
-           {Res,NewData} = do_modify(Data, Obj, Mod),
-           send(From,Res),
-           loop(Cpid, NewData);
-
-       {From, {modify_dn, Obj, NewRDN, DelOldRDN, NewSup}} ->
-           {Res,NewData} = do_modify_dn(Data, Obj, NewRDN, DelOldRDN, NewSup),
-           send(From,Res),
-           loop(Cpid, NewData);
-
-       {From, {add, Entry, Attrs}} ->
-           {Res,NewData} = do_add(Data, Entry, Attrs),
-           send(From,Res),
-           loop(Cpid, NewData);
-
-       {From, {delete, Entry}} ->
-           {Res,NewData} = do_delete(Data, Entry),
-           send(From,Res),
-           loop(Cpid, NewData);
-
-       {From, {simple_bind, Dn, Passwd}} ->
-           {Res,NewData} = do_simple_bind(Data, Dn, Passwd),
-           send(From,Res),
-           loop(Cpid, NewData);
-
-       {From, {cnt_proc, NewCpid}} ->
-           unlink(Cpid),
-           send(From,ok),
-           ?PRINT("New Cpid is: ~p~n",[NewCpid]),
-           loop(NewCpid, Data);
-
-       {From, close} ->
-           unlink(Cpid),
-           exit(closed);
-
-       {Cpid, 'EXIT', Reason} ->
-           ?PRINT("Got EXIT from Cpid, reason=~p~n",[Reason]),
-           exit(Reason);
-
-       _XX ->
-           ?PRINT("loop got: ~p~n",[_XX]),
-           loop(Cpid, Data)
-
-    end.
-
-%%% --------------------------------------------------------------------
-%%% bindRequest
-%%% --------------------------------------------------------------------
-
-%%% Authenticate ourselves to the directory using
-%%% simple authentication.
-
-do_simple_bind(Data, anon, anon) ->   %% For testing
-    do_the_simple_bind(Data, "", "");
-do_simple_bind(Data, Dn, _Passwd) when Dn=="",Data#eldap.anon_auth==false ->
-    {{error,anonymous_auth},Data};
-do_simple_bind(Data, _Dn, Passwd) when Passwd=="",Data#eldap.anon_auth==false ->
-    {{error,anonymous_auth},Data};
-do_simple_bind(Data, Dn, Passwd) ->
-    do_the_simple_bind(Data, Dn, Passwd).
-
-do_the_simple_bind(Data, Dn, Passwd) ->
-    case catch exec_simple_bind(Data#eldap{binddn = Dn, 
-                                          passwd = Passwd,
-                                          id     = bump_id(Data)}) of
-       {ok,NewData} -> {ok,NewData};
-       {error,Emsg} -> {{error,Emsg},Data};
-       Else         -> {{error,Else},Data}
-    end.
-
-exec_simple_bind(Data) ->
-    Req = #'BindRequest'{version        = Data#eldap.version,
-                        name           = Data#eldap.binddn,  
-                        authentication = {simple, Data#eldap.passwd}},
-    log2(Data, "bind request = ~p~n", [Req]),
-    Reply = request(Data#eldap.fd, Data, Data#eldap.id, {bindRequest, Req}),
-    log2(Data, "bind reply = ~p~n", [Reply]),    
-    exec_simple_bind_reply(Data, Reply).
-
-exec_simple_bind_reply(Data, {ok,Msg}) when 
-  Msg#'LDAPMessage'.messageID == Data#eldap.id ->
-    case Msg#'LDAPMessage'.protocolOp of
-       {bindResponse, Result} ->
-           case Result#'BindResponse'.resultCode of
-               success -> {ok,Data};
-               Error   -> {error, Error}
-           end;
-       Other -> {error, Other}
-    end;
-exec_simple_bind_reply(_, Error) ->
-    {error, Error}.
-
-
-%%% --------------------------------------------------------------------
-%%% searchRequest
-%%% --------------------------------------------------------------------
-
-do_search(Data, A) ->
-    case catch do_search_0(Data, A) of
-       {error,Emsg}         -> {ldap_closed_p(Data, Emsg),Data};
-       {'EXIT',Error}       -> {ldap_closed_p(Data, Error),Data};
-       {ok,Res,Ref,NewData} -> {{ok,polish(Res, Ref)},NewData};
-       Else                 -> {ldap_closed_p(Data, Else),Data}
-    end.
-
-%%%
-%%% Polish the returned search result
-%%%
-
-polish(Res, Ref) ->
-    R = polish_result(Res),
-    %%% No special treatment of referrals at the moment.
-    #eldap_search_result{entries = R,
-                        referrals = Ref}.
-
-polish_result([H|T]) when record(H, 'SearchResultEntry') ->
-    ObjectName = H#'SearchResultEntry'.objectName,
-    F = fun({_,A,V}) -> {A,V} end,
-    Attrs = lists:map(F, H#'SearchResultEntry'.attributes),
-    [#eldap_entry{object_name = ObjectName,
-                 attributes  = Attrs}|
-     polish_result(T)];
-polish_result([]) ->
-    [].
-
-do_search_0(Data, A) ->
-    Req = #'SearchRequest'{baseObject = A#eldap_search.base,
-                          scope = v_scope(A#eldap_search.scope),
-                          derefAliases = neverDerefAliases,
-                          sizeLimit = 0, % no size limit
-                          timeLimit = v_timeout(A#eldap_search.timeout),
-                          typesOnly = v_bool(A#eldap_search.types_only),
-                          filter = v_filter(A#eldap_search.filter),
-                          attributes = v_attributes(A#eldap_search.attributes)
-                         },
-    Id = bump_id(Data),
-    collect_search_responses(Data#eldap{id=Id}, Req, Id).
-    
-%%% The returned answers cames in one packet per entry
-%%% mixed with possible referals
-
-collect_search_responses(Data, Req, ID) ->
-    S = Data#eldap.fd,
-    log2(Data, "search request = ~p~n", [Req]),
-    send_request(S, Data, ID, {searchRequest, Req}),
-    Resp = recv_response(S, Data),
-    log2(Data, "search reply = ~p~n", [Resp]),    
-    collect_search_responses(Data, S, ID, Resp, [], []).
-
-collect_search_responses(Data, S, ID, {ok,Msg}, Acc, Ref) 
-  when record(Msg,'LDAPMessage') ->
-    case Msg#'LDAPMessage'.protocolOp of
-       {'searchResDone',R} when R#'LDAPResult'.resultCode == success ->
-           log2(Data, "search reply = searchResDone ~n", []),    
-           {ok,Acc,Ref,Data};
-       {'searchResEntry',R} when record(R,'SearchResultEntry') ->
-           Resp = recv_response(S, Data),
-           log2(Data, "search reply = ~p~n", [Resp]),    
-           collect_search_responses(Data, S, ID, Resp, [R|Acc], Ref);
-       {'searchResRef',R} ->
-           %% At the moment we don't do anyting sensible here since
-           %% I haven't been able to trigger the server to generate
-           %% a response like this.
-           Resp = recv_response(S, Data),
-           log2(Data, "search reply = ~p~n", [Resp]),    
-           collect_search_responses(Data, S, ID, Resp, Acc, [R|Ref]);
-       Else ->
-           throw({error,Else})
-    end;
-collect_search_responses(_, _, _, Else, _, _) ->
-    throw({error,Else}).
-
-%%% --------------------------------------------------------------------
-%%% addRequest
-%%% --------------------------------------------------------------------
-
-do_add(Data, Entry, Attrs) ->
-    case catch do_add_0(Data, Entry, Attrs) of
-       {error,Emsg}   -> {ldap_closed_p(Data, Emsg),Data};
-       {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data};
-       {ok,NewData}   -> {ok,NewData};
-       Else           -> {ldap_closed_p(Data, Else),Data}
-    end.
-
-do_add_0(Data, Entry, Attrs) ->
-    Req = #'AddRequest'{entry = Entry,
-                       attributes = Attrs},
-    S = Data#eldap.fd,
-    Id = bump_id(Data),
-    log2(Data, "add request = ~p~n", [Req]),
-    Resp = request(S, Data, Id, {addRequest, Req}),
-    log2(Data, "add reply = ~p~n", [Resp]),    
-    check_reply(Data#eldap{id = Id}, Resp, addResponse).
-
-
-%%% --------------------------------------------------------------------
-%%% deleteRequest
-%%% --------------------------------------------------------------------
-
-do_delete(Data, Entry) ->
-    case catch do_delete_0(Data, Entry) of
-       {error,Emsg}   -> {ldap_closed_p(Data, Emsg),Data};
-       {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data};
-       {ok,NewData}   -> {ok,NewData};
-       Else           -> {ldap_closed_p(Data, Else),Data}
-    end.
-
-do_delete_0(Data, Entry) ->
-    S = Data#eldap.fd,
-    Id = bump_id(Data),
-    log2(Data, "del request = ~p~n", [Entry]),
-    Resp = request(S, Data, Id, {delRequest, Entry}),
-    log2(Data, "del reply = ~p~n", [Resp]),    
-    check_reply(Data#eldap{id = Id}, Resp, delResponse).
-
-
-%%% --------------------------------------------------------------------
-%%% modifyRequest
-%%% --------------------------------------------------------------------
-
-do_modify(Data, Obj, Mod) ->
-    case catch do_modify_0(Data, Obj, Mod) of
-       {error,Emsg}   -> {ldap_closed_p(Data, Emsg),Data};
-       {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data};
-       {ok,NewData}   -> {ok,NewData};
-       Else           -> {ldap_closed_p(Data, Else),Data}
-    end.
-
-do_modify_0(Data, Obj, Mod) ->
-    v_modifications(Mod),
-    Req = #'ModifyRequest'{object = Obj,
-                          modification = Mod},
-    S = Data#eldap.fd,
-    Id = bump_id(Data),
-    log2(Data, "modify request = ~p~n", [Req]),
-    Resp = request(S, Data, Id, {modifyRequest, Req}),
-    log2(Data, "modify reply = ~p~n", [Resp]),    
-    check_reply(Data#eldap{id = Id}, Resp, modifyResponse).
-
-%%% --------------------------------------------------------------------
-%%% modifyDNRequest
-%%% --------------------------------------------------------------------
-
-do_modify_dn(Data, Entry, NewRDN, DelOldRDN, NewSup) ->
-    case catch do_modify_dn_0(Data, Entry, NewRDN, DelOldRDN, NewSup) of
-       {error,Emsg}   -> {ldap_closed_p(Data, Emsg),Data};
-       {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data};
-       {ok,NewData}   -> {ok,NewData};
-       Else           -> {ldap_closed_p(Data, Else),Data}
-    end.
-
-do_modify_dn_0(Data, Entry, NewRDN, DelOldRDN, NewSup) ->
-    Req = #'ModifyDNRequest'{entry = Entry,
-                            newrdn = NewRDN,
-                            deleteoldrdn = DelOldRDN,
-                            newSuperior = NewSup},
-    S = Data#eldap.fd,
-    Id = bump_id(Data),
-    log2(Data, "modify DN request = ~p~n", [Req]),
-    Resp = request(S, Data, Id, {modDNRequest, Req}),
-    log2(Data, "modify DN reply = ~p~n", [Resp]),    
-    check_reply(Data#eldap{id = Id}, Resp, modDNResponse).
-
-%%% --------------------------------------------------------------------
-%%% Send an LDAP request and receive the answer
-%%% --------------------------------------------------------------------
-
-request(S, Data, ID, Request) ->
-    send_request(S, Data, ID, Request),
-    recv_response(S, Data).
-
-send_request(S, Data, ID, Request) ->
-    Message = #'LDAPMessage'{messageID  = ID,
-                            protocolOp = Request},
-    {ok,Bytes} = asn1rt:encode('ELDAPv3', 'LDAPMessage', Message),
-    case do_send(S, Data, Bytes) of
-       {error,Reason} -> throw({gen_tcp_error,Reason});
-       Else           -> Else
-    end.
-
-do_send(S, Data, Bytes) when Data#eldap.use_tls == false ->
-    gen_tcp:send(S, Bytes);
-do_send(S, Data, Bytes) when Data#eldap.use_tls == true ->
-    ssl:send(S, Bytes).
-
-do_recv(S, Data, Len, Timeout) when Data#eldap.use_tls == false ->
-    gen_tcp:recv(S, Len, Timeout);
-do_recv(S, Data, Len, Timeout) when Data#eldap.use_tls == true ->
-    ssl:recv(S, Len, Timeout).
-
-recv_response(S, Data) ->
-    Timeout = get(req_timeout), % kludge...
-    case do_recv(S, Data, 0, Timeout) of
-       {ok, Packet} ->
-           check_tag(Packet),
-           case asn1rt:decode('ELDAPv3', 'LDAPMessage', Packet) of
-               {ok,Resp} -> {ok,Resp};
-               Error     -> throw(Error)
-           end;
-       {error,Reason} ->
-           throw({gen_tcp_error, Reason});
-       Error ->
-           throw(Error)
-    end.
-
-%%% Sanity check of received packet
-check_tag(Data) ->
-    case asn1rt_ber_bin:decode_tag(b2l(Data)) of
-       {_Tag, Data1, _Rb} ->
-           case asn1rt_ber_bin:decode_length(b2l(Data1)) of
-               {{_Len, _Data2}, _Rb2} -> ok;
-               _ -> throw({error,decoded_tag_length})
-           end;
-       _ -> throw({error,decoded_tag})
-    end.
-
-%%% Check for expected kind of reply
-check_reply(Data, {ok,Msg}, Op) when 
-  Msg#'LDAPMessage'.messageID == Data#eldap.id ->
-    case Msg#'LDAPMessage'.protocolOp of
-       {Op, Result} ->
-           case Result#'LDAPResult'.resultCode of
-               success -> {ok,Data};
-               Error   -> {error, Error}
-           end;
-       Other -> {error, Other}
-    end;
-check_reply(_, Error, _) ->
-    {error, Error}.
-
-
-%%% --------------------------------------------------------------------
-%%% Verify the input data
-%%% --------------------------------------------------------------------
-
-v_filter({'and',L})           -> {'and',L};
-v_filter({'or', L})           -> {'or',L};
-v_filter({'not',L})           -> {'not',L};
-v_filter({equalityMatch,AV})  -> {equalityMatch,AV};
-v_filter({greaterOrEqual,AV}) -> {greaterOrEqual,AV};
-v_filter({lessOrEqual,AV})    -> {lessOrEqual,AV};
-v_filter({approxMatch,AV})    -> {approxMatch,AV};
-v_filter({present,A})         -> {present,A};
-v_filter({substrings,S}) when record(S,'SubstringFilter') -> {substrings,S};
-v_filter(_Filter) -> throw({error,concat(["unknown filter: ",_Filter])}).
-
-v_modifications(Mods) ->
-    F = fun({_,Op,_}) ->
-               case lists:member(Op,[add,delete,replace]) of
-                   true -> true;
-                   _    -> throw({error,{mod_operation,Op}})
-               end
-       end,
-    lists:foreach(F, Mods).
-
-v_substr([{Key,Str}|T]) when list(Str),Key==initial;Key==any;Key==final ->
-    [{Key,Str}|v_substr(T)];
-v_substr([H|_]) ->
-    throw({error,{substring_arg,H}});
-v_substr([]) -> 
-    [].
-v_scope(baseObject)   -> baseObject;
-v_scope(singleLevel)  -> singleLevel;
-v_scope(wholeSubtree) -> wholeSubtree;
-v_scope(_Scope)       -> throw({error,concat(["unknown scope: ",_Scope])}).
-
-v_bool(true)  -> true;
-v_bool(false) -> false;
-v_bool(_Bool) -> throw({error,concat(["not Boolean: ",_Bool])}).
-
-v_timeout(I) when integer(I), I>=0 -> I;
-v_timeout(_I) -> throw({error,concat(["timeout not positive integer: ",_I])}).
-
-v_attributes(Attrs) ->
-    F = fun(A) when list(A) -> A;
-          (A) -> throw({error,concat(["attribute not String: ",A])})
-       end,
-    lists:map(F,Attrs).
-
-
-%%% --------------------------------------------------------------------
-%%% Log routines. Call a user provided log routine F.
-%%% --------------------------------------------------------------------
-
-log1(Data, Str, Args) -> log(Data, Str, Args, 1).
-log2(Data, Str, Args) -> log(Data, Str, Args, 2).
-
-log(Data, Str, Args, Level) when function(Data#eldap.log) ->
-    catch (Data#eldap.log)(Level, Str, Args);
-log(_, _, _, _) -> 
-    ok.
-
-
-%%% --------------------------------------------------------------------
-%%% Misc. routines
-%%% --------------------------------------------------------------------
-
-send(To,Msg) -> To ! {self(),Msg}.
-recv(From)   -> receive {From,Msg} -> Msg end.
-
-ldap_closed_p(Data, Emsg) when Data#eldap.use_tls == true ->
-    %% Check if the SSL socket seems to be alive or not
-    case catch ssl:sockname(Data#eldap.fd) of
-       {error, _} ->
-           ssl:close(Data#eldap.fd),
-           {error, ldap_closed};
-       {ok, _} ->
-           {error, Emsg};
-       _ ->
-           %% sockname crashes if the socket pid is not alive
-           {error, ldap_closed}
-    end;
-ldap_closed_p(Data, Emsg) ->
-    %% non-SSL socket
-    case inet:port(Data#eldap.fd) of
-       {error,_} -> {error, ldap_closed};
-       _         -> {error,Emsg}
-    end.
-    
-bump_id(Data) -> Data#eldap.id + 1.
-
-    
-%%% --------------------------------------------------------------------
-%%% parse_dn/1  -  Implementation of RFC 2253:
-%%%
-%%%   "UTF-8 String Representation of Distinguished Names"
-%%%
-%%% Test cases:
-%%%
-%%%  The simplest case:
-%%%  
-%%%  1> eldap:parse_dn("CN=Steve Kille,O=Isode Limited,C=GB").
-%%%  {ok,[[{attribute_type_and_value,"CN","Steve Kille"}],
-%%%       [{attribute_type_and_value,"O","Isode Limited"}],
-%%%       [{attribute_type_and_value,"C","GB"}]]}
-%%%
-%%%  The first RDN is multi-valued:
-%%%  
-%%%  2> eldap:parse_dn("OU=Sales+CN=J. Smith,O=Widget Inc.,C=US").
-%%%  {ok,[[{attribute_type_and_value,"OU","Sales"},
-%%%        {attribute_type_and_value,"CN","J. Smith"}],
-%%%       [{attribute_type_and_value,"O","Widget Inc."}],
-%%%       [{attribute_type_and_value,"C","US"}]]}
-%%%
-%%%  Quoting a comma:
-%%%
-%%%  3> eldap:parse_dn("CN=L. Eagle,O=Sue\\, Grabbit and Runn,C=GB").
-%%%  {ok,[[{attribute_type_and_value,"CN","L. Eagle"}],
-%%%       [{attribute_type_and_value,"O","Sue\\, Grabbit and Runn"}],
-%%%       [{attribute_type_and_value,"C","GB"}]]}
-%%%
-%%%  A value contains a carriage return:
-%%%
-%%%  4> eldap:parse_dn("CN=Before                                    
-%%%  4> After,O=Test,C=GB").
-%%%  {ok,[[{attribute_type_and_value,"CN","Before\nAfter"}],
-%%%       [{attribute_type_and_value,"O","Test"}],
-%%%       [{attribute_type_and_value,"C","GB"}]]}
-%%%
-%%%  5> eldap:parse_dn("CN=Before\\0DAfter,O=Test,C=GB").
-%%%  {ok,[[{attribute_type_and_value,"CN","Before\\0DAfter"}],
-%%%       [{attribute_type_and_value,"O","Test"}],
-%%%       [{attribute_type_and_value,"C","GB"}]]}
-%%%  
-%%%  An RDN in OID form:
-%%%  
-%%%  6> eldap:parse_dn("1.3.6.1.4.1.1466.0=#04024869,O=Test,C=GB").
-%%%  {ok,[[{attribute_type_and_value,"1.3.6.1.4.1.1466.0","#04024869"}],
-%%%       [{attribute_type_and_value,"O","Test"}],
-%%%       [{attribute_type_and_value,"C","GB"}]]}
-%%%  
-%%%
-%%% --------------------------------------------------------------------
-
-parse_dn("") -> % empty DN string
-    {ok,[]};  
-parse_dn([H|_] = Str) when H=/=$, -> % 1:st name-component !
-    case catch parse_name(Str,[]) of
-       {'EXIT',Reason} -> {parse_error,internal_error,Reason};
-       Else            -> Else
-    end.
-
-parse_name("",Acc)  -> 
-    {ok,lists:reverse(Acc)};
-parse_name([$,|T],Acc) -> % N:th name-component !
-    parse_name(T,Acc);
-parse_name(Str,Acc) ->
-    {Rest,NameComponent} = parse_name_component(Str),
-    parse_name(Rest,[NameComponent|Acc]).
-    
-parse_name_component(Str) ->
-    parse_name_component(Str,[]).
-
-parse_name_component(Str,Acc) ->
-    case parse_attribute_type_and_value(Str) of
-       {[$+|Rest], ATV} ->
-           parse_name_component(Rest,[ATV|Acc]);
-       {Rest,ATV} ->
-           {Rest,lists:reverse([ATV|Acc])}
-    end.
-
-parse_attribute_type_and_value(Str) ->
-    case parse_attribute_type(Str) of
-       {Rest,[]} -> 
-           error(expecting_attribute_type,Str);
-       {Rest,Type} ->
-           Rest2 = parse_equal_sign(Rest),
-           {Rest3,Value} = parse_attribute_value(Rest2),
-           {Rest3,{attribute_type_and_value,Type,Value}}
-    end.
-
--define(IS_ALPHA(X) , X>=$a,X=<$z;X>=$A,X=<$Z ).
--define(IS_DIGIT(X) , X>=$0,X=<$9 ).
--define(IS_SPECIAL(X) , X==$,;X==$=;X==$+;X==$<;X==$>;X==$#;X==$; ).
--define(IS_QUOTECHAR(X) , X=/=$\\,X=/=$" ).
--define(IS_STRINGCHAR(X) , 
-       X=/=$,,X=/=$=,X=/=$+,X=/=$<,X=/=$>,X=/=$#,X=/=$;,?IS_QUOTECHAR(X) ).
--define(IS_HEXCHAR(X) , ?IS_DIGIT(X);X>=$a,X=<$f;X>=$A,X=<$F ).
-
-parse_attribute_type([H|T]) when ?IS_ALPHA(H) ->
-    %% NB: It must be an error in the RFC in the definition
-    %% of 'attributeType', should be: (ALPHA *keychar)
-    {Rest,KeyChars} = parse_keychars(T),  
-    {Rest,[H|KeyChars]};
-parse_attribute_type([H|_] = Str) when ?IS_DIGIT(H) ->
-    parse_oid(Str);
-parse_attribute_type(Str) ->
-    error(invalid_attribute_type,Str).
-
-
-
-%%% Is a hexstring !
-parse_attribute_value([$#,X,Y|T]) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) ->
-    {Rest,HexString} = parse_hexstring(T),
-    {Rest,[$#,X,Y|HexString]}; 
-%%% Is a "quotation-sequence" !
-parse_attribute_value([$"|T]) -> 
-    {Rest,Quotation} = parse_quotation(T),
-    {Rest,[$"|Quotation]};
-%%% Is a stringchar , pair or Empty !
-parse_attribute_value(Str) -> 
-    parse_string(Str).
-
-parse_hexstring(Str) ->
-    parse_hexstring(Str,[]).
-
-parse_hexstring([X,Y|T],Acc) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) ->
-    parse_hexstring(T,[Y,X|Acc]);
-parse_hexstring(T,Acc) ->
-    {T,lists:reverse(Acc)}.
-
-parse_quotation([$"|T]) -> % an empty: ""  is ok !
-    {T,[$"]};
-parse_quotation(Str) -> 
-    parse_quotation(Str,[]).
-
-%%% Parse to end of quotation
-parse_quotation([$"|T],Acc) -> 
-    {T,lists:reverse([$"|Acc])};
-parse_quotation([X|T],Acc) when ?IS_QUOTECHAR(X) -> 
-    parse_quotation(T,[X|Acc]);
-parse_quotation([$\\,X|T],Acc) when ?IS_SPECIAL(X) -> 
-    parse_quotation(T,[X,$\\|Acc]);
-parse_quotation([$\\,$\\|T],Acc) -> 
-    parse_quotation(T,[$\\,$\\|Acc]);
-parse_quotation([$\\,$"|T],Acc) -> 
-    parse_quotation(T,[$",$\\|Acc]);
-parse_quotation([$\\,X,Y|T],Acc) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) -> 
-    parse_quotation(T,[Y,X,$\\|Acc]);
-parse_quotation(T,_) -> 
-    error(expecting_double_quote_mark,T).
-
-parse_string(Str) -> 
-    parse_string(Str,[]).
-
-parse_string("",Acc) -> 
-    {"",lists:reverse(Acc)};
-parse_string([H|T],Acc) when ?IS_STRINGCHAR(H) -> 
-    parse_string(T,[H|Acc]);
-parse_string([$\\,X|T],Acc) when ?IS_SPECIAL(X) -> % is a pair !
-    parse_string(T,[X,$\\|Acc]);
-parse_string([$\\,$\\|T],Acc)                   -> % is a pair !
-    parse_string(T,[$\\,$\\|Acc]);
-parse_string([$\\,$" |T],Acc)                   -> % is a pair !
-    parse_string(T,[$" ,$\\|Acc]);
-parse_string([$\\,X,Y|T],Acc) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) -> % is a pair!
-    parse_string(T,[Y,X,$\\|Acc]);
-parse_string(T,Acc) ->
-    {T,lists:reverse(Acc)}.
-
-parse_equal_sign([$=|T]) -> T;
-parse_equal_sign(T)      -> error(expecting_equal_sign,T).
-
-parse_keychars(Str) -> parse_keychars(Str,[]).
-
-parse_keychars([H|T],Acc) when ?IS_ALPHA(H) -> parse_keychars(T,[H|Acc]);
-parse_keychars([H|T],Acc) when ?IS_DIGIT(H) -> parse_keychars(T,[H|Acc]);
-parse_keychars([$-|T],Acc)                  -> parse_keychars(T,[$-|Acc]);
-parse_keychars(T,Acc)                       -> {T,lists:reverse(Acc)}.
-
-parse_oid(Str) -> parse_oid(Str,[]).
-
-parse_oid([H,$.|T], Acc) when ?IS_DIGIT(H) ->
-    parse_oid(T,[$.,H|Acc]);
-parse_oid([H|T], Acc) when ?IS_DIGIT(H) ->
-    parse_oid(T,[H|Acc]);
-parse_oid(T, Acc) ->
-    {T,lists:reverse(Acc)}.
-
-error(Emsg,Rest) -> 
-    throw({parse_error,Emsg,Rest}).
-
-
-%%% --------------------------------------------------------------------
-%%% Parse LDAP url according to RFC 2255
-%%%
-%%% Test case:
-%%%
-%%%  2> eldap:parse_ldap_url("ldap://10.42.126.33:389/cn=Administrative%20CA,o=Post%20Danmark,c=DK?certificateRevokationList;binary").
-%%%  {ok,{{10,42,126,33},389},
-%%%      [[{attribute_type_and_value,"cn","Administrative%20CA"}],
-%%%       [{attribute_type_and_value,"o","Post%20Danmark"}],
-%%%       [{attribute_type_and_value,"c","DK"}]],
-%%%      {attributes,["certificateRevokationList;binary"]}}
-%%%
-%%% --------------------------------------------------------------------
-
-parse_ldap_url("ldap://" ++ Rest1 = Str) ->
-    {Rest2,HostPort} = parse_hostport(Rest1),
-    %% Split the string into DN and Attributes+etc
-    {Sdn,Rest3} = split_string(rm_leading_slash(Rest2),$?),
-    case parse_dn(Sdn) of
-       {parse_error,internal_error,_Reason} ->
-           {parse_error,internal_error,{Str,[]}};
-       {parse_error,Emsg,Tail} ->
-           Head = get_head(Str,Tail),
-           {parse_error,Emsg,{Head,Tail}};
-       {ok,DN} ->
-            %% We stop parsing here for now and leave
-            %% 'scope', 'filter' and 'extensions' to
-            %% be implemented later if needed.                               
-           {_Rest4,Attributes} = parse_attributes(Rest3),
-           {ok,HostPort,DN,Attributes}
-    end.
-
-rm_leading_slash([$/|Tail]) -> Tail;
-rm_leading_slash(Tail)      -> Tail.
-
-parse_attributes([$?|Tail]) ->
-    case split_string(Tail,$?) of
-        {[],Attributes} ->
-           {[],{attributes,string:tokens(Attributes,",")}};
-        {Attributes,Rest} ->
-            {Rest,{attributes,string:tokens(Attributes,",")}}
-    end.
-
-parse_hostport(Str) ->
-    {HostPort,Rest} = split_string(Str,$/),
-    case split_string(HostPort,$:) of
-       {Shost,[]} -> 
-           {Rest,{parse_host(Rest,Shost),?LDAP_PORT}};
-       {Shost,[$:|Sport]} ->
-           {Rest,{parse_host(Rest,Shost),
-                  parse_port(Rest,Sport)}}
-    end.
-
-parse_port(Rest,Sport) ->
-    case list_to_integer(Sport) of
-       Port when integer(Port) -> Port;
-       _ -> error(parsing_port,Rest)
-    end.
-
-parse_host(Rest,Shost) ->
-    case catch validate_host(Shost) of
-       {parse_error,Emsg,_} -> error(Emsg,Rest);
-       Host -> Host
-    end.
-
-validate_host(Shost) ->
-    case inet_parse:address(Shost) of
-       {ok,Host} -> Host;
-       _ ->
-           case inet_parse:domain(Shost) of
-               true -> Shost;
-               _    -> error(parsing_host,Shost)
-           end
-    end.
-
-    
-split_string(Str,Key) ->
-    Pred = fun(X) when X==Key -> false; (_) -> true end,
-    lists:splitwith(Pred, Str).
-
-get_head(Str,Tail) ->
-    get_head(Str,Tail,[]).
-
-%%% Should always succeed !
-get_head([H|Tail],Tail,Rhead) -> lists:reverse([H|Rhead]);
-get_head([H|Rest],Tail,Rhead) -> get_head(Rest,Tail,[H|Rhead]).
-
-b2l(B) when binary(B) -> B;
-b2l(L) when list(L)   -> list_to_binary(L).
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/README.test b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/README.test
deleted file mode 100644 (file)
index 9816216..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-%%% $Id$
-
-%%% --------------------------------------------------------------------
-%%% Init setup
-%%% --------------------------------------------------------------------
-
-I set up the OpenLDAP (2.0.6) server using the following
-/usr/local/etc/openldap/slapd.conf file:
-
-  include         /usr/local/etc/openldap/schema/core.schema
-  pidfile         /var/run/slapd.pid
-  argsfile        /var/run/slapd.args
-  database        ldbm
-  suffix          "dc=bluetail, dc=com"
-  rootdn          "dc=bluetail, dc=com"
-  rootpw          hejsan
-  directory       /usr/local/var/openldap-ldbm
-  index   objectClass     eq
-
-
-%%% I started it on the console with some debug output:
-
-  /usr/local/libexec/slapd -d 255 -f /usr/local/etc/openldap/slapd.conf
-
-%%% Then I defined the following data in: bluetail.ldif
-
- dn: dc=bluetail, dc=com
- objectclass: organization
- objectclass: dcObject
- dc: bluetail
- o: Bluetail AB
-
-%%% and in: tobbe.ldif
-
- dn: cn=Torbjorn Tornkvist, dc=bluetail, dc=com
- objectclass: person
- cn: Torbjorn Tornkvist
- sn: Tornkvist
-
-%%% I load the data with:
-
- ldapadd -D "dc=bluetail, dc=com" -w hejsan < bluetail.ldif
- ldapadd -D "dc=bluetail, dc=com" -w hejsan < people.ldif
-
-%%%% To search from a Unix shell:
-
- ldapsearch -L -b "dc=bluetail, dc=com" -w hejsan "(objectclass=*)"
- ldapsearch -L -b "dc=bluetail, dc=com" -w hejsan "cn=Torbjorn Tornkvist"
- ldapsearch -L -b "dc=bluetail, dc=com" -w hejsan "cn=Torb*kvist"
-
-%%% --------------------------------------------------------------------
-%%% Example with certificateRevocationList
-%%% --------------------------------------------------------------------
-
-%%% Using two ldif files:
-
-%%% post_danmark.ldif 
-
-dn: o=Post Danmark, c=DK
-objectclass: country
-objectclass: organization
-c: DK
-o: Post Danmark
-
-%%% crl.ldif
-
-dn: cn=Administrative CA, o=Post Danmark, c=DK
-objectclass: cRLDistributionPoint
-cn: Administrative CA
-certificateRevocationList;binary:< file:/home/tobbe/erlang/eldap/server1.crl
-
-%%% Note the definition of the CRL file !!
-
-%%% To add the difinitions
-
-ldapadd -D "o=Post Danmark, c=DK" -w hejsan < post_danmark.ldif
-ldapadd -D "o=Post Danmark, c=DK" -w hejsan < crl.ldif
-
-%%% And to retreive the CRL
-
-ldapsearch -L -b "o=Post Danmark, c=DK" -w hejsan "(objectclass=*)"
-ldapsearch -L -b "o=Post Danmark, c=DK" -w hejsan "(cn=Administrative CA)" \
-           certificateRevocationList
-
-### Put the retrieved binary in a file (tmp) with
-### the following header and footer
-
------BEGIN X509 CRL-----
- <...binary....>
------END X509 CRL-----
-
-### To verify it with openssl
-
- openssl crl -inform PEM -in tmp -text
-
-ldapsearch -L -D "cn=Torbjorn Tornkvist,o=Post Danmark,c=DK" -b "o=Post Danmark, c=DK" -w qwe123 "(cn=Torbjorn Tornkvist)" cn
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bill.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bill.ldif
deleted file mode 100644 (file)
index 59022ad..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-dn: mail=bill@bluetail.com, dc=bluetail, dc=com
-objectclass: posixAccount
-mail: bill@bluetail.com
-cn: Bill Valentine
-sn: Valentine
-uid: bill
-uidNumber: 400
-gidNumber: 400
-homeDirectory: /home/accounts/bill
-mailDirectory: /home/accounts/bill/INBOX
-userPassword: baltazar
-birMailAccept: accept
-birCluster: bc1
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bluetail.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bluetail.ldif
deleted file mode 100644 (file)
index 914532e..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-dn: dc=bluetail, dc=com
-objectclass: dcObject
-dc: bluetail
-
-dn: o=Bluetail AB, dc=bluetail, dc=com
-objectclass: organization
-o: Bluetail AB
-street: St.Eriksgatan 44
-postalCode: 112 34
-
-dn: ou=people, o=Bluetail AB, dc=bluetail, dc=com
-objectclass: organizationalUnit
-ou: people
-description: People working at Bluetail
-
-
-
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/crl.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/crl.ldif
deleted file mode 100644 (file)
index 2e52873..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-dn: cn=Administrative CA,o=Post Danmark,c=DK
-objectclass: cRLDistributionPoint
-cn: Administrative CA
-certificateRevocationList;binary:< file:/home/tobbe/erlang/eldap/server1.crl
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/eldap_test.erl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/eldap_test.erl
deleted file mode 100644 (file)
index db64615..0000000
+++ /dev/null
@@ -1,537 +0,0 @@
--module(eldap_test).
-%%% --------------------------------------------------------------------
-%%% Created:  12 Oct 2000 by Tobbe 
-%%% Function: Test code for the eldap module
-%%%
-%%% Copyright (C) 2000  Torbjörn Törnkvist
-%%% Copyright (c) 2010 Torbjorn Tornkvist <tobbe@tornkvist.org>
-%%% See MIT-LICENSE at the top dir for licensing information.
-%%% 
-%%% --------------------------------------------------------------------
--vc('$Id$ ').
--export([topen_bind/1,topen_bind/2,all/0,t10/0,t20/0,t21/0,t22/0,
-        t23/0,t24/0,t25/0,t26/0,t27/0,debug/1,t30/0,t31/0,
-        t40/0,t41/0,t50/0,t51/0]).
--export([crl1/0]).
--export([switch/1]).
--export([junk/0]).
-
--include("ELDAPv3.hrl").
--include("eldap.hrl").
-
-junk() ->
-    DN = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-    Msg = #'LDAPMessage'{messageID  = 1,
-                        protocolOp = {delRequest,DN}},
-    asn1rt:encode('ELDAPv3', 'LDAPMessage', Msg).
-
-%%% --------------------------------------------------------------------
-%%% TEST STUFF
-%%% ----------
-%%% When adding a new test case it can be useful to
-%%% switch on debugging, i.e debug(t) in the call to
-%%% topen_bind/2.
-%%% --------------------------------------------------------------------
-
-all() ->
-    Check = "=== Check the result of the previous test case !~n",
-    t10(),
-    t20(),t21(),t22(),t23(),t24(),t25(),t26(),t27(),
-    t30(),t26(Check),t31(),t26(Check),
-    t40(),t26(Check),t41(),t26(Check),
-    t50(),t26(Check),t51(),t26(Check),
-    ok.
-
-%%%
-%%% Setup a connection and bind using simple authentication
-%%%
-t10() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 10 (connection setup + simple auth)~n"),
-               line(),
-               X = topen_bind("localhost", debug(f)),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Do an equality match:  sn = Tornkvist
-%%%
-t20() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 20 (equality match)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Filter = eldap:equalityMatch("sn","Tornkvist"),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, Filter}])),
-
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Do a substring match:  sn = To*kv*st
-%%%
-t21() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 21 (substring match)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Filter = eldap:substrings("sn", [{initial,"To"},
-                                                {any,"kv"},
-                                                {final,"st"}]),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, Filter}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Do a substring match:  sn = *o* 
-%%% and do only retrieve the cn attribute
-%%%
-t22() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 22 (substring match + return 'cn' only)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Filter = eldap:substrings("sn", [{any,"o"}]),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, Filter},
-                                         {attributes,["cn"]}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-
-%%%
-%%% Do a present search for the attribute 'objectclass'
-%%% on the base level.
-%%%
-t23() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 23 (objectclass=* , base level)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, eldap:present("objectclass")},
-                                         {scope,eldap:baseObject()}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Do a present search for the attribute 'objectclass'
-%%% on a single level.
-%%%
-t24() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 24 (objectclass=* , single level)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, eldap:present("objectclass")},
-                                         {scope,eldap:singleLevel()}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Do a present search for the attribute 'objectclass'
-%%% on the whole subtree.
-%%%
-t25() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 25 (objectclass=* , whole subtree)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, eldap:present("objectclass")},
-                                         {scope,eldap:wholeSubtree()}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Do a present search for the attributes 
-%%% 'objectclass' and 'sn' on the whole subtree.
-%%%
-t26() -> t26([]).
-t26(Heading) ->
-    F = fun() ->
-               sleep(),
-               line(),
-               heading(Heading,
-                       "=== TEST 26 (objectclass=* and sn=*)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Filter = eldap:'and'([eldap:present("objectclass"),
-                                     eldap:present("sn")]),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, Filter},
-                                         {scope,eldap:wholeSubtree()}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Do a present search for the attributes 
-%%% 'objectclass' and (not 'sn') on the whole subtree.
-%%%
-t27() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== TEST 27 (objectclass=* and (not sn))~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Filter = eldap:'and'([eldap:present("objectclass"),
-                                     eldap:'not'(eldap:present("sn"))]),
-               X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"},
-                                         {filter, Filter},
-                                         {scope,eldap:wholeSubtree()}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Replace the 'telephoneNumber' attribute and
-%%% add a new attribute 'description'
-%%%
-t30() -> t30([]).
-t30(Heading) ->
-    F = fun() ->
-               sleep(),
-               {_,_,Tno} = erlang:now(),
-               Stno = integer_to_list(Tno),
-               Desc = "LDAP hacker " ++ Stno,
-               line(),
-               heading(Heading,
-                       "=== TEST 30 (replace telephoneNumber/"
-                         ++ Stno ++ " add description/" ++ Desc
-                         ++ ")~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Obj = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-               Mod = [eldap:mod_replace("telephoneNumber", [Stno]),
-                      eldap:mod_add("description", [Desc])],
-               X=(catch eldap:modify(S, Obj, Mod)),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Delete attribute 'description'
-%%%
-t31() -> t31([]).
-t31(Heading) ->
-    F = fun() ->
-               sleep(),
-               {_,_,Tno} = erlang:now(),
-               line(),
-               heading(Heading,
-                       "=== TEST 31 (delete 'description' attribute)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Obj = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-               Mod = [eldap:mod_delete("description", [])],
-               X=(catch eldap:modify(S, Obj, Mod)),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Add an entry
-%%%
-t40() -> t40([]).
-t40(Heading) ->
-    F = fun() ->
-               sleep(),
-               {_,_,Tno} = erlang:now(),
-               line(),
-               heading(Heading,
-                       "=== TEST 40 (add entry 'Bill Valentine')~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Entry = "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-               X=(catch eldap:add(S, Entry,
-                                  [{"objectclass", ["person"]},
-                                   {"cn", ["Bill Valentine"]},
-                                   {"sn", ["Valentine"]},
-                                   {"telephoneNumber", ["545 555 00"]}])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Delete an entry
-%%%
-t41() -> t41([]).
-t41(Heading) ->
-    F = fun() ->
-               sleep(),
-               {_,_,Tno} = erlang:now(),
-               line(),
-               heading(Heading,
-                       "=== TEST 41 (delete entry 'Bill Valentine')~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Entry = "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-               X=(catch eldap:delete(S, Entry)),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Modify the DN of an entry
-%%%
-t50() -> t50([]).
-t50(Heading) ->
-    F = fun() ->
-               sleep(),
-               {_,_,Tno} = erlang:now(),
-               line(),
-               heading(Heading,
-                       "=== TEST 50 (modify DN to: 'Torbjorn M.Tornkvist')~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Entry = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-               X=(catch eldap:modify_dn(S, Entry,
-                                        "cn=Torbjorn M.Tornkvist",
-                                        false,
-                                        [])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%%
-%%% Modify the DN of an entry and remove the RDN attribute.
-%%% NB: Must be run after: 't50' !
-%%%
-t51() -> t51([]).
-t51(Heading) ->
-    F = fun() ->
-               sleep(),
-               {_,_,Tno} = erlang:now(),
-               line(),
-               heading(Heading,
-                       "=== TEST 51 (modify DN, remove the RDN attribute)~n"),
-               line(),
-               {ok,S} = topen_bind("localhost", debug(f)),
-               Entry = "cn=Torbjorn M.Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
-               X=(catch eldap:modify_dn(S, Entry,
-                                        "cn=Torbjorn Tornkvist",
-                                        true,
-                                        [])),
-               io:format("~p~n",[X]),
-               X
-       end,
-    go(F).
-
-%%% --------------------------------------------------------------------
-%%% Test cases for certificate revocation lists
-%%% --------------------------------------------------------------------
-
-crl1() ->
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== CRL-TEST 1 ~n"),
-               line(),
-               {ok,S} = crl_open_bind("localhost", debug(f)),
-               Filter = eldap:equalityMatch("cn","Administrative CA"),
-               X=(catch eldap:search(S, [{base, "o=Post Danmark, c=DK"},
-                                         {filter, Filter},
-                                         {attributes,["certificateRevocationList"]}])),
-               dump_to_file("test-crl1.result",X),
-               ok
-       end,
-    go(F).
-
-
-dump_to_file(Fname,{ok,Res}) ->
-    case Res#eldap_search_result.entries of
-       [Entry|_] ->
-           case Entry#eldap_entry.attributes of
-               [{Attribute,Value}|_] ->
-                   file:write_file(Fname,list_to_binary(Value)),
-                   io:format("Value of '~s' dumped to file: ~s~n",
-                             [Attribute,Fname]);
-               Else ->
-                   io:format("ERROR(dump_to_file): no attributes found~n",[])
-           end;
-       Else ->
-           io:format("ERROR(dump_to_file): no entries found~n",[])
-    end.
-
-switch(1) ->
-    %%
-    %% SEARCH
-    %%
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== SWITCH-TEST 1 (short-search)~n"),
-               line(),
-               {ok,S} = sw_open_bind("korp", debug(t)),
-               Filter = eldap:equalityMatch("cn","Administrative CA"),
-               X=(catch eldap:search(S, [{base, "o=Post Danmark, c=DK"},
-                                         {filter, Filter},
-                                         {attributes,["cn"]}])),
-               io:format("RESULT: ~p~n", [X]),
-               %%dump_to_file("test-switch-1.result",X),
-               eldap:close(S),
-               ok
-       end,
-    go(F);
-switch(2) ->
-    %%
-    %% ADD AN ENTRY
-    %%
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== SWITCH-TEST 2 (add-entry)~n"),
-               line(),
-               {ok,S} = sw_open_bind("korp", debug(t)),
-               Entry = "cn=Bill Valentine, o=Post Danmark, c=DK",
-               X=(catch eldap:add(S, Entry,
-                                  [{"objectclass", ["person"]},
-                                   {"cn", ["Bill Valentine"]},
-                                   {"sn", ["Valentine"]}
-                                   ])),
-               io:format("~p~n",[X]),
-               eldap:close(S),
-               X
-       end,
-    go(F);
-switch(3) ->
-    %%
-    %% SEARCH FOR THE NEWLEY ADDED ENTRY
-    %%
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== SWITCH-TEST 3 (search-added)~n"),
-               line(),
-               {ok,S} = sw_open_bind("korp", debug(t)),
-               Filter = eldap:equalityMatch("cn","Bill Valentine"),
-               X=(catch eldap:search(S, [{base, "o=Post Danmark, c=DK"},
-                                         {filter, Filter},
-                                         {attributes,["cn"]}])),
-               io:format("RESULT: ~p~n", [X]),
-               %%dump_to_file("test-switch-1.result",X),
-               eldap:close(S),
-               ok
-       end,
-    go(F);
-switch(4) ->
-    %%
-    %% DELETE THE NEWLEY ADDED ENTRY
-    %%
-    F = fun() ->
-               sleep(),
-               line(),
-               io:format("=== SWITCH-TEST 4 (delete-added)~n"),
-               line(),
-               {ok,S} = sw_open_bind("korp", debug(t)),
-               Entry = "cn=Bill Valentine, o=Post Danmark, c=DK",
-               X=(catch eldap:delete(S, Entry)),
-               io:format("RESULT: ~p~n", [X]),
-               %%dump_to_file("test-switch-1.result",X),
-               eldap:close(S),
-               ok
-       end,
-    go(F).
-
-
-
-%%% ---------------
-%%% Misc. functions
-%%% ---------------
-
-sw_open_bind(Host) -> 
-    sw_open_bind(Host, debug(t)).
-
-sw_open_bind(Host, Dbg) ->
-    sw_open_bind(Host, Dbg, "cn=Torbjorn Tornkvist,o=Post Danmark,c=DK", "qwe123").
-
-sw_open_bind(Host, LogFun, RootDN, Passwd) ->
-    Opts = [{log,LogFun},{port,9779}],
-    {ok,Handle} = eldap:open([Host], Opts),
-    {eldap:simple_bind(Handle, RootDN, Passwd),
-     Handle}.
-
-crl_open_bind(Host) -> 
-    crl_open_bind(Host, debug(t)).
-
-crl_open_bind(Host, Dbg) ->
-    do_open_bind(Host, Dbg, "o=Post Danmark, c=DK", "hejsan").
-
-topen_bind(Host) -> 
-    topen_bind(Host, debug(t)).
-
-topen_bind(Host, Dbg) -> 
-    do_open_bind(Host, Dbg, "dc=bluetail, dc=com", "hejsan").
-
-do_open_bind(Host, LogFun, RootDN, Passwd) ->
-    Opts = [{log,LogFun}],
-    {ok,Handle} = eldap:open([Host], Opts),
-    {eldap:simple_bind(Handle, RootDN, Passwd),
-     Handle}.
-
-debug(t) -> fun(L,S,A) -> io:format("--- " ++ S, A) end;
-debug(1) -> fun(L,S,A) when L =< 1 -> io:format("--- " ++ S, A) end;
-debug(2) -> fun(L,S,A) when L =< 2 -> io:format("--- " ++ S, A) end;
-debug(f) -> false.
-
-sleep()    -> msleep(400).
-%sleep(Sec) -> msleep(Sec*1000).
-msleep(T)  -> receive after T -> true end.
-
-line() ->
-    S = "==============================================================\n",
-    io:format(S).
-
-heading([], Heading) -> io:format(Heading);
-heading(Heading, _ ) -> io:format(Heading).
-
-%%%
-%%% Process to run the test case
-%%%
-go(F) ->
-    Self = self(),
-    Pid = spawn(fun() -> run(F,Self) end),
-    receive {Pid, X} -> ok end.
-
-run(F, Pid) ->
-    Pid ! {self(),catch F()}.
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/ldap.rc b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/ldap.rc
deleted file mode 100644 (file)
index 6cbdfea..0000000
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/bin/sh
-#
-# ldap This shell script takes care of starting and stopping
-#      ldap servers (slapd and slurpd).
-#
-# chkconfig: - 39 61
-# description: LDAP stands for Lightweight Directory Access Protocol, used \
-#              for implementing the industry standard directory services.
-# processname: slapd
-# config: /etc/openldap/slapd.conf
-# pidfile: /var/run/slapd.pid
-
-# Source function library.
-. /etc/init.d/functions
-
-# Source networking configuration and check that networking is up.
-if [ -r /etc/sysconfig/network ] ; then
-       . /etc/sysconfig/network
-       [ ${NETWORKING} = "no" ] && exit 0
-fi
-
-
-slapd=/usr/sbin/slapd
-slurpd=/usr/sbin/slurpd
-[ -x ${slapd} ] || exit 0
-[ -x ${slurpd} ] || exit 0
-
-RETVAL=0
-
-function start() {
-        # Start daemons.
-        echo -n "Starting slapd:"
-       daemon ${slapd}
-       RETVAL=$?
-       echo
-       if [ $RETVAL -eq 0 ]; then
-            if grep -q "^replogfile" /etc/openldap/slapd.conf; then
-               echo -n "Starting slurpd:"
-                daemon ${slurpd}
-               RETVAL=$?
-               echo
-            fi
-       fi
-       [ $RETVAL -eq 0 ] && touch /var/lock/subsys/ldap
-       return $RETVAL
-}
-
-function stop() {
-        # Stop daemons.
-       echo -n "Shutting down ldap: "
-       killproc ${slapd}
-       RETVAL=$?
-       if [ $RETVAL -eq 0 ]; then
-           if grep -q "^replogfile" /etc/openldap/slapd.conf; then
-               killproc ${slurpd}
-               RETVAL=$?
-           fi
-       fi
-       echo
-        [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/ldap /var/run/slapd.args
-       return $RETVAL
-}
-
-# See how we were called.
-case "$1" in
-    start)
-        start
-        ;;
-    stop)
-        stop
-        ;;
-    status)
-        status ${slapd}
-        if grep -q "^replogfile" /etc/openldap/slapd.conf ; then
-            status ${slurpd}
-       fi
-       ;;
-    restart)
-       stop
-       start
-       ;;
-    reload)
-       killall -HUP ${slapd}
-       RETVAL=$?
-       if [ $RETVAL -eq 0 ]; then
-           if grep -q "^replogfile" /etc/openldap/slapd.conf; then
-               killall -HUP ${slurpd}
-               RETVAL=$?
-           fi
-       fi
-       ;;
-    condrestart)
-        if [ -f /var/lock/subsys/ldap ] ; then
-            stop
-            start
-        fi
-       ;;
-    *)
-       echo "Usage: $0 start|stop|restart|status|condrestart}"
-       RETVAL=1
-esac
-
-exit $RETVAL
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/people.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/people.ldif
deleted file mode 100644 (file)
index 20af5a0..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-dn: cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com
-objectclass: person
-cn: Torbjorn Tornkvist
-sn: Tornkvist
-telephoneNumber: 545 550 23
-
-dn: cn=Magnus Froberg, ou=people, o=Bluetail AB, dc=bluetail, dc=com
-objectclass: person
-cn: Magnus Froberg
-sn: Froberg
-telephoneNumber: 545 550 26
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/post_danmark.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/post_danmark.ldif
deleted file mode 100644 (file)
index 24fbb3f..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-dn: o=Post Danmark,c=DK
-objectclass: country
-objectclass: organization
-c: DK
-o: Post Danmark
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/server1.crl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/server1.crl
deleted file mode 100644 (file)
index 6be714a..0000000
Binary files a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/server1.crl and /dev/null differ
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/slapd.conf b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/slapd.conf
deleted file mode 100644 (file)
index 6bf4fee..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-# $OpenLDAP: pkg/ldap/servers/slapd/slapd.conf,v 1.8.8.4 2000/08/26 17:06:18 kurt Exp $
-#
-# See slapd.conf(5) for details on configuration options.
-# This file should NOT be world readable.
-#
-include                /usr/etc/openldap/schema/core.schema
-
-# Define global ACLs to disable default read access.
-
-# Do not enable referrals until AFTER you have a working directory
-# service AND an understanding of referrals.
-#referral      ldap://root.openldap.org
-
-pidfile                /var/run/slapd.pid
-argsfile       /var/run/slapd.args
-
-# Load dynamic backend modules:
-# modulepath   /usr/libexec/openldap
-# moduleload   back_ldap.la
-# moduleload   back_ldbm.la
-# moduleload   back_passwd.la
-# moduleload   back_shell.la
-
-#######################################################################
-# ldbm database definitions
-#######################################################################
-
-database       ldbm
-suffix         "dc=bluetail, dc=com"
-#suffix                "o=My Organization Name, c=US"
-rootdn         "dc=bluetail, dc=com"
-#rootdn                "cn=Manager, o=My Organization Name, c=US"
-# Cleartext passwords, especially for the rootdn, should
-# be avoid.  See slappasswd(8) and slapd.conf(5) for details.
-# Use of strong authentication encouraged.
-rootpw         hejsan
-# The database directory MUST exist prior to running slapd AND 
-# should only be accessable by the slapd/tools. Mode 700 recommended.
-directory      /usr/var/openldap-ldbm
-# Indices to maintain
-index  objectClass     eq
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/tobbe.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/tobbe.ldif
deleted file mode 100644 (file)
index 1a13f2a..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-dn: cn=Torbjorn Tornkvist,o=Post Danmark,c=DK
-objectclass: person
-cn: Torbjorn Tornkvist
-sn: Tornkvist
-userPassword: qwe123
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-no-ssl-seed.patch b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-no-ssl-seed.patch
deleted file mode 100644 (file)
index 1b5f956..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-diff --git a/src/eldap.erl b/src/eldap.erl
-index 9a78270..b0cdb2e 100644
---- a/src/eldap.erl
-+++ b/src/eldap.erl
-@@ -363,12 +363,5 @@ do_connect(Host, Data, Opts) when Data#eldap.use_tls == false ->
-     gen_tcp:connect(Host, Data#eldap.port, Opts, Data#eldap.timeout);
- do_connect(Host, Data, Opts) when Data#eldap.use_tls == true ->
--    Vsn = erlang:system_info(version),
--    if Vsn >= "5.3" ->
--          %% In R9C, but not in R9B
--          {_,_,X} = erlang:now(),
--          ssl:seed("bkrlnateqqo" ++ integer_to_list(X));
--       true -> true
--    end,
-     ssl:connect(Host, Data#eldap.port, [{verify,0}|Opts]).
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/hash.mk b/rabbitmq-server/plugins-src/eldap-wrapper/hash.mk
deleted file mode 100644 (file)
index 262b7cc..0000000
+++ /dev/null
@@ -1 +0,0 @@
-UPSTREAM_SHORT_HASH:=e309de4
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/license_info b/rabbitmq-server/plugins-src/eldap-wrapper/license_info
deleted file mode 100644 (file)
index 0a0e13c..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-Eldap is "Copyright (c) 2010, Torbjorn Tornkvist" and is covered by
-the MIT license.  It was downloaded from https://github.com/etnt/eldap
-
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/package.mk b/rabbitmq-server/plugins-src/eldap-wrapper/package.mk
deleted file mode 100644 (file)
index 02c8b4e..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-APP_NAME:=eldap
-
-UPSTREAM_GIT:=https://github.com/rabbitmq/eldap.git
-UPSTREAM_REVISION:=e309de4db4b78d67d623
-WRAPPER_PATCHES:=eldap-appify.patch remove-eldap-fsm.patch eldap-no-ssl-seed.patch remove-ietf-doc.patch
-
-ORIGINAL_APP_FILE:=$(CLONE_DIR)/ebin/$(APP_NAME).app
-DO_NOT_GENERATE_APP_FILE=true
-
-GENERATED_DIR:=$(CLONE_DIR)/generated
-PACKAGE_ERLC_OPTS+=-I $(GENERATED_DIR)
-INCLUDE_HRLS+=$(GENERATED_DIR)/ELDAPv3.hrl
-EBIN_BEAMS+=$(GENERATED_DIR)/ELDAPv3.beam
-
-define package_rules
-
-$(CLONE_DIR)/src/ELDAPv3.asn: $(CLONE_DIR)/.done
-
-$(GENERATED_DIR)/ELDAPv3.hrl $(GENERATED_DIR)/ELDAPv3.beam: $(CLONE_DIR)/src/ELDAPv3.asn
-       @mkdir -p $(GENERATED_DIR)
-       $(ERLC) $(PACKAGE_ERLC_OPTS) -o $(GENERATED_DIR) $$<
-
-$(PACKAGE_DIR)+clean::
-       rm -rf $(GENERATED_DIR) $(EBIN_DIR)
-
-# This rule is run *before* the one in do_package.mk
-$(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done::
-       cp $(CLONE_DIR)/LICENSE $(PACKAGE_DIR)/LICENSE-MIT-eldap
-
-endef
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/remove-eldap-fsm.patch b/rabbitmq-server/plugins-src/eldap-wrapper/remove-eldap-fsm.patch
deleted file mode 100644 (file)
index f6b05f6..0000000
+++ /dev/null
@@ -1,952 +0,0 @@
-diff --git a/src/eldap_fsm.erl b/src/eldap_fsm.erl
-deleted file mode 100644
-index 381ce69..0000000
---- a/src/eldap_fsm.erl
-+++ /dev/null
-@@ -1,946 +0,0 @@
---module(eldap_fsm).
--%%% --------------------------------------------------------------------
--%%% Created:  12 Oct 2000 by Tobbe 
--%%% Function: Erlang client LDAP implementation according RFC 2251.
--%%%           The interface is based on RFC 1823, and
--%%%           draft-ietf-asid-ldap-c-api-00.txt
--%%%
--%%% Copyright (C) 2000  Torbjörn Törnkvist
--%%% Copyright (c) 2010 Torbjorn Tornkvist <tobbe@tornkvist.org>
--%%% See MIT-LICENSE at the top dir for licensing information.
--%%% 
--%%% Modified by Sean Hinde <shinde@iee.org> 7th Dec 2000
--%%% Turned into gen_fsm, made non-blocking, added timers etc to support this.
--%%% Now has the concept of a name (string() or atom()) per instance which allows
--%%% multiple users to call by name if so desired.
--%%%
--%%% Can be configured with start_link parameters or use a config file to get
--%%% host to connect to, dn, password, log function etc.
--%%% --------------------------------------------------------------------
--
--
--%%%----------------------------------------------------------------------
--%%% LDAP Client state machine.
--%%% Possible states are:
--%%%     connecting - actually disconnected, but retrying periodically
--%%%     wait_bind_response  - connected and sent bind request
--%%%     active - bound to LDAP Server and ready to handle commands
--%%%----------------------------------------------------------------------
--
--%%-compile(export_all).
--%%-export([Function/Arity, ...]).
--
---behaviour(gen_fsm).
--
--%% External exports
---export([start_link/1, start_link/5, start_link/6]).
--
---export([baseObject/0,singleLevel/0,wholeSubtree/0,close/1,
--       equalityMatch/2,greaterOrEqual/2,lessOrEqual/2,
--       approxMatch/2,search/2,substrings/2,present/1,
--       'and'/1,'or'/1,'not'/1,modify/3, mod_add/2, mod_delete/2,
--       mod_replace/2, add/3, delete/2, modify_dn/5]).
---export([debug_level/2, get_status/1]).
--
--%% gen_fsm callbacks
---export([init/1, connecting/2,
--       connecting/3, wait_bind_response/3, active/3, handle_event/3,
--       handle_sync_event/4, handle_info/3, terminate/3, code_change/4]).
--
--
---import(lists,[concat/1]).
--
---include("ELDAPv3.hrl").
---include("eldap.hrl").
--
---define(LDAP_VERSION, 3).
---define(RETRY_TIMEOUT, 5000).
---define(BIND_TIMEOUT, 10000).
---define(CMD_TIMEOUT, 5000).
---define(MAX_TRANSACTION_ID, 65535).
---define(MIN_TRANSACTION_ID, 0).
--
---record(eldap, {version = ?LDAP_VERSION,
--              hosts,        % Possible hosts running LDAP servers
--              host = null,  % Connected Host LDAP server
--              port = 389 ,  % The LDAP server port
--              fd = null,    % Socket filedescriptor.
--              rootdn = "",  % Name of the entry to bind as
--              passwd,       % Password for (above) entry
--              id = 0,       % LDAP Request ID 
--              log,          % User provided log function
--              bind_timer,   % Ref to bind timeout
--              dict,         % dict holding operation params and results
--              debug_level   % Integer debug/logging level
--             }).
--
--%%%----------------------------------------------------------------------
--%%% API
--%%%----------------------------------------------------------------------
--start_link(Name) ->
--    Reg_name = list_to_atom("eldap_" ++ Name),
--    gen_fsm:start_link({local, Reg_name}, ?MODULE, [], []).
--
--start_link(Name, Hosts, Port, Rootdn, Passwd) ->
--    Log = fun(N, Fmt, Args) -> io:format("---- " ++ Fmt, [Args]) end,
--    Reg_name = list_to_atom("eldap_" ++ Name),
--    gen_fsm:start_link({local, Reg_name}, ?MODULE, {Hosts, Port, Rootdn, Passwd, Log}, []).
--
--start_link(Name, Hosts, Port, Rootdn, Passwd, Log) ->
--    Reg_name = list_to_atom("eldap_" ++ Name),
--    gen_fsm:start_link({local, Reg_name}, ?MODULE, {Hosts, Port, Rootdn, Passwd, Log}, []).
--
--%%% --------------------------------------------------------------------
--%%% Set Debug Level. 0 - none, 1 - errors, 2 - ldap events
--%%% --------------------------------------------------------------------
--debug_level(Handle, N) when integer(N) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:sync_send_all_state_event(Handle1, {debug_level,N}).
--
--%%% --------------------------------------------------------------------
--%%% Get status of connection.
--%%% --------------------------------------------------------------------
--get_status(Handle) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:sync_send_all_state_event(Handle1, get_status).
--
--%%% --------------------------------------------------------------------
--%%% Shutdown connection (and process) asynchronous.
--%%% --------------------------------------------------------------------
--close(Handle) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:send_all_state_event(Handle1, close).
--
--%%% --------------------------------------------------------------------
--%%% Add an entry. The entry field MUST NOT exist for the AddRequest
--%%% to succeed. The parent of the entry MUST exist.
--%%% Example:
--%%%
--%%%  add(Handle, 
--%%%         "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
--%%%         [{"objectclass", ["person"]},
--%%%          {"cn", ["Bill Valentine"]},
--%%%          {"sn", ["Valentine"]},
--%%%          {"telephoneNumber", ["545 555 00"]}]
--%%%     )
--%%% --------------------------------------------------------------------
--add(Handle, Entry, Attributes) when list(Entry),list(Attributes) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:sync_send_event(Handle1, {add, Entry, add_attrs(Attributes)}).
--
--%%% Do sanity check !
--add_attrs(Attrs) ->
--    F = fun({Type,Vals}) when list(Type),list(Vals) -> 
--              %% Confused ? Me too... :-/
--              {'AddRequest_attributes',Type, Vals} 
--      end,
--    case catch lists:map(F, Attrs) of
--      {'EXIT', _} -> throw({error, attribute_values});
--      Else        -> Else
--    end.
--
--
--%%% --------------------------------------------------------------------
--%%% Delete an entry. The entry consists of the DN of 
--%%% the entry to be deleted.
--%%% Example:
--%%%
--%%%  delete(Handle, 
--%%%         "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com"
--%%%        )
--%%% --------------------------------------------------------------------
--delete(Handle, Entry) when list(Entry) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:sync_send_event(Handle1, {delete, Entry}).
--
--%%% --------------------------------------------------------------------
--%%% Modify an entry. Given an entry a number of modification
--%%% operations can be performed as one atomic operation.
--%%% Example:
--%%%
--%%%  modify(Handle, 
--%%%         "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
--%%%         [replace("telephoneNumber", ["555 555 00"]),
--%%%          add("description", ["LDAP hacker"])] 
--%%%        )
--%%% --------------------------------------------------------------------
--modify(Handle, Object, Mods) when list(Object), list(Mods) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:sync_send_event(Handle1, {modify, Object, Mods}).
--
--%%%
--%%% Modification operations. 
--%%% Example:
--%%%            replace("telephoneNumber", ["555 555 00"])
--%%%
--mod_add(Type, Values) when list(Type), list(Values)     -> m(add, Type, Values).
--mod_delete(Type, Values) when list(Type), list(Values)  -> m(delete, Type, Values).
--mod_replace(Type, Values) when list(Type), list(Values) -> m(replace, Type, Values).
--
--m(Operation, Type, Values) ->
--    #'ModifyRequest_modification_SEQOF'{
--       operation = Operation,
--       modification = #'AttributeTypeAndValues'{
--       type = Type,
--       vals = Values}}.
--
--%%% --------------------------------------------------------------------
--%%% Modify an entry. Given an entry a number of modification
--%%% operations can be performed as one atomic operation.
--%%% Example:
--%%%
--%%%  modify_dn(Handle, 
--%%%    "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com",
--%%%    "cn=Ben Emerson",
--%%%    true,
--%%%    ""
--%%%        )
--%%% --------------------------------------------------------------------
--modify_dn(Handle, Entry, NewRDN, DelOldRDN, NewSup) 
--  when list(Entry),list(NewRDN),atom(DelOldRDN),list(NewSup) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:sync_send_event(Handle1, {modify_dn, Entry, NewRDN, bool_p(DelOldRDN), optional(NewSup)}).
--
--%%% Sanity checks !
--
--bool_p(Bool) when Bool==true;Bool==false -> Bool.
--
--optional([])    -> asn1_NOVALUE;
--optional(Value) -> Value.
--
--%%% --------------------------------------------------------------------
--%%% Synchronous search of the Directory returning a 
--%%% requested set of attributes.
--%%%
--%%%  Example:
--%%%
--%%%   Filter = eldap:substrings("sn", [{any,"o"}]),
--%%%   eldap:search(S, [{base, "dc=bluetail, dc=com"},
--%%%                    {filter, Filter},
--%%%                    {attributes,["cn"]}])),
--%%%
--%%% Returned result:  {ok, #eldap_search_result{}}
--%%%
--%%% Example:
--%%%
--%%%  {ok,{eldap_search_result,
--%%%        [{eldap_entry,
--%%%           "cn=Magnus Froberg, dc=bluetail, dc=com",
--%%%           [{"cn",["Magnus Froberg"]}]},
--%%%         {eldap_entry,
--%%%           "cn=Torbjorn Tornkvist, dc=bluetail, dc=com",
--%%%           [{"cn",["Torbjorn Tornkvist"]}]}],
--%%%        []}}
--%%%
--%%% --------------------------------------------------------------------
--search(Handle, A) when record(A, eldap_search) ->
--    call_search(Handle, A);
--search(Handle, L) when list(Handle), list(L) ->
--    case catch parse_search_args(L) of
--      {error, Emsg}                  -> {error, Emsg};
--      {'EXIT', Emsg}                 -> {error, Emsg};
--      A when record(A, eldap_search) -> call_search(Handle, A)
--    end.
--
--call_search(Handle, A) ->
--    Handle1 = get_handle(Handle),
--    gen_fsm:sync_send_event(Handle1, {search, A}).
--
--parse_search_args(Args) ->
--    parse_search_args(Args, #eldap_search{scope = wholeSubtree}).
--                     
--parse_search_args([{base, Base}|T],A) ->
--    parse_search_args(T,A#eldap_search{base = Base});
--parse_search_args([{filter, Filter}|T],A) ->
--    parse_search_args(T,A#eldap_search{filter = Filter});
--parse_search_args([{scope, Scope}|T],A) ->
--    parse_search_args(T,A#eldap_search{scope = Scope});
--parse_search_args([{attributes, Attrs}|T],A) ->
--    parse_search_args(T,A#eldap_search{attributes = Attrs});
--parse_search_args([{types_only, TypesOnly}|T],A) ->
--    parse_search_args(T,A#eldap_search{types_only = TypesOnly});
--parse_search_args([{timeout, Timeout}|T],A) when integer(Timeout) ->
--    parse_search_args(T,A#eldap_search{timeout = Timeout});
--parse_search_args([H|T],A) ->
--    throw({error,{unknown_arg, H}});
--parse_search_args([],A) ->
--    A.
--
--%%%
--%%% The Scope parameter
--%%%
--baseObject()   -> baseObject.
--singleLevel()  -> singleLevel.
--wholeSubtree() -> wholeSubtree.
--
--%%%
--%%% Boolean filter operations
--%%%
--'and'(ListOfFilters) when list(ListOfFilters) -> {'and',ListOfFilters}.
--'or'(ListOfFilters)  when list(ListOfFilters) -> {'or', ListOfFilters}.
--'not'(Filter)        when tuple(Filter)       -> {'not',Filter}.
--
--%%%
--%%% The following Filter parameters consist of an attribute
--%%% and an attribute value. Example: F("uid","tobbe")
--%%%
--equalityMatch(Desc, Value)   -> {equalityMatch, av_assert(Desc, Value)}.
--greaterOrEqual(Desc, Value)  -> {greaterOrEqual, av_assert(Desc, Value)}.
--lessOrEqual(Desc, Value)     -> {lessOrEqual, av_assert(Desc, Value)}.
--approxMatch(Desc, Value)     -> {approxMatch, av_assert(Desc, Value)}.
--
--av_assert(Desc, Value) ->
--    #'AttributeValueAssertion'{attributeDesc  = Desc,
--                             assertionValue = Value}.
--
--%%%
--%%% Filter to check for the presence of an attribute
--%%%
--present(Attribute) when list(Attribute) -> 
--    {present, Attribute}.
--
--
--%%%
--%%% A substring filter seem to be based on a pattern:
--%%%
--%%%   InitValue*AnyValue*FinalValue
--%%%
--%%% where all three parts seem to be optional (at least when
--%%% talking with an OpenLDAP server). Thus, the arguments
--%%% to substrings/2 looks like this:
--%%%
--%%% Type   ::= string( <attribute> )
--%%% SubStr ::= listof( {initial,Value} | {any,Value}, {final,Value})
--%%%
--%%% Example: substrings("sn",[{initial,"To"},{any,"kv"},{final,"st"}])
--%%% will match entries containing:  'sn: Tornkvist'
--%%%
--substrings(Type, SubStr) when list(Type), list(SubStr) -> 
--    Ss = {'SubstringFilter_substrings',v_substr(SubStr)},
--    {substrings,#'SubstringFilter'{type = Type,
--                                 substrings = Ss}}.
--
--
--get_handle(Pid) when pid(Pid)    -> Pid;
--get_handle(Atom) when atom(Atom) -> Atom;
--get_handle(Name) when list(Name) -> list_to_atom("eldap_" ++ Name).
--%%%----------------------------------------------------------------------
--%%% Callback functions from gen_fsm
--%%%----------------------------------------------------------------------
--
--%%----------------------------------------------------------------------
--%% Func: init/1
--%% Returns: {ok, StateName, StateData}          |
--%%          {ok, StateName, StateData, Timeout} |
--%%          ignore                              |
--%%          {stop, StopReason}             
--%% I use the trick of setting a timeout of 0 to pass control into the
--%% process.      
--%%----------------------------------------------------------------------
--init([]) ->
--    case get_config() of
--      {ok, Hosts, Rootdn, Passwd, Log} ->
--          init({Hosts, Rootdn, Passwd, Log});
--      {error, Reason} ->
--          {stop, Reason}
--    end;
--init({Hosts, Port, Rootdn, Passwd, Log}) ->
--    {ok, connecting, #eldap{hosts = Hosts,
--                          port = Port,
--                          rootdn = Rootdn,
--                          passwd = Passwd,
--                          id = 0,
--                          log = Log,
--                          dict = dict:new(),
--                          debug_level = 0}, 0}.
--
--%%----------------------------------------------------------------------
--%% Func: StateName/2
--%% Called when gen_fsm:send_event/2,3 is invoked (async)
--%% Returns: {next_state, NextStateName, NextStateData}          |
--%%          {next_state, NextStateName, NextStateData, Timeout} |
--%%          {stop, Reason, NewStateData}                         
--%%----------------------------------------------------------------------
--connecting(timeout, S) ->
--    {ok, NextState, NewS} = connect_bind(S),
--    {next_state, NextState, NewS}.
--
--%%----------------------------------------------------------------------
--%% Func: StateName/3
--%% Called when gen_fsm:sync_send_event/2,3 is invoked.
--%% Returns: {next_state, NextStateName, NextStateData}            |
--%%          {next_state, NextStateName, NextStateData, Timeout}   |
--%%          {reply, Reply, NextStateName, NextStateData}          |
--%%          {reply, Reply, NextStateName, NextStateData, Timeout} |
--%%          {stop, Reason, NewStateData}                          |
--%%          {stop, Reason, Reply, NewStateData}                    
--%%----------------------------------------------------------------------
--connecting(Event, From, S) ->
--    Reply = {error, connecting},
--    {reply, Reply, connecting, S}.
--
--wait_bind_response(Event, From, S) ->
--    Reply = {error, wait_bind_response},
--    {reply, Reply, wait_bind_response, S}.
--
--active(Event, From, S) ->
--    case catch send_command(Event, From, S) of
--      {ok, NewS} ->
--          {next_state, active, NewS};
--      {error, Reason} ->
--          {reply, {error, Reason}, active, S};
--      {'EXIT', Reason} ->
--          {reply, {error, Reason}, active, S}
--    end.
--
--%%----------------------------------------------------------------------
--%% Func: handle_event/3
--%% Called when gen_fsm:send_all_state_event/2 is invoked.
--%% Returns: {next_state, NextStateName, NextStateData}          |
--%%          {next_state, NextStateName, NextStateData, Timeout} |
--%%          {stop, Reason, NewStateData}                         
--%%----------------------------------------------------------------------
--handle_event(close, StateName, S) ->
--    gen_tcp:close(S#eldap.fd),
--    {stop, closed, S};
--
--handle_event(Event, StateName, S) ->
--    {next_state, StateName, S}.
--
--%%----------------------------------------------------------------------
--%% Func: handle_sync_event/4
--%% Called when gen_fsm:sync_send_all_state_event/2,3 is invoked
--%% Returns: {next_state, NextStateName, NextStateData}            |
--%%          {next_state, NextStateName, NextStateData, Timeout}   |
--%%          {reply, Reply, NextStateName, NextStateData}          |
--%%          {reply, Reply, NextStateName, NextStateData, Timeout} |
--%%          {stop, Reason, NewStateData}                          |
--%%          {stop, Reason, Reply, NewStateData}                    
--%%----------------------------------------------------------------------
--handle_sync_event({debug_level, N}, From, StateName, S) ->
--    {reply, ok, StateName, S#eldap{debug_level = N}};
--
--handle_sync_event(Event, From, StateName, S) ->
--    {reply, {StateName, S}, StateName, S};
--
--handle_sync_event(Event, From, StateName, S) ->
--    Reply = ok,
--    {reply, Reply, StateName, S}.
--
--%%----------------------------------------------------------------------
--%% Func: handle_info/3
--%% Returns: {next_state, NextStateName, NextStateData}          |
--%%          {next_state, NextStateName, NextStateData, Timeout} |
--%%          {stop, Reason, NewStateData}                         
--%%----------------------------------------------------------------------
--
--%%
--%% Packets arriving in various states
--%%
--handle_info({tcp, Socket, Data}, connecting, S) ->
--    log1("eldap. tcp packet received when disconnected!~n~p~n", [Data], S),
--    {next_state, connecting, S};
--
--handle_info({tcp, Socket, Data}, wait_bind_response, S) ->
--    cancel_timer(S#eldap.bind_timer),
--    case catch recvd_wait_bind_response(Data, S) of
--      bound                -> {next_state, active, S};
--      {fail_bind, Reason}  -> close_and_retry(S),
--                              {next_state, connecting, S#eldap{fd = null}};
--      {'EXIT', Reason}     -> close_and_retry(S),
--                              {next_state, connecting, S#eldap{fd = null}};
--      {error, Reason}      -> close_and_retry(S),
--                              {next_state, connecting, S#eldap{fd = null}}
--    end;
--
--handle_info({tcp, Socket, Data}, active, S) ->
--    case catch recvd_packet(Data, S) of
--      {reply, Reply, To, NewS} -> gen_fsm:reply(To, Reply),
--                                  {next_state, active, NewS};
--      {ok, NewS}               -> {next_state, active, NewS};
--      {'EXIT', Reason}         -> {next_state, active, S};
--      {error, Reason}          -> {next_state, active, S}
--    end;
--
--handle_info({tcp_closed, Socket}, All_fsm_states, S) ->
--    F = fun(Id, [{Timer, From, Name}|Res]) ->
--              gen_fsm:reply(From, {error, tcp_closed}),
--              cancel_timer(Timer)
--      end,
--    dict:map(F, S#eldap.dict),
--    retry_connect(),
--    {next_state, connecting, S#eldap{fd = null,
--                                   dict = dict:new()}};
--
--handle_info({tcp_error, Socket, Reason}, Fsm_state, S) ->
--    log1("eldap received tcp_error: ~p~nIn State: ~p~n", [Reason, Fsm_state], S),
--    {next_state, Fsm_state, S};
--%%
--%% Timers
--%%
--handle_info({timeout, Timer, {cmd_timeout, Id}}, active, S) ->
--    case cmd_timeout(Timer, Id, S) of
--      {reply, To, Reason, NewS} -> gen_fsm:reply(To, Reason),
--                                   {next_state, active, NewS};
--      {error, Reason}           -> {next_state, active, S}
--    end;
--
--handle_info({timeout, retry_connect}, connecting, S) ->
--    {ok, NextState, NewS} = connect_bind(S), 
--    {next_state, NextState, NewS};
--
--handle_info({timeout, Timer, bind_timeout}, wait_bind_response, S) ->
--    close_and_retry(S),
--    {next_state, connecting, S#eldap{fd = null}};
--
--%%
--%% Make sure we don't fill the message queue with rubbish
--%%
--handle_info(Info, StateName, S) ->
--    log1("eldap. Unexpected Info: ~p~nIn state: ~p~n when StateData is: ~p~n",
--                      [Info, StateName, S], S),
--    {next_state, StateName, S}.
--
--%%----------------------------------------------------------------------
--%% Func: terminate/3
--%% Purpose: Shutdown the fsm
--%% Returns: any
--%%----------------------------------------------------------------------
--terminate(Reason, StateName, StatData) ->
--    ok.
--
--%%----------------------------------------------------------------------
--%% Func: code_change/4
--%% Purpose: Convert process state when code is changed
--%% Returns: {ok, NewState, NewStateData}
--%%----------------------------------------------------------------------
--code_change(OldVsn, StateName, S, Extra) ->
--    {ok, StateName, S}.
--
--%%%----------------------------------------------------------------------
--%%% Internal functions
--%%%----------------------------------------------------------------------
--send_command(Command, From, S) ->
--    Id = bump_id(S),
--    {Name, Request} = gen_req(Command),
--    Message = #'LDAPMessage'{messageID  = Id,
--                           protocolOp = {Name, Request}},
--    log2("~p~n",[{Name, Request}], S),
--    {ok, Bytes} = asn1rt:encode('ELDAPv3', 'LDAPMessage', Message),
--    ok = gen_tcp:send(S#eldap.fd, Bytes),
--    Timer = erlang:start_timer(?CMD_TIMEOUT, self(), {cmd_timeout, Id}),
--    New_dict = dict:store(Id, [{Timer, From, Name}], S#eldap.dict),
--    {ok, S#eldap{id = Id,
--               dict = New_dict}}.
--
--gen_req({search, A}) ->
--    {searchRequest,
--     #'SearchRequest'{baseObject   = A#eldap_search.base,
--                    scope        = v_scope(A#eldap_search.scope),
--                    derefAliases = neverDerefAliases,
--                    sizeLimit    = 0, % no size limit
--                    timeLimit    = v_timeout(A#eldap_search.timeout),
--                    typesOnly    = v_bool(A#eldap_search.types_only),
--                    filter       = v_filter(A#eldap_search.filter),
--                    attributes   = v_attributes(A#eldap_search.attributes)
--                   }};
--gen_req({add, Entry, Attrs}) ->
--    {addRequest,
--     #'AddRequest'{entry      = Entry,
--                 attributes = Attrs}};
--gen_req({delete, Entry}) ->
--    {delRequest, Entry};
--gen_req({modify, Obj, Mod}) ->
--    v_modifications(Mod),
--    {modifyRequest, 
--     #'ModifyRequest'{object       = Obj,
--                    modification = Mod}};
--gen_req({modify_dn, Entry, NewRDN, DelOldRDN, NewSup}) ->
--    {modDNRequest,
--     #'ModifyDNRequest'{entry        = Entry,
--                      newrdn       = NewRDN,
--                      deleteoldrdn = DelOldRDN,
--                      newSuperior  = NewSup}}.
--     
--%%-----------------------------------------------------------------------
--%% recvd_packet
--%% Deals with incoming packets in the active state
--%% Will return one of:
--%%  {ok, NewS} - Don't reply to client yet as this is part of a search 
--%%               result and we haven't got all the answers yet.
--%%  {reply, Result, From, NewS} - Reply with result to client From
--%%  {error, Reason}
--%%  {'EXIT', Reason} - Broke
--%%-----------------------------------------------------------------------
--recvd_packet(Pkt, S) ->
--    check_tag(Pkt),
--    case asn1rt:decode('ELDAPv3', 'LDAPMessage', Pkt) of
--      {ok,Msg} ->
--          Op = Msg#'LDAPMessage'.protocolOp,
--          log2("~p~n",[Op], S),
--          Dict = S#eldap.dict,
--          Id = Msg#'LDAPMessage'.messageID,
--          {Timer, From, Name, Result_so_far} = get_op_rec(Id, Dict),
--          case {Name, Op} of
--              {searchRequest, {searchResEntry, R}} when
--                    record(R,'SearchResultEntry') ->
--                  New_dict = dict:append(Id, R, Dict),
--                  {ok, S#eldap{dict = New_dict}};
--              {searchRequest, {searchResDone, Result}} ->
--                  case Result#'LDAPResult'.resultCode of
--                      success ->
--                          {Res, Ref} = polish(Result_so_far),
--                          New_dict = dict:erase(Id, Dict),
--                          cancel_timer(Timer),
--                          {reply, #eldap_search_result{entries = Res,
--                                                       referrals = Ref}, From,
--                                                    S#eldap{dict = New_dict}};
--                      Reason ->
--                          New_dict = dict:erase(Id, Dict),
--                          cancel_timer(Timer),
--                          {reply, {error, Reason}, From, S#eldap{dict = New_dict}}
--                      end;
--              {searchRequest, {searchResRef, R}} ->
--                  New_dict = dict:append(Id, R, Dict),
--                  {ok, S#eldap{dict = New_dict}};
--              {addRequest, {addResponse, Result}} ->
--                  New_dict = dict:erase(Id, Dict),
--                  cancel_timer(Timer),
--                  Reply = check_reply(Result, From),
--                  {reply, Reply, From, S#eldap{dict = New_dict}};
--              {delRequest, {delResponse, Result}} ->
--                  New_dict = dict:erase(Id, Dict),
--                  cancel_timer(Timer),
--                  Reply = check_reply(Result, From),
--                  {reply, Reply, From, S#eldap{dict = New_dict}};
--              {modifyRequest, {modifyResponse, Result}} ->
--                  New_dict = dict:erase(Id, Dict),
--                  cancel_timer(Timer),
--                  Reply = check_reply(Result, From),
--                  {reply, Reply, From, S#eldap{dict = New_dict}};
--              {modDNRequest, {modDNResponse, Result}} ->
--                  New_dict = dict:erase(Id, Dict),
--                  cancel_timer(Timer),
--                  Reply = check_reply(Result, From),
--                  {reply, Reply, From, S#eldap{dict = New_dict}};
--              {OtherName, OtherResult} ->
--                  New_dict = dict:erase(Id, Dict),
--                  cancel_timer(Timer),
--                  {reply, {error, {invalid_result, OtherName, OtherResult}},
--                          From, S#eldap{dict = New_dict}}
--          end;
--      Error -> Error
--    end.
--
--check_reply(#'LDAPResult'{resultCode = success}, From) ->
--    ok;
--check_reply(#'LDAPResult'{resultCode = Reason}, From) ->
--    {error, Reason};
--check_reply(Other, From) ->
--    {error, Other}.
--
--get_op_rec(Id, Dict) ->
--    case dict:find(Id, Dict) of
--      {ok, [{Timer, From, Name}|Res]} ->
--          {Timer, From, Name, Res};
--      error ->
--          throw({error, unkown_id})
--    end.
--
--%%-----------------------------------------------------------------------
--%% recvd_wait_bind_response packet
--%% Deals with incoming packets in the wait_bind_response state
--%% Will return one of:
--%%  bound - Success - move to active state
--%%  {fail_bind, Reason} - Failed
--%%  {error, Reason}
--%%  {'EXIT', Reason} - Broken packet
--%%-----------------------------------------------------------------------
--recvd_wait_bind_response(Pkt, S) ->
--    check_tag(Pkt),
--    case asn1rt:decode('ELDAPv3', 'LDAPMessage', Pkt) of
--      {ok,Msg} ->
--          log2("~p", [Msg], S),
--          check_id(S#eldap.id, Msg#'LDAPMessage'.messageID),
--          case Msg#'LDAPMessage'.protocolOp of
--              {bindResponse, Result} ->
--                  case Result#'LDAPResult'.resultCode of
--                      success -> bound;
--                      Error   -> {fail_bind, Error}
--                  end
--          end;
--      Else ->
--          {fail_bind, Else}
--    end.
--
--check_id(Id, Id) -> ok;
--check_id(_, _)   -> throw({error, wrong_bind_id}).
--
--%%-----------------------------------------------------------------------
--%% General Helpers
--%%-----------------------------------------------------------------------
--
--cancel_timer(Timer) ->
--    erlang:cancel_timer(Timer),
--    receive
--      {timeout, Timer, _} ->
--          ok
--    after 0 ->
--          ok
--    end.
--
--
--%%% Sanity check of received packet
--check_tag(Data) ->
--    case asn1rt_ber:decode_tag(Data) of
--      {Tag, Data1, Rb} ->
--          case asn1rt_ber:decode_length(Data1) of
--              {{Len,Data2}, Rb2} -> ok;
--              _ -> throw({error,decoded_tag_length})
--          end;
--      _ -> throw({error,decoded_tag})
--    end.
--
--close_and_retry(S) ->
--    gen_tcp:close(S#eldap.fd),
--    retry_connect().
--
--retry_connect() ->
--    erlang:send_after(?RETRY_TIMEOUT, self(),
--                    {timeout, retry_connect}).
--
--
--%%-----------------------------------------------------------------------
--%% Sort out timed out commands
--%%-----------------------------------------------------------------------
--cmd_timeout(Timer, Id, S) ->
--    Dict = S#eldap.dict,
--    case dict:find(Id, Dict) of
--      {ok, [{Id, Timer, From, Name}|Res]} ->
--          case Name of
--              searchRequest ->
--                  {Res1, Ref1} = polish(Res),
--                  New_dict = dict:erase(Id, Dict),
--                  {reply, From, {timeout,
--                                 #eldap_search_result{entries = Res1,
--                                                      referrals = Ref1}},
--                                 S#eldap{dict = New_dict}};
--              Others ->
--                  New_dict = dict:erase(Id, Dict),
--                  {reply, From, {error, timeout}, S#eldap{dict = New_dict}}
--          end;
--      error ->
--          {error, timed_out_cmd_not_in_dict}
--    end.
--
--%%-----------------------------------------------------------------------
--%% Common stuff for results
--%%-----------------------------------------------------------------------
--%%%
--%%% Polish the returned search result
--%%%
--
--polish(Entries) ->
--    polish(Entries, [], []).
--
--polish([H|T], Res, Ref) when record(H, 'SearchResultEntry') ->
--    ObjectName = H#'SearchResultEntry'.objectName,
--    F = fun({_,A,V}) -> {A,V} end,
--    Attrs = lists:map(F, H#'SearchResultEntry'.attributes),
--    polish(T, [#eldap_entry{object_name = ObjectName,
--                          attributes  = Attrs}|Res], Ref);
--polish([H|T], Res, Ref) ->     % No special treatment of referrals at the moment.
--    polish(T, Res, [H|Ref]);
--polish([], Res, Ref) ->
--    {Res, Ref}.
--
--%%-----------------------------------------------------------------------
--%% Connect to next server in list and attempt to bind to it.
--%%-----------------------------------------------------------------------
--connect_bind(S) ->
--    Host = next_host(S#eldap.host, S#eldap.hosts),
--    TcpOpts = [{packet, asn1}, {active, true}],
--    case gen_tcp:connect(Host, S#eldap.port, TcpOpts) of
--      {ok, Socket} ->
--          case bind_request(Socket, S) of
--              {ok, NewS} ->
--                  Timer = erlang:start_timer(?BIND_TIMEOUT, self(),
--                                             {timeout, bind_timeout}),
--                  {ok, wait_bind_response, NewS#eldap{fd = Socket,
--                                                      host = Host,
--                                                      bind_timer = Timer}};
--              {error, Reason} ->
--                  gen_tcp:close(Socket),
--                  erlang:send_after(?RETRY_TIMEOUT, self(),
--                                    {timeout, retry_connect}),
--                  {ok, connecting, S#eldap{host = Host}}
--          end;
--      {error, Reason} ->
--          erlang:send_after(?RETRY_TIMEOUT, self(), 
--                            {timeout, retry_connect}),
--          {ok, connecting, S#eldap{host = Host}}
--    end.
--
--bind_request(Socket, S) ->
--    Id = bump_id(S),
--    Req = #'BindRequest'{version        = S#eldap.version,
--                       name           = S#eldap.rootdn,  
--                       authentication = {simple, S#eldap.passwd}},
--    Message = #'LDAPMessage'{messageID  = Id,
--                           protocolOp = {bindRequest, Req}},
--    log2("Message:~p~n",[Message], S),
--    {ok, Bytes} = asn1rt:encode('ELDAPv3', 'LDAPMessage', Message),
--    ok = gen_tcp:send(Socket, Bytes),
--    {ok, S#eldap{id = Id}}.
--
--%% Given last tried Server, find next one to try
--next_host(null, [H|_]) -> H;                  % First time, take first
--next_host(Host, Hosts) ->                     % Find next in turn
--    next_host(Host, Hosts, Hosts).
--
--next_host(Host, [Host], Hosts) -> hd(Hosts);  % Wrap back to first
--next_host(Host, [Host|Tail], Hosts) -> hd(Tail);      % Take next
--next_host(Host, [], Hosts) -> hd(Hosts);      % Never connected before? (shouldn't happen)
--next_host(Host, [H|T], Hosts) -> next_host(Host, T, Hosts).
--
--
--%%% --------------------------------------------------------------------
--%%% Verify the input data
--%%% --------------------------------------------------------------------
--
--v_filter({'and',L})           -> {'and',L};
--v_filter({'or', L})           -> {'or',L};
--v_filter({'not',L})           -> {'not',L};
--v_filter({equalityMatch,AV})  -> {equalityMatch,AV};
--v_filter({greaterOrEqual,AV}) -> {greaterOrEqual,AV};
--v_filter({lessOrEqual,AV})    -> {lessOrEqual,AV};
--v_filter({approxMatch,AV})    -> {approxMatch,AV};
--v_filter({present,A})         -> {present,A};
--v_filter({substrings,S}) when record(S,'SubstringFilter') -> {substrings,S};
--v_filter(_Filter) -> throw({error,concat(["unknown filter: ",_Filter])}).
--
--v_modifications(Mods) ->
--    F = fun({_,Op,_}) ->
--              case lists:member(Op,[add,delete,replace]) of
--                  true -> true;
--                  _    -> throw({error,{mod_operation,Op}})
--              end
--      end,
--    lists:foreach(F, Mods).
--
--v_substr([{Key,Str}|T]) when list(Str),Key==initial;Key==any;Key==final ->
--    [{Key,Str}|v_substr(T)];
--v_substr([H|T]) ->
--    throw({error,{substring_arg,H}});
--v_substr([]) -> 
--    [].
--v_scope(baseObject)   -> baseObject;
--v_scope(singleLevel)  -> singleLevel;
--v_scope(wholeSubtree) -> wholeSubtree;
--v_scope(_Scope)       -> throw({error,concat(["unknown scope: ",_Scope])}).
--
--v_bool(true)  -> true;
--v_bool(false) -> false;
--v_bool(_Bool) -> throw({error,concat(["not Boolean: ",_Bool])}).
--
--v_timeout(I) when integer(I), I>=0 -> I;
--v_timeout(_I) -> throw({error,concat(["timeout not positive integer: ",_I])}).
--
--v_attributes(Attrs) ->
--    F = fun(A) when list(A) -> A;
--         (A) -> throw({error,concat(["attribute not String: ",A])})
--      end,
--    lists:map(F,Attrs).
--
--
--%%% --------------------------------------------------------------------
--%%% Get and Validate the initial configuration
--%%% --------------------------------------------------------------------
--get_config() ->
--    Priv_dir = code:priv_dir(eldap),
--    File = filename:join(Priv_dir, "eldap.conf"),
--    case file:consult(File) of
--      {ok, Entries} ->
--          case catch parse(Entries) of
--              {ok, Hosts, Port, Rootdn, Passwd, Log} ->
--                  {ok, Hosts, Port, Rootdn, Passwd, Log};
--              {error, Reason} ->
--                  {error, Reason};
--              {'EXIT', Reason} ->
--                  {error, Reason}
--          end;
--      {error, Reason} ->
--          {error, Reason}
--    end.
--
--parse(Entries) ->
--    {ok,
--     get_hosts(host, Entries),
--     get_integer(port, Entries),
--     get_list(rootdn, Entries),
--     get_list(passwd, Entries),
--     get_log(log, Entries)}.
--
--get_integer(Key, List) ->
--    case lists:keysearch(Key, 1, List) of
--      {value, {Key, Value}} when integer(Value) ->
--          Value;
--      {value, {Key, Value}} ->
--          throw({error, "Bad Value in Config for " ++ atom_to_list(Key)});
--      false ->
--          throw({error, "No Entry in Config for " ++ atom_to_list(Key)})
--    end.
--
--get_list(Key, List) ->
--    case lists:keysearch(Key, 1, List) of
--      {value, {Key, Value}} when list(Value) ->
--          Value;
--      {value, {Key, Value}} ->
--          throw({error, "Bad Value in Config for " ++ atom_to_list(Key)});
--      false ->
--          throw({error, "No Entry in Config for " ++ atom_to_list(Key)})
--    end.
--
--get_log(Key, List) ->
--    case lists:keysearch(Key, 1, List) of
--      {value, {Key, Value}} when function(Value) ->
--          Value;
--      {value, {Key, Else}} ->
--          false;
--      false ->
--          fun(Level, Format, Args) -> io:format("--- " ++ Format, Args) end
--    end.
--
--get_hosts(Key, List) ->
--    lists:map(fun({Key1, {A,B,C,D}}) when integer(A),
--                                        integer(B),
--                                        integer(C),
--                                        integer(D),
--                                        Key == Key1->
--                    {A,B,C,D};
--               ({Key1, Value}) when list(Value),
--                                    Key == Key1->
--                    Value;
--               ({Else, Value}) ->
--                    throw({error, "Bad Hostname in config"}) 
--            end, List).
--
--%%% --------------------------------------------------------------------
--%%% Other Stuff
--%%% --------------------------------------------------------------------
--bump_id(#eldap{id = Id}) when Id > ?MAX_TRANSACTION_ID -> 
--    ?MIN_TRANSACTION_ID;
--bump_id(#eldap{id = Id}) ->
--    Id + 1.
--
--%%% --------------------------------------------------------------------
--%%% Log routines. Call a user provided log routine Fun.
--%%% --------------------------------------------------------------------
--
--log1(Str, Args, #eldap{log = Fun, debug_level = N}) -> log(Fun, Str, Args, 1, N).
--log2(Str, Args, #eldap{log = Fun, debug_level = N}) -> log(Fun, Str, Args, 2, N).
--
--log(Fun, Str, Args, This_level, Status) when function(Fun), This_level =< Status ->
--    catch Fun(This_level, Str, Args);
--log(_, _, _, _, _) -> 
--    ok.
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/remove-ietf-doc.patch b/rabbitmq-server/plugins-src/eldap-wrapper/remove-ietf-doc.patch
deleted file mode 100644 (file)
index e1f55d9..0000000
+++ /dev/null
@@ -1,3036 +0,0 @@
-diff --git a/doc/draft-ietf-asid-ldap-c-api-00.txt b/doc/draft-ietf-asid-ldap-c-api-00.txt
-deleted file mode 100755
-index 5f2e856..0000000
---- a/doc/draft-ietf-asid-ldap-c-api-00.txt
-+++ /dev/null
-@@ -1,3030 +0,0 @@
--
--
--
--
--
--
--Network Working Group                                           T. Howes
--INTERNET-DRAFT                             Netscape Communications Corp.
--Intended Category: Standards Track                              M. Smith
--Obsoletes: RFC 1823                        Netscape Communications Corp.
--Expires: January 1998                                          A. Herron
--                                                         Microsoft Corp.
--                                                               C. Weider
--                                                         Microsoft Corp.
--                                                                 M. Wahl
--                                                    Critical Angle, Inc.
--
--                                                            29 July 1997
--
--
--                The C LDAP Application Program Interface
--                  <draft-ietf-asid-ldap-c-api-00.txt>
--
--
--
--1.  Status of this Memo
--
--This draft document will be submitted to the RFC Editor as a Standards
--Track document. Distribution of this memo is unlimited. Please send com-
--ments to the authors.
--
--This document is an Internet-Draft.  Internet-Drafts are working docu-
--ments of the Internet Engineering Task Force (IETF), its areas, and its
--working groups.  Note that other groups may also distribute working
--documents as Internet-Drafts.
--
--Internet-Drafts are draft documents valid for a maximum of six months
--and may be updated, replaced, or obsoleted by other documents at any
--time.  It is inappropriate to use Internet-Drafts as reference material
--or to cite them other than as ``work in progress.''
--
--To learn the current status of any Internet-Draft, please check the
--``1id-abstracts.txt'' listing contained in the Internet-Drafts Shadow
--Directories on ds.internic.net (US East Coast), nic.nordu.net (Europe),
--ftp.isi.edu (US West Coast), or munnari.oz.au (Pacific Rim).
--
--2.  Introduction
--
--This document defines a C language application program interface to the
--lightweight directory access protocol (LDAP). This document replaces the
--previous definition of this API, defined in RFC 1823, updating it to
--include support for features found in version 3 of the LDAP protocol.
--New extended operation functions were added to support LDAPv3 features
--such as controls.  In addition, other LDAP API changes were made to
--
--
--
--Expires: January 1998                                           [Page 1]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--support information hiding and thread safety.
--
--The C LDAP API is designed to be powerful, yet simple to use. It defines
--compatible synchronous and asynchronous interfaces to LDAP to suit a
--wide variety of applications. This document gives a brief overview of
--the LDAP model, then an overview of how the API is used by an applica-
--tion program to obtain LDAP information.  The API calls are described in
--detail, followed by an appendix that provides some example code demon-
--strating the use of the API. This document provides information to the
--Internet community. It does not specify any standard.
--
--3.  Overview of the LDAP Model
--
--LDAP is the lightweight directory access protocol, described in [2] and
--[6]. It can provide a lightweight frontend to the X.500 directory [1],
--or a stand-alone service. In either mode, LDAP is based on a client-
--server model in which a client makes a TCP connection to an LDAP server,
--over which it sends requests and receives responses.
--
--The LDAP information model is based on the entry, which contains infor-
--mation about some object (e.g., a person).  Entries are composed of
--attributes, which have a type and one or more values. Each attribute has
--a syntax that determines what kinds of values are allowed in the attri-
--bute (e.g., ASCII characters, a jpeg photograph, etc.) and how those
--values behave during directory operations (e.g., is case significant
--during comparisons).
--
--Entries may be organized in a tree structure, usually based on politi-
--cal, geographical, and organizational boundaries. Each entry is uniquely
--named relative to its sibling entries by its relative distinguished name
--(RDN) consisting of one or more distinguished attribute values from the
--entry.  At most one value from each attribute may be used in the RDN.
--For example, the entry for the person Babs Jensen might be named with
--the "Barbara Jensen" value from the commonName attribute.
--
--A globally unique name for an entry, called a distinguished name or DN,
--is constructed by concatenating the sequence of RDNs from the entry up
--to the root of the tree. For example, if Babs worked for the University
--of Michigan, the DN of her U-M entry might be "cn=Barbara Jensen,
--o=University of Michigan, c=US". The DN format used by LDAP is defined
--in [4].
--
--Operations are provided to authenticate, search for and retrieve infor-
--mation, modify information, and add and delete entries from the tree.
--The next sections give an overview of how the API is used and detailed
--descriptions of the LDAP API calls that implement all of these func-
--tions.
--
--
--
--
--Expires: January 1998                                           [Page 2]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--4.  Overview of LDAP API Use
--
--An application generally uses the C LDAP API in four simple steps.
--
---    Initialize an LDAP session with a default LDAP server. The
--     ldap_init() function returns a handle to the session, allowing mul-
--     tiple connections to be open at once.
--
---    Authenticate to the LDAP server. The ldap_bind() function and
--     friends support a variety of authentication methods.
--
---    Perform some LDAP operations and obtain some results. ldap_search()
--     and friends return results which can be parsed by
--     ldap_result2error(), ldap_first_entry(), ldap_next_entry(), etc.
--
---    Close the session. The ldap_unbind() function closes the connec-
--     tion.
--
--Operations can be performed either synchronously or asynchronously.  The
--names of the synchronous functions end in _s. For example, a synchronous
--search can be completed by calling ldap_search_s(). An asynchronous
--search can be initiated by calling ldap_search(). All synchronous rou-
--tines return an indication of the outcome of the operation (e.g, the
--constant LDAP_SUCCESS or some other error code).  The asynchronous rou-
--tines return the message id of the operation initiated. This id can be
--used in subsequent calls to ldap_result() to obtain the result(s) of the
--operation. An asynchronous operation can be abandoned by calling
--ldap_abandon().
--
--Results and errors are returned in an opaque structure called LDAPMes-
--sage.  Routines are provided to parse this structure, step through
--entries and attributes returned, etc. Routines are also provided to
--interpret errors. Later sections of this document describe these rou-
--tines in more detail.
--
--LDAP version 3 servers may return referrals to other servers.  By
--default, implementations of this API will attempt to follow referrals
--automatically for the application.  This behavior can be disabled glo-
--bally (using the ldap_set_option() call) or on a per-request basis
--through the use of a client control.
--
--As in the LDAPv3 protocol itself, all DNs and string values that are
--passed into or produced by the C LDAP API are represented as UTF-8[10]
--characters.
--
--For compatibility with existing applications, implementations of this
--API will by default use version 2 of the LDAP protocol.  Applications
--that intend to take advantage of LDAP version 3 features will need to
--
--
--
--Expires: January 1998                                           [Page 3]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--use the ldap_set_option() call with a LDAP_OPT_PROTOCOL_VERSION to
--switch to version 3.
--
--
--5.  Common Data Structures
--
--Some data structures that are common to several LDAP API functions are
--defined here:
--
--           typedef struct ldap LDAP;
--
--           typedef struct ldapmsg LDAPMessage;
--
--           struct berval {
--                   unsigned long   bv_len;
--                   char            *bv_val;
--           };
--
--           struct timeval {
--                   long            tv_sec;
--                   long            tv_usec;
--           };
--
--The LDAP structure is an opaque data type that represents an LDAP ses-
--sion Typically this corresponds to a connection to a single server, but
--it may encompass several server connections in the face of LDAPv3 refer-
--rals.
--
--The LDAPMessage structure is an opaque data type that is used to return
--results and error information.
--
--The berval structure is used to represent arbitrary binary data and its
--fields have the following meanings:
--
--bv_len   Length of data in bytes.
--
--bv_val   A pointer to the data itself.
--
--
--The timeval structure is used to represent an interval of time and its
--fields have the following meanings:
--
--tv_sec   Seconds component of time interval.
--
--tv_usec  Microseconds component of time interval.
--
--
--
--
--
--
--Expires: January 1998                                           [Page 4]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--6.  LDAP Error Codes
--
--Many of the LDAP API routines return LDAP error codes, some of which
--indicate local errors and some of which may be returned by servers.
--Supported error codes are (hexadecimal values are given in parentheses
--after the constant):
--
--           LDAP_SUCCESS (0x00)
--           LDAP_OPERATIONS_ERROR( 0x01)
--           LDAP_PROTOCOL_ERROR (0x02)
--           LDAP_TIMELIMIT_EXCEEDED (0x03)
--           LDAP_SIZELIMIT_EXCEEDED (0x04)
--           LDAP_COMPARE_FALSE (0x05)
--           LDAP_COMPARE_TRUE (0x06)
--           LDAP_STRONG_AUTH_NOT_SUPPORTED (0x07)
--           LDAP_STRONG_AUTH_REQUIRED (0x08)
--           LDAP_REFERRAL (0x0a)                            -- new in LDAPv3
--           LDAP_ADMINLIMIT_EXCEEDED (0x0b)                 -- new in LDAPv3
--           LDAP_UNAVAILABLE_CRITICAL_EXTENSION (0x0c)      -- new in LDAPv3
--           LDAP_CONFIDENTIALITY_REQUIRED (0x0d)            -- new in LDAPv3
--           LDAP_NO_SUCH_ATTRIBUTE (0x10)
--           LDAP_UNDEFINED_TYPE (0x11)
--           LDAP_INAPPROPRIATE_MATCHING (0x12)
--           LDAP_CONSTRAINT_VIOLATION (0x13)
--           LDAP_TYPE_OR_VALUE_EXISTS (0x14)
--           LDAP_INVALID_SYNTAX (0x15)
--           LDAP_NO_SUCH_OBJECT (0x20)
--           LDAP_ALIAS_PROBLEM (0x21)
--           LDAP_INVALID_DN_SYNTAX (0x22)
--           LDAP_IS_LEAF (0x23)                             -- not used in LDAPv3
--           LDAP_ALIAS_DEREF_PROBLEM (0x24)
--           LDAP_INAPPROPRIATE_AUTH (0x30)
--           LDAP_INVALID_CREDENTIALS (0x31)
--           LDAP_INSUFFICIENT_ACCESS (0x32)
--           LDAP_BUSY (0x33)
--           LDAP_UNAVAILABLE (0x34)
--           LDAP_UNWILLING_TO_PERFORM (0x35)
--           LDAP_LOOP_DETECT (0x36)
--           LDAP_NAMING_VIOLATION (0x40)
--           LDAP_OBJECT_CLASS_VIOLATION (0x41)
--           LDAP_NOT_ALLOWED_ON_NONLEAF (0x42)
--           LDAP_NOT_ALLOWED_ON_RDN (0x43)
--           LDAP_ALREADY_EXISTS (0x44)
--           LDAP_NO_OBJECT_CLASS_MODS (0x45)
--           LDAP_RESULTS_TOO_LARGE (0x46)
--           LDAP_AFFECTS_MULTIPLE_DSAS (0x47)               -- new in LDAPv3
--           LDAP_OTHER (0x50)
--           LDAP_SERVER_DOWN (0x51)
--
--
--
--Expires: January 1998                                           [Page 5]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           LDAP_LOCAL_ERROR (0x52)
--           LDAP_ENCODING_ERROR (0x53)
--           LDAP_DECODING_ERROR (0x54)
--           LDAP_TIMEOUT (0x55)
--           LDAP_AUTH_UNKNOWN (0x56)
--           LDAP_FILTER_ERROR (0x57)
--           LDAP_USER_CANCELLED (0x58)
--           LDAP_PARAM_ERROR (0x59)
--           LDAP_NO_MEMORY (0x5a)
--           LDAP_CONNECT_ERROR (0x5b)
--           LDAP_NOT_SUPPORTED (0x5c)
--           LDAP_CONTROL_NOT_FOUND (0x5d)
--           LDAP_NO_RESULTS_RETURNED (0x5e)
--           LDAP_MORE_RESULTS_TO_RETURN (0x5f)
--           LDAP_CLIENT_LOOP (0x60)
--           LDAP_REFERRAL_LIMIT_EXCEEDED (0x61)
--
--
--7.  Performing LDAP Operations
--
--This section describes each LDAP operation API call in detail. All func-
--tions take a "session handle," a pointer to an LDAP structure containing
--per-connection information.  Many routines return results in an LDAPMes-
--sage structure. These structures and others are described as needed
--below.
--
--
--7.1.  Initializing an LDAP Session
--
--ldap_init() initializes a session with an LDAP server. The server is not
--actually contacted until an operation is performed that requires it,
--allowing various options to be set after initialization.
--
--        LDAP *ldap_init(
--                char    *hostname,
--                int     portno
--        );
--
--Use of the following routine is deprecated.
--
--        LDAP *ldap_open(
--                char    *hostname,
--                int     portno
--        );
--
--Parameters are:
--
--hostname Contains a space-separated list of hostnames or dotted strings
--
--
--
--Expires: January 1998                                           [Page 6]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--         representing the IP address of hosts running an LDAP server to
--         connect to. Each hostname in the list can include an optional
--         port number which is separated from the host itself with a
--         colon (:) character.  The hosts are tried in the order listed,
--         stopping with the first one to which a successful connection is
--         made. Note that only ldap_open() attempts to make the connec-
--         tion before returning to the caller. ldap_init() does not con-
--         nect to the LDAP server.
--
--portno   Contains the TCP port number to connect to. The default LDAP
--         port of 389 can be obtained by supplying the constant
--         LDAP_PORT.  If a host includes a port number then this parame-
--         ter is ignored.
--
--ldap_init() and ldap_open() both return a "session handle," a pointer to
--an opaque structure that should be passed to subsequent calls pertaining
--to the session. These routines return NULL if the session cannot be ini-
--tialized in which case the operating system error reporting mechanism
--can be checked to see why the call failed.
--
--Note that if you connect to an LDAPv2 server, one of the ldap_bind()
--calls described below must be completed before other operations can be
--performed on the session.  LDAPv3 does not require that a bind operation
--be completed before other operations can be performed.
--
--The calling program can set various attributes of the session by calling
--the routines described in the next section.
--
--
--7.2.  LDAP Session Handle Options
--
--The LDAP session handle returned by ldap_init() is a pointer to an
--opaque data type representing an LDAP session. Formerly, this data type
--was a structure exposed to the caller, and various fields in the struc-
--ture could be set to control aspects of the session, such as size and
--time limits on searches.
--
--In the interest of insulating callers from inevitable changes to this
--structure, these aspects of the session are now accessed through a pair
--of accessor functions, described below.
--
--ldap_get_option() is used to access the current value of various
--session-wide parameters. ldap_set_option() is used to set the value of
--these parameters.
--
--           int ldap_get_option(
--                   LDAP            *ld,
--                   int             option,
--
--
--
--Expires: January 1998                                           [Page 7]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--                   void            *outvalue
--           );
--
--           int ldap_set_option(
--                   LDAP            *ld,
--                   int             option,
--                   void            *invalue
--           );
--
--Parameters are:
--
--ld     The session handle.
--
--option The name of the option being accessed or set. This parameter
--       should be one of the following constants, which have the indi-
--       cated meanings.  After the constant the actual value of the con-
--       stant is listed in hexadecimal in parentheses followed by the
--       type of the corresponding outvalue or invalue parameter.
--
--   LDAP_OPT_DESC (0x01) int *
--          The underlying socket descriptor corresponding to the default
--          LDAP connection.
--
--   LDAP_OPT_DEREF (0x02) int *
--          Controls how aliases are handled during search. It can have
--          one of the following values: LDAP_DEREF_NEVER (0x00),
--          LDAP_DEREF_SEARCHING (0x01), LDAP_DEREF_FINDING (0x02), or
--          LDAP_DEREF_ALWAYS (0x03).  The LDAP_DEREF_SEARCHING value
--          means aliases should be dereferenced during the search but not
--          when locating the base object of the search. The
--          LDAP_DEREF_FINDING value means aliases should be dereferenced
--          when locating the base object but not during the search.
--
--   LDAP_OPT_SIZELIMIT (0x03) int *
--          A limit on the number of entries to return from a search. A
--          value of zero means no limit.
--
--   LDAP_OPT_TIMELIMIT (0x04) int *
--          A limit on the number of seconds to spend on a search. A value
--          of zero means no limit
--
--   LDAP_OPT_REBIND_FN (0x06) function pointer
--          See the discussion of ldap_bind() and friends below.
--
--   LDAP_OPT_REBIND_ARG (0x07) void *
--          See the discussion of ldap_bind() and friends below.
--
--   LDAP_OPT_REFERRALS (0x08) void *
--
--
--
--Expires: January 1998                                           [Page 8]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--          This option controls whether the LDAP library automatically
--          follows referrals returned by LDAP servers or not. It can be
--          set to one of the constants LDAP_OPT_ON or LDAP_OPT_OFF.
--
--   LDAP_OPT_RESTART (0x09) void *
--          This option controls whether LDAP I/O operations should
--          automatically be restarted if they abort prematurely. It
--          should be set to one of the constants LDAP_OPT_ON or
--          LDAP_OPT_OFF. This option is useful if an LDAP I/O operation
--          may be interrupted prematurely, for example by a timer going
--          off, or other interrrupt.
--
--   LDAP_OPT_PROTOCOL_VERSION (0x11) int *
--          This option indicates the version of the default LDAP server.
--          It can be one of the constants LDAP_VERSION2 or LDAP_VERSION3.
--          If no version is set the default is LDAP_VERSION2.
--
--   LDAP_OPT_SERVER_CONTROLS (0x12) LDAPControl **
--          A default list of LDAP server controls to be sent with each
--          request.  See the Using Controls section below.
--
--   LDAP_OPT_CLIENT_CONTROLS (0x13) LDAPControl **
--          A default list of client controls that affect the LDAP ses-
--          sion.  See the Using Controls section below.
--
--   LDAP_OPT_HOST_NAME (0x30) char **
--          The host name of the default LDAP server.
--
--   LDAP_OPT_ERROR_NUMBER (0x31) int *
--          The code of the most recent LDAP error that occurred for this
--          session.
--
--   LDAP_OPT_ERROR_STRING (0x32) char **
--          The message returned with the most recent LDAP error that
--          occurred for this session.
--
--
--outvalue The address of a place to put the value of the option. The
--         actual type of this parameter depends on the setting of the
--         option parameter.
--
--invalue  A pointer to the value the option is to be given. The actual
--         type of this parameter depends on the setting of the option
--         parameter. The constants LDAP_OPT_ON and LDAP_OPT_OFF can be
--         given for options that have on or off settings.
--
--
--
--
--
--
--Expires: January 1998                                           [Page 9]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--7.3.  Working with controls
--
--LDAPv3 operations can be extended through the use of controls.  Controls
--may be sent to a server or returned to the client with any LDAP message.
--These controls are referred to as server controls.
--
--The LDAP API also supports a client-side extension mechanism through the
--use of client controls. These controls affect the behavior of the LDAP
--API only and are never sent to a server.  A common data structure is
--used to represent both types of controls:
--
--           typedef struct ldapcontrol {
--                   char            *ldctl_oid;
--                   struct berval   ldctl_value;
--                   char            ldctl_iscritical;
--           } LDAPControl, *PLDAPControl;
--
--The fields in the ldapcontrol structure have the following meanings:
--
--ldctl_oid        The control type, represented as a string.
--
--ldctl_value      The data associated with the control (if any).
--
--ldctl_iscritical Indicates whether the control is critical of not. If
--                 this field is non-zero, the operation will only be car-
--                 ried out if the control is recognized by the server
--                 and/or client.
--
--Some LDAP API calls allocate an ldapcontrol structure or a NULL-
--terminated array of ldapcontrol structures.  The following routines can
--be used to dispose of a single control or an array of controls:
--
--           void ldap_control_free( LDAPControl *ctrl );
--           void ldap_controls_free( LDAPControl **ctrls );
--
--A set of controls that affect the entire session can be set using the
--ldap_set_option() function (see above).  A list of controls can also be
--passed directly to some LDAP API calls such as ldap_search_ext(), in
--which case any controls set for the session through the use of
--ldap_set_option() are ignored. Control lists are represented as a NULL-
--terminated array of pointers to ldapcontrol structures.
--
--Server controls are defined by LDAPv3 protocol extension documents; for
--example, a control has been proposed to support server-side sorting of
--search results [7].
--
--No client controls are defined by this document but they may be defined
--in future revisions or in any document that extends this API.
--
--
--
--Expires: January 1998                                          [Page 10]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--7.4.  Authenticating to the directory
--
--The following functions are used to authenticate an LDAP client to an
--LDAP directory server.
--
--The ldap_sasl_bind() and ldap_sasl_bind_s() functions can be used to do
--general and extensible authentication over LDAP through the use of the
--Simple Authentication Security Layer [8].  The routines both take the dn
--to bind as, the method to use, as a dotted-string representation of an
--OID identifying the method, and a struct berval holding the credentials.
--The special constant value LDAP_SASL_SIMPLE ("") can be passed to
--request simple authentication, or the simplified routines
--ldap_simple_bind() or ldap_simple_bind_s() can be used.
--
--           int ldap_sasl_bind(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *mechanism,
--                   struct berval   *cred,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   int             *msgidp
--           );
--
--           int ldap_sasl_bind_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *mechanism,
--                   struct berval   *cred,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   struct berval   **servercredp
--           );
--
--           int ldap_simple_bind(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *passwd
--           );
--
--           int ldap_simple_bind_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *passwd
--           );
--
--   The use of the following routines is deprecated:
--
--
--
--
--Expires: January 1998                                          [Page 11]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           int ldap_bind( LDAP *ld, char *dn, char *cred, int method );
--
--           int ldap_bind_s( LDAP *ld, char *dn, char *cred, int method );
--
--           int ldap_kerberos_bind( LDAP *ld, char *dn );
--
--           int ldap_kerberos_bind_s( LDAP *ld, char *dn );
--
--Parameters are:
--
--ld           The session handle.
--
--dn           The name of the entry to bind as.
--
--mechanism    Either LDAP_AUTH_SIMPLE_OID to get simple authentication,
--             or a dotted text string representing an OID identifying the
--             SASL method.
--
--cred         The credentials with which to authenticate. Arbitrary
--             credentials can be passed using this parameter. The format
--             and content of the credentials depends on the setting of
--             the mechanism parameter.
--
--passwd       For ldap_simple_bind(), the password to compare to the
--             entry's userPassword attribute.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_sasl_bind() call succeeds.
--
--servercredp  This result parameter will be set to the credentials
--             returned by the server.  This should be freed by calling
--             ldap_If no credentials are returned it will be set to NULL.
--
--Additional parameters for the deprecated routines are not described.
--Interested readers are referred to RFC 1823.
--
--The ldap_sasl_bind() function initiates an asynchronous bind operation
--and returns the constant LDAP_SUCCESS if the request was successfully
--sent, or another LDAP error code if not.  See the section below on error
--handling for more information about possible errors and how to interpret
--them.  If successful, ldap_sasl_bind() places the message id of the
--request in *msgidp. A subsequent call to ldap_result(), described below,
--can be used to obtain the result of the bind.
--
--
--
--
--Expires: January 1998                                          [Page 12]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--The ldap_simple_bind() function initiates a simple asynchronous bind
--operation and returns the message id of the operation initiated.  A sub-
--sequent call to ldap_result(), described below, can be used to obtain
--the result of the bind. In case of error, ldap_simple_bind() will return
---1, setting the session error parameters in the LDAP structure appropri-
--ately.
--
--The synchronous ldap_sasl_bind_s() and ldap_simple_bind_s() functions
--both return the result of the operation, either the constant
--LDAP_SUCCESS if the operation was successful, or another LDAP error code
--if it was not. See the section below on error handling for more informa-
--tion about possible errors and how to interpret them.
--
--Note that if an LDAPv2 server is contacted, no other operations over the
--connection should be attempted before a bind call has successfully com-
--pleted.
--
--Subsequent bind calls can be used to re-authenticate over the same con-
--nection, and multistep SASL sequences can be accomplished through a
--sequence of calls to ldap_sasl_bind() or ldap_sasl_bind_s().
--
--
--7.5.  Closing the session
--
--The following functions are used to unbind from the directory, close the
--connection, and dispose of the session handle.
--
--           int ldap_unbind( LDAP *ld );
--
--           int ldap_unbind_s( LDAP *ld );
--
--Parameters are:
--
--ld   The session handle.
--
--ldap_unbind() and ldap_unbind_s() both work synchronously, unbinding
--from the directory, closing the connection, and freeing up the ld struc-
--ture before returning.   There is no server response to an unbind opera-
--tion.  ldap_unbind() returns LDAP_SUCCESS (or another LDAP error code if
--the request cannot be sent to the LDAP server).  After a call to
--ldap_unbind() or ldap_unbind_s(), the session handle ld is invalid.
--
--
--7.6.  Searching
--
--The following functions are used to search the LDAP directory, returning
--a requested set of attributes for each entry matched.  There are five
--variations.
--
--
--
--Expires: January 1998                                          [Page 13]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           int ldap_search_ext(
--                   LDAP            *ld,
--                   char            *base,
--                   int             scope,
--                   char            *filter,
--                   char            **attrs,
--                   int             attrsonly,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   struct timeval  *timeoutp,
--                   int             sizelimit,
--                   int             *msgidp
--           );
--
--           int ldap_search_ext_s(
--                   LDAP            *ld,
--                   char            *base,
--                   int             scope,
--                   char            *filter,
--                   char            **attrs,
--                   int             attrsonly,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   struct timeval  *timeoutp,
--                   int             sizelimit,
--                   LDAPMessage     **res
--           );
--
--           int ldap_search(
--                   LDAP    *ld,
--                   char    *base,
--                   int     scope,
--                   char    *filter,
--                   char    **attrs,
--                   int     attrsonly
--           );
--
--           int ldap_search_s(
--                   LDAP            *ld,
--                   char            *base,
--                   int             scope,
--                   char            *filter,
--                   char            **attrs,
--                   int             attrsonly,
--                   LDAPMessage     **res
--           );
--
--           int ldap_search_st(
--
--
--
--Expires: January 1998                                          [Page 14]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--                   LDAP            *ld,
--                   char            *base,
--                   int             scope,
--                   char            *filter,
--                   char            **attrs,
--                   int             attrsonly,
--                   struct timeval  *timeout,
--                   LDAPMessage     **res
--           );
--
--Parameters are:
--
--ld           The session handle.
--
--base         The dn of the entry at which to start the search.
--
--scope        One of LDAP_SCOPE_BASE (0x00), LDAP_SCOPE_ONELEVEL (0x01),
--             or LDAP_SCOPE_SUBTREE (0x02), indicating the scope of the
--             search.
--
--filter       A character string as described in [3], representing the
--             search filter.
--
--attrs        A NULL-terminated array of strings indicating which attri-
--             butes to return for each matching entry. Passing NULL for
--             this parameter causes all available attributes to be
--             retrieved.
--
--attrsonly    A boolean value that should be zero if both attribute types
--             and values are to be returned, non-zero if only types are
--             wanted.
--
--timeout      For the ldap_search_st() function, this specifies the local
--             search timeout value.  For the ldap_search_ext() and
--             ldap_search_ext_s() functions, this specifies both the
--             local search timeout value and the operation time limit
--             that is sent to the server within the search request.
--
--res          For the synchronous calls, this is a result parameter which
--             will contain the results of the search upon completion of
--             the call.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_search_ext() call succeeds.
--
--
--
--Expires: January 1998                                          [Page 15]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--There are three options in the session handle ld which potentially
--affect how the search is performed. They are:
--
--LDAP_OPT_SIZELIMIT
--             A limit on the number of entries to return from the search.
--             A value of zero means no limit.  Note that the value from
--             the session handle is ignored when using the
--             ldap_search_ext() or ldap_search_ext_s() functions.
--
--LDAP_OPT_TIMELIMIT
--             A limit on the number of seconds to spend on the search. A
--             value of zero means no limit.  Note that the value from the
--             session handle is ignored when using the ldap_search_ext()
--             or ldap_search_ext_s() functions.
--
--LDAP_OPT_DEREF
--             One of LDAP_DEREF_NEVER (0x00), LDAP_DEREF_SEARCHING
--             (0x01), LDAP_DEREF_FINDING (0x02), or LDAP_DEREF_ALWAYS
--             (0x03), specifying how aliases should be handled during the
--             search. The LDAP_DEREF_SEARCHING value means aliases should
--             be dereferenced during the search but not when locating the
--             base object of the search. The LDAP_DEREF_FINDING value
--             means aliases should be dereferenced when locating the base
--             object but not during the search.
--
--The ldap_search_ext() function initiates an asynchronous search opera-
--tion and returns the constant LDAP_SUCCESS if the request was success-
--fully sent, or another LDAP error code if not.  See the section below on
--error handling for more information about possible errors and how to
--interpret them.  If successful, ldap_search_ext() places the message id
--of the request in *msgidp. A subsequent call to ldap_result(), described
--below, can be used to obtain the results from the search.  These results
--can be parsed using the result parsing routines described in detail
--later.
--
--Similar to ldap_search_ext(), the ldap_search() function initiates an
--asynchronous search operation and returns the message id of the opera-
--tion initiated.  As for ldap_search_ext(), a subsequent call to
--ldap_result(), described below, can be used to obtain the result of the
--bind. In case of error, ldap_search() will return -1, setting the ses-
--sion error parameters in the LDAP structure appropriately.
--
--The synchronous ldap_search_ext_s(), ldap_search_s(), and
--ldap_search_st() functions all return the result of the operation,
--either the constant LDAP_SUCCESS if the operation was successful, or
--another LDAP error code if it was not. See the section below on error
--handling for more information about possible errors and how to interpret
--them.  Entries returned from the search (if any) are contained in the
--
--
--
--Expires: January 1998                                          [Page 16]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--res parameter. This parameter is opaque to the caller.  Entries, attri-
--butes, values, etc., should be extracted by calling the parsing routines
--described below. The results contained in res should be freed when no
--longer in use by calling ldap_msgfree(), described later.
--
--The ldap_search_ext() and ldap_search_ext_s() functions support LDAPv3
--server controls, client controls, and allow varying size and time limits
--to be easily specified for each search operation.  The ldap_search_st()
--function is identical to ldap_search_s() except that it takes an addi-
--tional parameter specifying a local timeout for the search.
--
--7.7.  Reading an Entry
--
--LDAP does not support a read operation directly. Instead, this operation
--is emulated by a search with base set to the DN of the entry to read,
--scope set to LDAP_SCOPE_BASE, and filter set to "(objectclass=*)". attrs
--contains the list of attributes to return.
--
--
--7.8.  Listing the Children of an Entry
--
--LDAP does not support a list operation directly. Instead, this operation
--is emulated by a search with base set to the DN of the entry to list,
--scope set to LDAP_SCOPE_ONELEVEL, and filter set to "(objectclass=*)".
--attrs contains the list of attributes to return for each child entry.
--
--7.9.  Comparing a Value Against an Entry
--
--The following routines are used to compare a given attribute value
--assertion against an LDAP entry.  There are four variations:
--
--           int ldap_compare_ext(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *attr,
--                   struct berval   *bvalue
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   int             *msgidp
--           );
--
--           int ldap_compare_ext_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *attr,
--                   struct berval   *bvalue,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls
--
--
--
--Expires: January 1998                                          [Page 17]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           );
--
--           int ldap_compare(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *attr,
--                   char            *value
--           );
--
--           int ldap_compare_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *attr,
--                   char            *value
--           );
--
--Parameters are:
--
--ld           The session handle.
--
--dn           The name of the entry to compare against.
--
--attr         The attribute to compare against.
--
--bvalue       The attribute value to compare against those found in the
--             given entry. This parameter is used in the extended rou-
--             tines and is a pointer to a struct berval so it is possible
--             to compare binary values.
--
--value        A string attribute value to compare against, used by the
--             ldap_compare() and ldap_compare_s() functions.  Use
--             ldap_compare_ext() or ldap_compare_ext_s() if you need to
--             compare binary values.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_compare_ext() call succeeds.
--
--The ldap_compare_ext() function initiates an asynchronous compare opera-
--tion and returns the constant LDAP_SUCCESS if the request was success-
--fully sent, or another LDAP error code if not.  See the section below on
--error handling for more information about possible errors and how to
--interpret them.  If successful, ldap_compare_ext() places the message id
--of the request in *msgidp. A subsequent call to ldap_result(), described
--below, can be used to obtain the result of the compare.
--
--
--
--Expires: January 1998                                          [Page 18]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--Similar to ldap_compare_ext(), the ldap_compare() function initiates an
--asynchronous compare operation and returns the message id of the opera-
--tion initiated.  As for ldap_compare_ext(), a subsequent call to
--ldap_result(), described below, can be used to obtain the result of the
--bind. In case of error, ldap_compare() will return -1, setting the ses-
--sion error parameters in the LDAP structure appropriately.
--
--The synchronous ldap_compare_ext_s() and ldap_compare_s() functions both
--return the result of the operation, either the constant LDAP_SUCCESS if
--the operation was successful, or another LDAP error code if it was not.
--See the section below on error handling for more information about pos-
--sible errors and how to interpret them.
--
--The ldap_compare_ext() and ldap_compare_ext_s() functions support LDAPv3
--server controls and client controls.
--
--
--7.10.  Modifying an entry
--
--The following routines are used to modify an existing LDAP entry.  There
--are four variations:
--
--           typedef struct ldapmod {
--                   int             mod_op;
--                   char            *mod_type;
--                   union {
--                           char            **modv_strvals;
--                           struct berval   **modv_bvals;
--                   } mod_vals;
--           } LDAPMod;
--           #define mod_values      mod_vals.modv_strvals
--           #define mod_bvalues     mod_vals.modv_bvals
--
--           int ldap_modify_ext(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **mods,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   int             *msgidp
--           );
--
--           int ldap_modify_ext_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **mods,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls
--
--
--
--Expires: January 1998                                          [Page 19]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           );
--
--           int ldap_modify(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **mods
--           );
--
--           int ldap_modify_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **mods
--           );
--
--Parameters are:
--
--ld           The session handle.
--
--dn           The name of the entry to modify.
--
--mods         A NULL-terminated array of modifications to make to the
--             entry.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_modify_ext() call succeeds.
--
--The fields in the LDAPMod structure have the following meanings:
--
--mod_op       The modification operation to perform. It should be one of
--             LDAP_MOD_ADD (0x00), LDAP_MOD_DELETE (0x01), or
--             LDAP_MOD_REPLACE (0x02).  This field also indicates the
--             type of values included in the mod_vals union. It is logi-
--             cally ORed with LDAP_MOD_BVALUES (0x80) to select the
--             mod_bvalues form. Otherwise, the mod_values form is used.
--
--mod_type     The type of the attribute to modify.
--
--mod_vals     The values (if any) to add, delete, or replace. Only one of
--             the mod_values or mod_bvalues variants should be used,
--             selected by ORing the mod_op field with the constant
--             LDAP_MOD_BVALUES. mod_values is a NULL-terminated array of
--             zero-terminated strings and mod_bvalues is a NULL-
--             terminated array of berval structures that can be used to
--             pass binary values such as images.
--
--
--
--Expires: January 1998                                          [Page 20]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--For LDAP_MOD_ADD modifications, the given values are added to  the
--entry, creating the attribute if necessary.
--
--For LDAP_MOD_DELETE modifications, the given values are deleted from the
--entry, removing the attribute if no values remain. If the entire attri-
--bute is to  be deleted, the mod_vals field should be set to NULL.
--
--For LDAP_MOD_REPLACE modifications, the attribute will have the listed
--values after the modification, having been created if necessary, or
--removed if the mod_vals field is NULL. All modifications are performed
--in the order in which they are listed.
--
--The ldap_modify_ext() function initiates an asynchronous modify opera-
--tion and returns the constant LDAP_SUCCESS if the request was success-
--fully sent, or another LDAP error code if not.  See the section below on
--error handling for more information about possible errors and how to
--interpret them.  If successful, ldap_modify_ext() places the message id
--of the request in *msgidp. A subsequent call to ldap_result(), described
--below, can be used to obtain the result of the modify.
--
--Similar to ldap_modify_ext(), the ldap_modify() function initiates an
--asynchronous modify operation and returns the message id of the opera-
--tion initiated.  As for ldap_modify_ext(), a subsequent call to
--ldap_result(), described below, can be used to obtain the result of the
--modify. In case of error, ldap_modify() will return -1, setting the ses-
--sion error parameters in the LDAP structure appropriately.
--
--The synchronous ldap_modify_ext_s() and ldap_modify_s() functions both
--return the result of the operation, either the constant LDAP_SUCCESS if
--the operation was successful, or another LDAP error code if it was not.
--See the section below on error handling for more information about pos-
--sible errors and how to interpret them.
--
--The ldap_modify_ext() and ldap_modify_ext_s() functions support LDAPv3
--server controls and client controls.
--
--
--7.11.  Modifying the Name of an Entry
--
--In LDAPv2, the ldap_modrdn() and ldap_modrdn_s() routines were used to
--change the name of an LDAP entry. They could only be used to change the
--least significant component of a name (the RDN or relative distinguished
--name). LDAPv3 provides the Modify DN protocol operation that allows more
--general name change access. The ldap_rename() and ldap_rename_s() rou-
--tines are used to change the name of an entry, and the use of the
--ldap_modrdn() and ldap_modrdn_s() routines is deprecated.
--
--           int ldap_rename(
--
--
--
--Expires: January 1998                                          [Page 21]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *newrdn,
--                   char            *newparent,
--                   int             deleteoldrdn,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   int             *msgidp
--
--           );
--           int ldap_rename_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   char            *newrdn,
--                   char            *newparent,
--                   int             deleteoldrdn,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls
--           );
--
--   Use of the following routines is deprecated.
--
--           int ldap_modrdn(
--                   LDAP    *ld,
--                   char    *dn,
--                   char    *newrdn,
--                   int     deleteoldrdn
--           );
--           int ldap_modrdn_s(
--                   LDAP    *ld,
--                   char    *dn,
--                   char    *newrdn,
--                   int     deleteoldrdn
--           );
--
--Parameters are:
--
--ld           The session handle.
--
--dn           The name of the entry whose DN is to be changed.
--
--newrdn       The new RDN to give the entry.
--
--newparent    The new parent, or superior entry.  If this parameter is
--             NULL, only the RDN of the entry is changed.  The root DN
--             may be specified by passing a zero length string, "".  The
--             newparent parameter should always be NULL when using ver-
--             sion 2 of the LDAP protocol; otherwise the server's
--
--
--
--Expires: January 1998                                          [Page 22]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--             behavior is undefined.
--
--deleteoldrdn This parameter only has meaning on the rename routines if
--             newrdn is different than the old RDN. It is a boolean
--             value, if non-zero indicating that the old RDN value(s)
--             should be removed, if zero indicating that the old RDN
--             value(s) should be retained as non-distinguished values of
--             the entry.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_rename() call succeeds.
--
--The ldap_rename() function initiates an asynchronous modify DN operation
--and returns the constant LDAP_SUCCESS if the request was successfully
--sent, or another LDAP error code if not.  See the section below on error
--handling for more information about possible errors and how to interpret
--them.  If successful, ldap_rename() places the DN message id of the
--request in *msgidp. A subsequent call to ldap_result(), described below,
--can be used to obtain the result of the rename.
--
--The synchronous ldap_rename_s() returns the result of the operation,
--either the constant LDAP_SUCCESS if the operation was successful, or
--another LDAP error code if it was not.  See the section below on error
--handling for more information about possible errors and how to interpret
--them.
--
--The ldap_rename() and ldap_rename_s() functions both support LDAPv3
--server controls and client controls.
--
--
--7.12.  Adding an entry
--
--The following functions are used to add entries to the LDAP directory.
--There are four variations:
--
--           int ldap_add_ext(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **attrs,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   int             *msgidp
--           );
--
--
--
--
--Expires: January 1998                                          [Page 23]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           int ldap_add_ext_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **attrs,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls
--           );
--
--           int ldap_add(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **attrs
--           );
--
--           int ldap_add_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPMod         **attrs
--           );
--
--Parameters are:
--
--ld           The session handle.
--
--dn           The name of the entry to add.
--
--attrs        The entry's attributes, specified using the LDAPMod struc-
--             ture defined for ldap_modify(). The mod_type and mod_vals
--             fields should be filled in.  The mod_op field is ignored
--             unless ORed with the constant LDAP_MOD_BVALUES, used to
--             select the mod_bvalues case of the mod_vals union.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_add_ext() call succeeds.
--
--Note that the parent of the entry being added must already exist or the
--parent must be empty (i.e., equal to the root DN) for an add to succeed.
--
--The ldap_add_ext() function initiates an asynchronous add operation and
--returns the constant LDAP_SUCCESS if the request was successfully sent,
--or another LDAP error code if not.  See the section below on error han-
--dling for more information about possible errors and how to interpret
--them.  If successful, ldap_add_ext() places the message id of the
--request in *msgidp. A subsequent call to ldap_result(), described below,
--
--
--
--Expires: January 1998                                          [Page 24]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--can be used to obtain the result of the add.
--
--Similar to ldap_add_ext(), the ldap_add() function initiates an asyn-
--chronous add operation and returns the message id of the operation ini-
--tiated.  As for ldap_add_ext(), a subsequent call to ldap_result(),
--described below, can be used to obtain the result of the add. In case of
--error, ldap_add() will return -1, setting the session error parameters
--in the LDAP structure appropriately.
--
--The synchronous ldap_add_ext_s() and ldap_add_s() functions both return
--the result of the operation, either the constant LDAP_SUCCESS if the
--operation was successful, or another LDAP error code if it was not.  See
--the section below on error handling for more information about possible
--errors and how to interpret them.
--
--The ldap_add_ext() and ldap_add_ext_s() functions support LDAPv3 server
--controls and client controls.
--
--
--
--7.13.  Deleting an entry
--
--The following functions are used to delete a leaf entry from the LDAP
--directory.  There are four variations:
--
--           int ldap_delete_ext(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   int             *msgidp
--           );
--
--           int ldap_delete_ext_s(
--                   LDAP            *ld,
--                   char            *dn,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls
--           );
--
--           int ldap_delete(
--                   LDAP            *ld,
--                   char            *dn
--           );
--
--           int ldap_delete_s(
--                   LDAP            *ld,
--                   char            *dn
--
--
--
--Expires: January 1998                                          [Page 25]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           );
--
--Parameters are:
--
--ld           The session handle.
--
--dn           The name of the entry to delete.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_delete_ext() call succeeds.
--
--Note that the entry to delete must be a leaf entry (i.e., it must have
--no children). Deletion of entire subtrees in a single operation is not
--supported by LDAP.
--
--The ldap_delete_ext() function initiates an asynchronous delete opera-
--tion and returns the constant LDAP_SUCCESS if the request was success-
--fully sent, or another LDAP error code if not.  See the section below on
--error handling for more information about possible errors and how to
--interpret them.  If successful, ldap_delete_ext() places the message id
--of the request in *msgidp. A subsequent call to ldap_result(), described
--below, can be used to obtain the result of the delete.
--
--Similar to ldap_delete_ext(), the ldap_delete() function initiates an
--asynchronous delete operation and returns the message id of the opera-
--tion initiated.  As for ldap_delete_ext(), a subsequent call to
--ldap_result(), described below, can be used to obtain the result of the
--delete. In case of error, ldap_delete() will return -1, setting the ses-
--sion error parameters in the LDAP structure appropriately.
--
--The synchronous ldap_delete_ext_s() and ldap_delete_s() functions both
--return the result of the operation, either the constant LDAP_SUCCESS if
--the operation was successful, or another LDAP error code if it was not.
--See the section below on error handling for more information about pos-
--sible errors and how to interpret them.
--
--The ldap_delete_ext() and ldap_delete_ext_s() functions support LDAPv3
--server controls and client controls.
--
--
--7.14.  Extended Operations
--
--The ldap_extended_operation() and ldap_extended_operation_s() routines
--allow extended LDAP operations to be passed to the server, providing a
--
--
--
--Expires: January 1998                                          [Page 26]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--general protocol extensibility mechanism.
--
--           int ldap_extended_operation(
--                   LDAP            *ld,
--                   char            *exoid,
--                   struct berval   *exdata,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   int             *msgidp
--           );
--
--           int ldap_extended_operation_s(
--                   LDAP            *ld,
--                   char            *exoid,
--                   struct berval   *exdata,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls,
--                   char            **retoidp,
--                   struct berval   **retdatap
--           );
--
--Parameters are:
--
--ld           The session handle.
--
--requestoid   The dotted-OID text string naming the request.
--
--requestdata  The arbitrary data required by the operation (if NULL, no
--             data is sent to the server).
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--msgidp       This result parameter will be set to the message id of the
--             request if the ldap_extended_operation() call succeeds.
--
--retoidp      Pointer to a character string that will be set to an allo-
--             cated, dotted-OID text string returned by the server.  This
--             string should be disposed of using the ldap_memfree() func-
--             tion.  If no OID was returned, *retoidp is set to NULL.
--
--retdatap     Pointer to a berval structure pointer that will be set an
--             allocated copy of the data returned by the server.  This
--             struct berval should be disposed of using ber_bvfree().  If
--             no data is returned, *retdatap is set to NULL.
--
--The ldap_extended_operation() function initiates an asynchronous
--
--
--
--Expires: January 1998                                          [Page 27]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--extended operation and returns the constant LDAP_SUCCESS if the request
--was successfully sent, or another LDAP error code if not.  See the sec-
--tion below on error handling for more information about possible errors
--and how to interpret them.  If successful, ldap_extended_operation()
--places the message id of the request in *msgidp. A subsequent call to
--ldap_result(), described below, can be used to obtain the result of the
--extended operation which can be passed to ldap_parse_extended_result()
--to obtain the OID and data contained in the response.
--
--The synchronous ldap_extended_operation_s() function returns the result
--of the operation, either the constant LDAP_SUCCESS if the operation was
--successful, or another LDAP error code if it was not.  See the section
--below on error handling for more information about possible errors and
--how to interpret them.  The retoid and retdata parameters are filled in
--with the OID and data from the response.  If no OID or data was
--returned, these parameters are set to NULL.
--
--The ldap_extended_operation() and ldap_extended_operation_s() functions
--both support LDAPv3 server controls and client controls.
--
--
--8.  Abandoning An Operation
--
--The following calls are used to abandon an operation in progress:
--
--           int ldap_abandon_ext(
--                   LDAP            *ld,
--                   int             msgid,
--                   LDAPControl     **serverctrls,
--                   LDAPControl     **clientctrls
--           );
--
--           int ldap_abandon(
--                   LDAP            *ld,
--                   int             msgid
--           );
--
--
--ld           The session handle.
--
--msgid        The message id of the request to be abandoned.
--
--serverctrls  List of LDAP server controls.
--
--clientctrls  List of client controls.
--
--ldap_abandon_ext() abandons the operation with message id msgid and
--returns the constant LDAP_SUCCESS if the abandon was successful or
--
--
--
--Expires: January 1998                                          [Page 28]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--another LDAP error code if not.  See the section below on error handling
--for more information about possible errors and how to interpret them.
--
--ldap_abandon() is identical to ldap_abandon_ext() except that it returns
--zero if the abandon was successful, -1 otherwise and does not support
--LDAPv3 server controls or client controls.
--
--After a successful call to ldap_abandon() or ldap_abandon_ext(), results
--with the given message id are never returned from a subsequent call to
--ldap_result().  There is no server response to LDAP abandon operations.
--
--
--9.  Obtaining Results and Peeking Inside LDAP Messages
--
--ldap_result() is used to obtain the result of a previous asynchronously
--initiated operation. Note that depending on how it is called,
--ldap_result() may actually return a list or "chain" of messages.
--
--ldap_msgfree() frees the results obtained from a previous call to
--ldap_result(), or a synchronous search routine.
--
--ldap_msgtype() returns the type of an LDAP message.  ldap_msgid()
--returns the message ID of an LDAP message.
--
--           int ldap_result(
--                   LDAP            *ld,
--                   int             msgid,
--                   int             all,
--                   struct timeval  *timeout,
--                   LDAPMessage     **res
--           );
--
--           int ldap_msgfree( LDAPMessage *res );
--
--           int ldap_msgtype( LDAPMessage *res );
--
--           int ldap_msgid( LDAPMessage *res );
--
--Parameters are:
--
--ld       The session handle.
--
--msgid    The message id of the operation whose results are to be
--         returned, or the constant LDAP_RES_ANY (-1) if any result is
--         desired.
--
--all      Specifies how many messages will be retrieved in a single call
--         to ldap_result().  This parameter only has meaning for search
--
--
--
--Expires: January 1998                                          [Page 29]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--         results.  Pass the constant LDAP_MSG_ONE (0x00) to retrieve one
--         message at a time.  Pass LDAP_MSG_ALL (0x01) to request that
--         all results of a search be received before returning all
--         results in a single chain.  Pass LDAP_MSG_RECEIVED (0x02) to
--         indicate that all results retrieved so far should be returned
--         in the result chain.
--
--timeout  A timeout specifying how long to wait for results to be
--         returned.  A NULL value causes ldap_result() to block until
--         results are available.  A timeout value of zero seconds speci-
--         fies a polling behavior.
--
--res      For ldap_result(), a result parameter that will contain the
--         result(s) of the operation. For ldap_msgfree(), the result
--         chain to be freed, obtained from a previous call to
--         ldap_result(), ldap_search_s(), or ldap_search_st().
--
--Upon successful completion, ldap_result() returns the type of the first
--result returned in the res parameter. This will be one of the following
--constants.
--
--             LDAP_RES_BIND (0x61)
--             LDAP_RES_SEARCH_ENTRY (0x64)
--             LDAP_RES_SEARCH_REFERENCE (0x73)      -- new in LDAPv3
--             LDAP_RES_SEARCH_RESULT (0x65)
--             LDAP_RES_MODIFY (0x67)
--             LDAP_RES_ADD (0x69)
--             LDAP_RES_DELETE (0x6B)
--             LDAP_RES_MODDN (0x6D)
--             LDAP_RES_COMPARE (0x6F)
--             LDAP_RES_EXTENDED (0x78)              -- new in LDAPv3
--
--ldap_result() returns 0 if the timeout expired and -1 if an error
--occurs, in which case the error parameters of the LDAP session handle
--will be set accordingly.
--
--ldap_msgfree() frees the result structure pointed to by res and returns
--the type of the message it freed.
--
--ldap_msgtype() returns the type of the LDAP message it is passed as a
--parameter. The type will be one of the types listed above, or -1 on
--error.
--
--ldap_msgid() returns the message ID associated with the LDAP message
--passed as a parameter.
--
--
--
--
--
--
--Expires: January 1998                                          [Page 30]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--10.  Handling Errors and Parsing Results
--
--The following calls are used to extract information from results and
--handle errors returned by other LDAP API routines.
--
--           int ldap_parse_result(
--                   LDAP            *ld,
--                   LDAPMessage     *res,
--                   int             *errcodep,
--                   char            **matcheddnp,
--                   char            **errmsgp,
--                   char            ***referralsp,
--                   LDAPControl     ***serverctrlsp,
--                   int             freeit
--           );
--
--           int ldap_parse_sasl_bind_result(
--                   LDAP            *ld,
--                   LDAPMessage     *res,
--                   struct berval   **servercredp,
--                   int             freeit
--           );
--
--           int ldap_parse_extended_result(
--                   LDAP            *ld,
--                   LDAPMessage     *res,
--                   char            **resultoidp,
--                   struct berval   **resultdata,
--                   int             freeit
--           );
--
--           char *ldap_err2string( int err );
--
--   The use of the following routines is deprecated.
--
--           int ldap_result2error(
--                   LDAP            *ld,
--                   LDAPMessage     *res,
--                   int             freeit
--           );
--
--           void ldap_perror( LDAP *ld, char *msg );
--
--Parameters are:
--
--ld           The session handle.
--
--res          The result of an LDAP operation as returned by
--
--
--
--Expires: January 1998                                          [Page 31]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--             ldap_result() or one of the synchronous API operation
--             calls.
--
--errcodep     This result parameter will be filled in with the LDAP error
--             code field from the LDAPResult message.  This is the indi-
--             cation from the server of the outcome of the operation.
--             NULL may be passed to ignore this field.
--
--matcheddnp   In the case of a return of LDAP_NO_SUCH_OBJECT, this result
--             parameter will be filled in with a DN indicating how much
--             of the name in the request was recognized. NULL may be
--             passed to ignore this field.  The matched DN string should
--             be freed by calling ldap_memfree() which is described later
--             in this document.
--
--errmsgp      This result parameter will be filled in with the contents
--             of the error message field from the LDAPResult message.
--             The error message string should be freed by calling
--             ldap_memfree() which is described later in this document.
--             NULL may be passed to ignore this field.
--
--referralsp   This result parameter will be filled in with the contents
--             of the referrals field from the LDAPResult message, indi-
--             cating zero or more alternate LDAP servers where the
--             request should be retried.  The referrals array should be
--             freed by calling ldap_value_free() which is described later
--             in this document.  NULL may be passed to ignore this field.
--
--serverctrlsp This result parameter will be filled in with an allocated
--             array of controls copied out of the LDAPResult message.
--             The control array should be freed by calling
--             ldap_controls_free() which was described earlier.
--
--freeit       A boolean that determines whether the res parameter is
--             disposed of or not.  Pass any non-zero value to have these
--             routines free res after extracting the requested informa-
--             tion.  This is provided as a convenience; you can also use
--             ldap_msgfree() to free the result later.
--
--servercredp  For SASL bind results, this result parameter will be filled
--             in with the credentials passed back by the server for
--             mutual authentication, if given. An allocated berval struc-
--             ture is returned that should be disposed of by calling
--             ldap_ber_free().  NULL may be passed to ignore this field.
--
--resultoidp   For extended results, this result parameter will be filled
--             in with the dotted-OID text representation of the name of
--             the extended operation response.  This string should be
--
--
--
--Expires: January 1998                                          [Page 32]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--             disposed of by calling ldap_memfree().  NULL may be passed
--             to ignore this field.
--
--resultdatap  For extended results, this result parameter will be filled
--             in with a pointer to a struct berval containing the data in
--             the extended operation response.  It should be disposed of
--             by calling ldap_ber_free(). NULL may be passed to ignore
--             this field.
--
--err          For ldap_err2string(), an LDAP error code, as returned by
--             ldap_result2error() or another LDAP API call.
--
--Additional parameters for the deprecated routines are not described.
--Interested readers are referred to RFC 1823.
--
--All of the ldap_parse_*_result() routines skip over messages of type
--LDAP_RES_SEARCH_ENTRY and LDAP_RES_SEARCH_REFERENCE when looking for a
--result message to parse.  They return the constant LDAP_SUCCESS if the
--result was successfully parsed and another LDAP error code if not.  Note
--that the LDAP error code that indicates the outcome of the operation
--performed by the server is placed in the errcodep ldap_parse_result()
--parameter.
--
--ldap_err2string() is used to convert a numeric LDAP error code, as
--returned by one of the ldap_parse_*_result() routines, or one of the
--synchronous API operation calls, into an informative NULL-terminated
--character string message describing the error.  It returns a pointer to
--static data.
--
--
--11.  Stepping Through a List of Results
--
--The ldap_first_message() and ldap_next_message() routines are used to
--step through the list of messages in a result chain returned by
--ldap_result().  For search operations, the result chain may actually
--include referral messages, entry messages, and result messages.
--ldap_count_messages() is used to count the number of messages returned.
--The ldap_msgtype() function, described above, can be used to distinguish
--between the different message types.
--
--           LDAPMessage *ldap_first_message( LDAP *ld, LDAPMessage *res );
--
--           LDAPMessage *ldap_next_message( LDAP *ld, LDAPMessage *msg );
--
--           int ldap_count_messages( LDAP *ld, LDAPMessage *res );
--
--Parameters are:
--
--
--
--
--Expires: January 1998                                          [Page 33]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--ld     The session handle.
--
--res    The result chain, as obtained by a call to one of the synchronous
--       search routines or ldap_result().
--
--msg    The message returned by a previous call to ldap_first_message()
--       or ldap_next_message().
--
--ldap_first_message() and ldap_next_message() will return NULL when no
--more messages exist in the result set to be returned.  NULL is also
--returned if an error occurs while stepping through the entries, in which
--case the error parameters in the session handle ld will be set to indi-
--cate the error.
--
--ldap_count_messages() returns the number of messages contained in a
--chain of results. It can also be used to count the number of messages
--that remain in a chain if called with a message, entry, or reference
--returned by ldap_first_message(), ldap_next_message(),
--ldap_first_entry(), ldap_next_entry(), ldap_first_reference(),
--ldap_next_reference().
--
--
--12.  Parsing Search Results
--
--The following calls are used to parse the entries and references
--returned by ldap_search() and friends. These results are returned in an
--opaque structure that should only be accessed by calling the routines
--described below. Routines are provided to step through the entries and
--references returned, step through the attributes of an entry, retrieve
--the name of an entry, and retrieve the values associated with a given
--attribute in an entry.
--
--
--12.1.  Stepping Through a List of Entries
--
--The ldap_first_entry() and ldap_next_entry() routines are used to step
--through and retrieve the list of entries from a search result chain.
--The ldap_first_reference() and ldap_next_reference() routines are used
--to step through and retrieve the list of continuation references from a
--search result chain.  ldap_count_entries() is used to count the number
--of entries returned. ldap_count_references() is used to count the number
--of references returned.
--
--           LDAPMessage *ldap_first_entry( LDAP *ld, LDAPMessage *res );
--
--           LDAPMessage *ldap_next_entry( LDAP *ld, LDAPMessage *entry );
--
--           LDAPMessage *ldap_first_reference( LDAP *ld, LDAPMessage *res );
--
--
--
--Expires: January 1998                                          [Page 34]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           LDAPMessage *ldap_next_reference( LDAP *ld, LDAPMessage *ref );
--
--           int ldap_count_entries( LDAP *ld, LDAPMessage *res );
--
--           int ldap_count_references( LDAP *ld, LDAPMessage *res );
--
--Parameters are:
--
--ld     The session handle.
--
--res    The search result, as obtained by a call to one of the synchro-
--       nous search routines or ldap_result().
--
--entry  The entry returned by a previous call to ldap_first_entry() or
--       ldap_next_entry().
--
--ldap_first_entry() and ldap_next_entry() will return NULL when no more
--entries or references exist in the result set to be returned.  NULL is
--also returned if an error occurs while stepping through the entries, in
--which case the error parameters in the session handle ld will be set to
--indicate the error.
--
--ldap_count_entries() returns the number of entries contained in a chain
--of entries. It can also be used to count the number of entries that
--remain in a chain if called with a message, entry or reference returned
--by ldap_first_message(), ldap_next_message(), ldap_first_entry(),
--ldap_next_entry(), ldap_first_reference(), ldap_next_reference().
--
--ldap_count_references() returns the number of references contained in a
--chain of search results. It can also be used to count the number of
--references that remain in a chain.
--
--
--12.2.  Stepping Through the Attributes of an Entry
--
--The ldap_first_attribute() and ldap_next_attribute() calls are used to
--step through the list of attribute types returned with an entry.
--
--           char *ldap_first_attribute(
--                   LDAP            *ld,
--                   LDAPMessage     *entry,
--                   BerElement      **ptr
--           );
--
--           char *ldap_next_attribute(
--                   LDAP            *ld,
--                   LDAPMessage     *entry,
--                   BerElement      *ptr
--
--
--
--Expires: January 1998                                          [Page 35]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           );
--
--           void ldap_memfree( char *mem );
--
--Parameters are:
--
--ld     The session handle.
--
--entry  The entry whose attributes are to be stepped through, as returned
--       by ldap_first_entry() or ldap_next_entry().
--
--ptr    In ldap_first_attribute(), the address of a pointer used inter-
--       nally to keep track of the current position in the entry. In
--       ldap_next_attribute(), the pointer returned by a previous call to
--       ldap_first_attribute().
--
--mem    A pointer to memory allocated by the LDAP library, such as the
--       attribute names returned by ldap_first_attribute() and
--       ldap_next_attribute, or the DN returned by ldap_get_dn().
--
--ldap_first_attribute() and ldap_next_attribute() will return NULL when
--the end of the attributes is reached, or if there is an error, in which
--case the error parameters in the session handle ld will be set to indi-
--cate the error.
--
--Both routines return a pointer to an allocated buffer containing the
--current attribute name. This should be freed when no longer in use by
--calling ldap_memfree().
--
--ldap_first_attribute() will allocate and return in ptr a pointer to a
--BerElement used to keep track of the current position. This pointer
--should be passed in subsequent calls to ldap_next_attribute() to step
--through the entry's attributes. After a set of calls to
--ldap_first_attribute() and ldap_next_attribute(), if ptr is non-NULL, it
--should be freed by calling ldap_ber_free( ptr, 0 ). Note that it is very
--important to pass the second parameter as 0 (zero) in this call.
--
--The attribute names returned are suitable for passing in a call to
--ldap_get_values() and friends to retrieve the associated values.
--
--
--12.3.  Retrieving the Values of an Attribute
--
--ldap_get_values() and ldap_get_values_len() are used to retrieve the
--values of a given attribute from an entry. ldap_count_values() and
--ldap_count_values_len() are used to count the returned values.
--ldap_value_free() and ldap_value_free_len() are used to free the values.
--
--
--
--
--Expires: January 1998                                          [Page 36]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           char **ldap_get_values(
--                   LDAP            *ld,
--                   LDAPMessage     *entry,
--                   char            *attr
--           );
--
--           struct berval **ldap_get_values_len(
--                   LDAP            *ld,
--                   LDAPMessage     *entry,
--                   char            *attr
--           );
--
--           int ldap_count_values( char **vals );
--
--           int ldap_count_values_len( struct berval **vals );
--
--           int ldap_value_free( char **vals );
--
--           int ldap_value_free_len( struct berval **vals );
--
--Parameters are:
--
--ld     The session handle.
--
--entry  The entry from which to retrieve values, as returned by
--       ldap_first_entry() or ldap_next_entry().
--
--attr   The attribute whose values are to be retrieved, as returned by
--       ldap_first_attribute() or ldap_next_attribute(), or a caller-
--       supplied string (e.g., "mail").
--
--vals   The values returned by a previous call to ldap_get_values() or
--       ldap_get_values_len().
--
--Two forms of the various calls are provided. The first form is only
--suitable for use with non-binary character string data. The second _len
--form is used with any kind of data.
--
--Note that the values returned are dynamically allocated and should be
--freed by calling either ldap_value_free() or ldap_value_free_len() when
--no longer in use.
--
--
--12.4.  Retrieving the name of an entry
--
--ldap_get_dn() is used to retrieve the name of an entry.
--ldap_explode_dn() and ldap_explode_rdn() are used to break up a name
--into its component parts. ldap_dn2ufn() is used to convert the name into
--
--
--
--Expires: January 1998                                          [Page 37]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--a more "user friendly" format.
--
--           char *ldap_get_dn( LDAP *ld, LDAPMessage *entry );
--
--           char **ldap_explode_dn( char *dn, int notypes );
--
--           char **ldap_explode_rdn( char *rdn, int notypes );
--
--           char *ldap_dn2ufn( char *dn );
--
--Parameters are:
--
--ld      The session handle.
--
--entry   The entry whose name is to be retrieved, as returned by
--        ldap_first_entry() or ldap_next_entry().
--
--dn      The dn to explode, such as returned by ldap_get_dn().
--
--rdn     The rdn to explode, such as returned in the components of the
--        array returned by ldap_explode_dn().
--
--notypes A boolean parameter, if non-zero indicating that the dn or rdn
--        components should have their type information stripped off
--        (i.e., "cn=Babs" would become "Babs").
--
--ldap_get_dn() will return NULL if there is some error parsing the dn,
--setting error parameters in the session handle ld to indicate the error.
--It returns a pointer to malloc'ed space that the caller should free by
--calling ldap_memfree() when it is no longer in use.  Note the format of
--the DNs returned is given by [4].
--
--ldap_explode_dn() returns a NULL-terminated char * array containing the
--RDN components of the DN supplied, with or without types as indicated by
--the notypes parameter. The array returned should be freed when it is no
--longer in use by calling ldap_value_free().
--
--ldap_explode_rdn() returns a NULL-terminated char * array containing the
--components of the RDN supplied, with or without types as indicated by
--the notypes parameter. The array returned should be freed when it is no
--longer in use by calling ldap_value_free().
--
--ldap_dn2ufn() converts the DN into the user friendly format described in
--[5]. The UFN returned is malloc'ed space that should be freed by a call
--to ldap_memfree() when no longer in use.
--
--
--
--
--
--
--Expires: January 1998                                          [Page 38]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--13.  Encoded ASN.1 Value Manipulation
--
--This section describes routines which may be used to encode and decode
--BER-encoded ASN.1 values, which are often used inside of control and
--extension values.
--
--With the exceptions of two new functions ber_flatten() and ber_init(),
--these functions are compatible with the University of Michigan LDAP 3.3
--implementation of BER.
--
--
--13.1.  General
--
--           struct berval {
--                   unsigned long   bv_len;
--                   char            *bv_val;
--           };
--
--A struct berval contains a sequence of bytes and an indication of its
--length.  The bv_val is not null terminated.  bv_len must always be a
--nonnegative number.  Applications may allocate their own berval struc-
--tures.
--
--           typedef struct berelement {
--                   /* opaque */
--           } BerElement;
--
--The BerElement structure contains not only a copy of the encoded value,
--but also state information used in encoding or decoding.  Applications
--cannot allocate their own BerElement structures.  The internal state is
--neither thread-specific nor locked, so two threads should not manipulate
--the same BerElement value simultaneously.
--
--A single BerElement value cannot be used for both encoding and decoding.
--
--           void ber_bvfree ( struct berval *bv);
--
--ber_bvfree() frees a berval returned from this API.  Both the bv->bv_val
--string and the berval itself are freed.  Applications should not use
--ber_bvfree() with bervals which the application has allocated.
--
--           void ber_bvecfree ( struct berval **bv );
--
--ber_bvecfree() frees an array of bervals returned from this API.  Each
--of the bervals in the array are freed using ber_bvfree(), then the array
--itself is freed.
--
--           struct berval *ber_bvdup (struct berval *bv );
--
--
--
--Expires: January 1998                                          [Page 39]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--ber_bvdup() returns a copy of a berval.  The bv_val field in the
--returned berval points to a different area of memory as the bv_val field
--in the argument berval.  The null pointer is returned on error (e.g. out
--of memory).
--
--           void ber_free ( BerElement *ber, int fbuf );
--
--ber_free() frees a BerElement which is returned from the API calls
--ber_alloc_t() or ber_init().  Each BerElement must be freed by the
--caller.  The second argument fbuf should always be set to 1.
--
--
--13.2.  Encoding
--
--           BerElement *ber_alloc_t(int options);
--
--ber_alloc_t() constructs and returns BerElement.  The null pointer is
--returned on error.  The options field contains a bitwise-or of options
--which are to be used when generating the encoding of this BerElement.
--One option is defined and must always be supplied:
--
--           #define LBER_USE_DER 0x01
--
--When this option is present, lengths will always be encoded in the
--minimum number of octets.  Note that this option does not cause values
--of sets and sequences to be rearranged in tag and byte order, so these
--functions are not suitable for generating DER output as defined in X.509
--and X.680.
--
--Unrecognized option bits are ignored.
--
--The BerElement returned by ber_alloc_t() is initially empty.  Calls to
--ber_printf() will append bytes to the end of the BerElement.
--
--           int ber_printf(BerElement *ber, char *fmt, ... )
--
--The ber_printf() routine is used to encode a BER element in much the
--same way that sprintf() works.  One important difference, though, is
--that state information is kept in the ber argument so that multiple
--calls can be made to ber_printf() to append to the end of the BER ele-
--ment. ber must be a pointer to a BerElement returned by ber_alloc_t().
--ber_printf() interprets and formats its arguments according to the for-
--mat string fmt.  ber_printf() returns -1 if there is an error during
--encoding.  As with sprintf(), each character in fmt refers to an argu-
--ment to ber_printf().
--
--The format string can contain the following format characters:
--
--
--
--
--Expires: January 1998                                          [Page 40]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--'t'     Tag.  The next argument is an int specifying the tag to override
--        the next element to be written to the ber.  This works across
--        calls.  The int value must contain the tag class, constructed
--        bit, and tag value.  The tag value must fit in a single octet
--        (tag value is less than 32).  For example, a tag of "[3]" for a
--        constructed type is 0xA3.
--
--'b'     Boolean.  The next argument is an int, containing either 0 for
--        FALSE or 0xff for TRUE.  A boolean element is output.  If this
--        format character is not preceded by the 't' format modifier, the
--        tag 0x01 is used for the element.
--
--'i'     Integer.  The next argument is an int, containing the integer in
--        the host's byte order.  An integer element is output. If this
--        format character is not preceded by the 't' format modifier, the
--        tag 0x02 is used for the element.
--
--'X'     Bitstring.  The next two arguments are a char * pointer to the
--        start of the bitstring, followed by an int containing the number
--        of bits in the bitstring.  A bitstring element is output, in
--        primitive form.  If this format character is not preceded by the
--        't' format modifier, the tag 0x03 is used for the element.
--
--'n'     Null.  No argument is required.  An ASN.1 NULL element is out-
--        put.  If this format character is not preceded by the 't' format
--        modifier, the tag 0x05 is used for the element.
--
--'o'     Octet string.  The next two arguments are a char *, followed by
--        an int with the length of the string.  The string may contain
--        null bytes and need not be null-terminated.   An octet string
--        element is output, in primitive form.  If this format character
--        is not preceded by the 't' format modifier, the tag 0x04 is used
--        for the element.
--
--'s'     Octet string.  The next argument is a char * pointing to a
--        null-terminated string.  An octet string element in primitive
--        form is output, which does not include the trailing '\0' byte. If
--        this format character is not preceded by the 't' format modif-
--        ier, the tag 0x04 is used for the element.
--
--'v'     Several octet strings.  The next argument is a char **, an array
--        of char * pointers to null-terminated strings.  The last element
--        in the array must be a null pointer. The octet strings do not
--        include the trailing '\0' byte.  Note that a construct like
--        '{v}' is required to get an actual SEQUENCE OF octet strings.
--        The 't' format
--        modifier cannot be used with this format character.
--
--'V'     Several octet strings.  A null-terminated array of berval *'s is
--        supplied. Note that a construct like '{V}' is required to get an
--
--
--
--Expires: January 1998                                          [Page 41]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--        actual SEQUENCE OF octet strings. The 't' format modifier cannot
--        be used with this format character.
--
--'{'     Begin sequence.  No argument is required.  If this format char-
--        acter is not preceded by the 't' format modifier, the tag 0x30
--        is used.
--
--'}'     End sequence.  No argument is required.  The 't' format modifier
--        cannot be used with this format character.
--
--'['     Begin set.  No argument is required.  If this format character
--        is not preceded by the 't' format modifier, the tag 0x31 is
--        used.
--
--']'     End set.  No argument is required.  The 't' format modifier can-
--        not be used with this format character.
--
--Each use of a '{' format character must be matched by a '}' character,
--either later in the format string, or in the format string of a subse-
--quent call to ber_printf() for that BerElement.  The same applies to the
--'[' and ']' characters.
--
--Sequences and sets nest, and implementations of this API must maintain
--internal state to be able to properly calculate the lengths.
--
--           int ber_flatten (BerElement *ber, struct berval **bvPtr);
--
--The ber_flatten routine allocates a struct berval whose contents are a
--BER encoding taken from the ber argument. The bvPtr pointer points to
--the returned berval, which must be freed using ber_bvfree().  This rou-
--tine returns 0 on success and -1 on error.
--
--The ber_flatten API call is not present in U-M LDAP 3.3.
--
--The use of ber_flatten on a BerElement in which all '{' and '}' format
--modifiers have not been properly matched can result in a berval whose
--contents are not a valid BER encoding.
--
--
--13.3.  Encoding Example
--
--The following is an example of encoding the following ASN.1 data type:
--
--      Example1Request ::= SEQUENCE {
--           s     OCTET STRING, -- must be printable
--           val1  INTEGER,
--           val2  [0] INTEGER DEFAULT 0
--      }
--
--
--
--Expires: January 1998                                          [Page 42]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--      int encode_example1(char *s,int val1,int val2,struct berval **bvPtr)
--      {
--           BerElement *ber;
--           int rc;
--
--           ber = ber_alloc_t(LBER_USE_DER);
--
--           if (ber == NULL) return -1;
--
--           if (ber_printf(ber,"{si",s,val1) == -1) {
--                   ber_free(ber,1);
--                   return -1;
--           }
--
--           if (val2 != 0) {
--                   if (ber_printf(ber,"ti",0x80,val2) == -1) {
--                           ber_free(ber,1);
--                           return -1;
--                   }
--           }
--
--           if (ber_printf(ber,"}") == -1) {
--                   ber_free(ber,1);
--                   return -1;
--           }
--
--           rc = ber_flatten(ber,bvPtr);
--           ber_free(ber,1);
--           return rc;
--      }
--
--
--13.4.  Decoding
--
--The following two symbols are available to applications.
--
--           #define LBER_ERROR   0xffffffffL
--           #define LBER_DEFAULT 0xffffffffL
--
--           BerElement *ber_init (struct berval *bv);
--
--The ber_init function constructs and returns a new BerElement
--containing a copy of the data in the bv argument.  ber_init returns the
--null pointer on error.
--
--           unsigned long ber_scanf (BerElement *ber, char *fmt, ... );
--
--The ber_scanf() routine is used to decode a BER element in much the same
--
--
--
--Expires: January 1998                                          [Page 43]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--way that sscanf() works.  One important difference, though, is that some
--state information is kept with the ber argument so that multiple calls
--can be made to ber_scanf() to sequentially read from the BER element.
--The ber argument must be a pointer to a BerElement returned by
--ber_init().  ber_scanf interprets the bytes according to the format
--string fmt, and stores the results in its additional arguments.
--ber_scanf() returns LBER_ERROR on error, and a nonnegative number on
--success.
--
--The format string contains conversion specifications which are used to
--direct the interpretation of the BER element.  The format string can
--contain the following characters:
--
--'a'     Octet string.  A char ** argument should be supplied.  Memory is
--        allocated, filled with the contents of the octet string, null-
--        terminated, and the pointer to the string is stored in the argu-
--        ment.  The returned value must be freed using ldap_memfree.  The
--        tag of the element must indicate the primitive form (constructed
--        strings are not supported) but is otherwise ignored and dis-
--        carded during the decoding.  This format cannot be used with
--        octet strings which could contain null bytes.
--
--'O'     Octet string.  A struct berval ** argument should be supplied,
--        which upon return points to a allocated struct berval containing
--        the octet string and its length.  ber_bvfree() must be called to
--        free the allocated memory.  The tag of the element must indicate
--        the primitive form (constructed strings are not supported) but
--        is otherwise ignored during the decoding.
--
--'b'     Boolean.  A pointer to an int should be supplied. The int value
--        stored will be 0 for FALSE or nonzero for TRUE.  The tag of the
--        element must indicate the primitive form but is otherwise
--        ignored during the decoding.
--
--'i'     Integer.  A pointer to an int should be supplied. The int value
--        stored will be in host byte order.  The tag of the element must
--        indicate the primitive form but is otherwise ignored during the
--        decoding.  ber_scanf() will return an error if the integer can-
--        not be stored in an int.
--
--'B'     Bitstring.  A char ** argument should be supplied which will
--        point to the allocated bits, followed by an unsigned long *
--        argument, which will point to the length (in bits) of the bit-
--        string returned.  ldap_memfree must be called to free the bit-
--        string.  The tag of the element must indicate the primitive form
--        (constructed bitstrings are not supported) but is otherwise
--        ignored during the decoding.
--
--
--
--
--Expires: January 1998                                          [Page 44]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--'n'     Null.  No argument is required.  The element is simply skipped
--        if it is recognized as a zero-length element.  The tag is
--        ignored.
--
--'v'     Several octet strings.  A char *** argument should be supplied,
--        which upon return points to a allocated null-terminated array of
--        char *'s containing the octet strings.  NULL is stored if the
--        sequence is empty.  ldap_memfree must be called to free each
--        element of the array and the array itself.  The tag of the
--        sequence and of the octet strings are ignored.
--
--'V'     Several octet strings (which could contain null bytes).  A
--        struct berval *** should be supplied, which upon return points
--        to a allocated null-terminated array of struct berval *'s con-
--        taining the octet strings and their lengths.  NULL is stored if
--        the sequence is empty. ber_bvecfree() can be called to free the
--        allocated memory.  The tag of the sequence and of the octet
--        strings are ignored.
--
--'x'     Skip element.  The next element is skipped.  No argument is
--        required.
--
--'{'     Begin sequence.  No argument is required.  The initial sequence
--        tag and length are skipped.
--
--'}'     End sequence.  No argument is required.
--
--'['     Begin set.  No argument is required.  The initial set tag and
--        length are skipped.
--
--']'     End set.  No argument is required.
--
--           unsigned long ber_peek_tag (BerElement *ber, unsigned long *lenPtr);
--
--ber_peek_tag() returns the tag of the next element to be parsed in the
--BerElement argument.  The length of this element is stored in the
--*lenPtr argument.  LBER_DEFAULT is returned if there is no further data
--to be read.  The ber argument is not modified.
--
--           unsigned long ber_skip_tag (BerElement *ber, unsigned long *lenPtr);
--
--ber_skip_tag() is similar to ber_peek_tag(), except that the state
--pointer in the BerElement argument is advanced past the first tag and
--length, and is pointed to the value part of the next element.  This rou-
--tine should only be used with constructed types and situations when a
--BER encoding is used as the value of an OCTET STRING.  The length of the
--value is stored in *lenPtr.
--
--
--
--
--Expires: January 1998                                          [Page 45]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           unsigned long ber_first_element(BerElement *ber,
--                   unsigned long *lenPtr, char **opaquePtr);
--
--           unsigned long ber_next_element  (BerElement *ber,
--                   unsigned long *lenPtr, char *opaque);
--
--ber_first_element() and ber_next_element() are used to traverse a SET,
--SET OF, SEQUENCE or SEQUENCE OF data value. ber_first_element() calls
--ber_skip_tag(), stores internal information in *lenPtr and *opaquePtr,
--and calls ber_peek_tag() for the first element inside the constructed
--value. LBER_DEFAULT is returned if the constructed value is empty.
--ber_next_element() positions the state at the start of the next element
--in the constructed type.  LBER_DEFAULT is returned if there are no
--further values.
--
--The len and opaque values should not be used by applications other than
--as arguments to ber_next_element(), as shown in the example below.
--
--
--13.5.  Decoding Example
--
--The following is an example of decoding an ASN.1 data type:
--
--      Example2Request ::= SEQUENCE {
--           dn    OCTET STRING, -- must be printable
--           scope ENUMERATED { b (0), s (1), w (2) },
--           ali   ENUMERATED { n (0), s (1), f (2), a (3) },
--           size  INTEGER,
--           time  INTEGER,
--           tonly BOOLEAN,
--           attrs SEQUENCE OF OCTET STRING, -- must be printable
--           [0] SEQUENCE OF SEQUENCE {
--              type  OCTET STRING -- must be printable,
--              crit  BOOLEAN DEFAULT FALSE,
--              value OCTET STRING
--           } OPTIONAL }
--
--      #define LDAP_TAG_CONTROL_LIST 0xA0L /* context specific cons 0 */
--
--      int decode_example2(struct berval *bv)
--      {
--           BerElement *ber;
--           unsigned long len;
--           int scope, ali, size, time, tonly;
--           char *dn = NULL, **attrs = NULL;
--           int res,i,rc = 0;
--
--           ber = ber_init(bv);
--
--
--
--Expires: January 1998                                          [Page 46]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--           if (ber == NULL) {
--                   printf("ERROR ber_init failed0);
--                   return -1;
--           }
--
--           res = ber_scanf(ber,"{aiiiiib{v}",&dn,&scope,&ali,
--                           &size,&time,&tonly,&attrs);
--
--           if (res == -1) {
--                   printf("ERROR ber_scanf failed0);
--                   ber_free(ber,1);
--                   return -1;
--           }
--
--           /* *** use dn */
--           ldap_memfree(dn);
--
--           for (i = 0; attrs != NULL && attrs[i] != NULL; i++) {
--                   /* *** use attrs[i] */
--                   ldap_memfree(attrs[i]);
--           }
--           ldap_memfree(attrs);
--
--           if (ber_peek_tag(ber,&len) == LDAP_TAG_CONTROL_LIST) {
--                   char *opaque;
--                   unsigned long tag;
--
--                   for (tag = ber_first_element(ber,&len,&opaque);
--                        tag != LBER_DEFAULT;
--                        tag = ber_next_element (ber,&len,opaque)) {
--
--                           unsigned long ttag, tlen;
--                           char *type;
--                           int crit;
--                           struct berval *value;
--
--                           if (ber_scanf(ber,"{a",&type) == LBER_ERROR) {
--                                   printf("ERROR cannot parse type0);
--                                   break;
--                           }
--                           /* *** use type */
--                           ldap_memfree(type);
--
--                           ttag = ber_peek_tag(ber,&tlen);
--                           if (ttag == 0x01) {  /* boolean */
--                                   if (ber_scanf(ber,"b",
--                                                 &crit) == LBER_ERROR) {
--                                           printf("ERROR cannot parse crit0);
--
--
--
--Expires: January 1998                                          [Page 47]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--                                           rc = -1;
--                                           break;
--                                   }
--                           } else if (ttag == 0x04) { /* octet string */
--                                   crit = 0;
--                           } else {
--                                   printf("ERROR extra field in controls0);
--                                   break;
--                           }
--
--                           if (ber_scanf(ber,"O}",&value) == LBER_ERROR) {
--                                   printf("ERROR cannot parse value0);
--                                   rc = -1;
--                                   break;
--                           }
--                           /* *** use value */
--                           ldap_bvfree(value);
--                   }
--           }
--
--           ber_scanf(ber,"}");
--
--           ber_free(ber,1);
--
--           return rc;
--       }
--
--
--
--14.  Security Considerations
--
--LDAPv2 supports security through protocol-level authentication using
--clear-text passwords.  LDAPv3 adds support for SASL [8] (Simple Authen-
--tication Security Layer) methods.  LDAPv3 also supports operation over a
--secure transport layer using Transport Layer Security TLS [8].  Readers
--are referred to the protocol documents for discussion of related secu-
--rity considerations.
--
--Implementations of this API should be cautious when handling authentica-
--tion credentials.  In particular, keeping long-lived copies of creden-
--tials without the application's knowledge is discouraged.
--
--
--15.  Acknowledgements
--
--Many members of the IETF ASID working group as well as members of the
--Internet at large have provided useful comments and suggestions that
--have been incorporated into this revision.
--
--
--
--Expires: January 1998                                          [Page 48]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--This original material upon which this revision is based was based upon
--work supported by the National Science Foundation under Grant No.  NCR-
--9416667.
--
--
--16.  Bibliography
--
--[1]  The Directory: Selected Attribute Syntaxes.  CCITT, Recommendation
--     X.520.
--
--[2]  M. Wahl, A. Coulbeck, T. Howes, S. Kille, W. Yeong, C. Robbins,
--     "Lightweight Directory Access Protocol Attribute Syntax Defini-
--     tions", INTERNET-DRAFT <draft-ietf-asid-ldapv3-attributes-06.txt>,
--     11 July 1997.
--
--[3]  T. Howes, "A String Representation of LDAP Search Filters,"
--     INTERNET-DRAFT <draft-ietf-asid-ldapv3-filter-02.txt>, May 1997.
--
--[4]  S. Kille, M. Wahl, "A UTF-8 String Representation of Distinguished
--     Names", INTERNET-DRAFT <draft-ietf-asid-ldapv3-dn-03.txt>, 29 April
--     1997.
--
--[5]  S. Kille, "Using the OSI Directory to Achieve User Friendly Nam-
--     ing," RFC 1781, March 1995.
--
--[6]  M. Wahl, T. Howes, S. Kille, "Lightweight Directory Access Protocol
--     (v3)", INTERNET-DRAFT <draft-ietf-asid-ldapv3-protocol-06.txt>, 11
--     July 1997.
--
--[7]  A. Herron, T. Howes, M. Wahl, "LDAP Control Extension for Server
--     Side Sorting of Search Result," INTERNET-DRAFT <draft-ietf-asid-
--     ldapv3-sorting-00.txt>, 16 April 1997.
--
--[8]  J. Meyers, "Simple Authentication and Security Layer", INTERNET-
--     DRAFT <draft-myers-auth-sasl-11.txt>, April 1997.
--
--[9]  "Lightweight Directory Access Protocol (v3) Extension for Transport
--     Layer Security", INTERNET-DRAFT <draft-ietf-asid-ldapv3-tls-
--     01.txt>, June 1997.
--
--[10] "UTF-8, a transformation format of Unicode and ISO 10646", RFC
--     2044, October 1996.
--
--[11] "IP Version 6 Addressing Architecture,", RFC 1884, December 1995.
--
--
--
--
--
--
--
--Expires: January 1998                                          [Page 49]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--17.  Author's Addresses
--
--   Tim Howes
--   Netscape Communications Corp.
--   501 E. Middlefield Rd., Mailstop MV068
--   Mountain View, CA 94043
--   USA
--   +1 415 937-3419
--   howes@netscape.com
--
--
--   Mark Smith
--   Netscape Communications Corp.
--   501 E. Middlefield Rd., Mailstop MV068
--   Mountain View, CA 94043
--   USA
--   +1 415 937-3477
--   mcs@netscape.com
--
--   Andy Herron
--   Microsoft Corp.
--   1 Microsoft Way
--   Redmond, WA 98052
--   USA
--   +1 425 882-8080
--   andyhe@microsoft.com
--
--   Chris Weider
--   Microsoft Corp.
--   1 Microsoft Way
--   Redmond, WA 98052
--   USA
--   +1 425 882-8080
--   cweider@microsoft.com
--
--   Mark Wahl
--   Critical Angle Inc.
--   4815 W Braker Lane #502-385
--   Austin, TX 78759
--   USA
--   M.Wahl@critical-angle.com
--
--
--18.  Appendix A - Sample LDAP API Code
--
--   #include <ldap.h>
--
--   main()
--
--
--
--Expires: January 1998                                          [Page 50]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--   {
--           LDAP            *ld;
--           LDAPMessage     *res, *e;
--           int             i;
--           char            *a, *dn;
--           BerElement      *ptr;
--           char            **vals;
--
--           /* open an LDAP session */
--           if ( (ld = ldap_init( "dotted.host.name", LDAP_PORT )) == NULL )
--                   exit( 1 );
--
--           /* authenticate as nobody */
--           if ( ldap_simple_bind_s( ld, NULL, NULL ) != LDAP_SUCCESS ) {
--                   ldap_perror( ld, "ldap_simple_bind_s" );
--                   exit( 1 );
--           }
--
--           /* search for entries with cn of "Babs Jensen", return all attrs  */
--           if ( ldap_search_s( ld, "o=University of Michigan, c=US",
--               LDAP_SCOPE_SUBTREE, "(cn=Babs Jensen)", NULL, 0, &res )
--               != LDAP_SUCCESS ) {
--                   ldap_perror( ld, "ldap_search_s" );
--                   exit( 1 );
--           }
--
--           /* step through each entry returned */
--           for ( e = ldap_first_entry( ld, res ); e != NULL;
--               e = ldap_next_entry( ld, e ) ) {
--                   /* print its name */
--                   dn = ldap_get_dn( ld, e );
--                   printf( "dn: %s\n", dn );
--                   ldap_memfree( dn );
--
--                   /* print each attribute */
--                   for ( a = ldap_first_attribute( ld, e, &ptr ); a != NULL;
--                       a = ldap_next_attribute( ld, e, ptr ) ) {
--                           printf( "attribute: %s\n", a );
--
--                           /* print each value */
--                           vals = ldap_get_values( ld, e, a );
--                           for ( i = 0; vals[i] != NULL; i++ ) {
--                                   printf( "value: %s\n", vals[i] );
--                           }
--                           ldap_value_free( vals );
--                   }
--                   if ( ptr != NULL ) {
--                           ldap_ber_free( ptr, 0 );
--
--
--
--Expires: January 1998                                          [Page 51]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--                   }
--           }
--           /* free the search results */
--           ldap_msgfree( res );
--
--           /* close and free connection resources */
--           ldap_unbind( ld );
--   }
--
--
--
--19.  Appendix B - Outstanding Issues
--
--
--19.1.  Support for multithreaded applications
--
--In order to support multithreaded applications in a platform-independent
--way, some additions to the LDAP API are needed.  Different implementors
--have taken different paths to solve this problem in the past.  A common
--set of thread-related API calls must be defined so that application
--developers are not unduly burdened.  These will be added to a future
--revision of this specification.
--
--
--19.2.  Using Transport Layer Security (TLS)
--
--The API calls used to support TLS must be specified.  They will be added
--to a future revision of this specification.
--
--
--19.3.  Client control for chasing referrals
--
--A client control has been defined that can be used to specify on a per-
--operation basis whether references and external referrals are automati-
--cally chased by the client library.  This will be added to a future
--revision of this specification.
--
--
--19.4.  Potential confusion between hostname:port and IPv6 addresses
--
--String representations of IPv6 network addresses [11] can contain colon
--characters.  The ldap_init() call is specified to take strings of the
--form "hostname:port" or "ipaddress:port".  If IPv6 addresses are used,
--the latter could be ambiguous.  A future revision of this specification
--will resolve this issue.
--
--
--
--
--
--
--Expires: January 1998                                          [Page 52]
--\f
--C LDAP API      The C LDAP Application Program Interface    29 July 1997
--
--
--19.5.  Need to track SASL API standardization efforts
--
--If a standard Simple Authentication and Security Layer API is defined,
--it may be necessary to modify the LDAP API to accommodate it.
--
--
--19.6.  Support for character sets other than UTF-8?
--
--Some application developers would prefer to pass string data using a
--character set other than UTF-8.  This could be accommodated by adding a
--new option to ldap_set_option() that supports choosing a character set.
--If this feature is added, the number of different character sets sup-
--ported should definitely be minimized.
--
--
--19.7.  Use of UTF-8 with LDAPv2 servers
--
--Strings are always passed as UTF-8 in this API but LDAP version 2
--servers do not support the full range of UTF-8 characters.  The expected
--behavior of this API when using LDAP version 2 with unsupported charac-
--ters should be specified.
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--
--Expires: January 1998                                          [Page 53]
--\f
--
--
--1.     Status of this Memo............................................1
--2.     Introduction...................................................1
--3.     Overview of the LDAP Model.....................................2
--4.     Overview of LDAP API Use.......................................3
--5.     Common Data Structures.........................................4
--6.     LDAP Error Codes...............................................5
--7.     Performing LDAP Operations.....................................6
--7.1.      Initializing an LDAP Session................................6
--7.2.      LDAP Session Handle Options.................................7
--7.3.      Working with controls.......................................10
--7.4.      Authenticating to the directory.............................11
--7.5.      Closing the session.........................................13
--7.6.      Searching...................................................13
--7.7.      Reading an Entry............................................17
--7.8.      Listing the Children of an Entry............................17
--7.9.      Comparing a Value Against an Entry..........................17
--7.10.     Modifying an entry..........................................19
--7.11.     Modifying the Name of an Entry..............................21
--7.12.     Adding an entry.............................................23
--7.13.     Deleting an entry...........................................25
--7.14.     Extended Operations.........................................26
--8.     Abandoning An Operation........................................28
--9.     Obtaining Results and Peeking Inside LDAP Messages.............29
--10.    Handling Errors and Parsing Results............................31
--11.    Stepping Through a List of Results.............................33
--12.    Parsing Search Results.........................................34
--12.1.     Stepping Through a List of Entries..........................34
--12.2.     Stepping Through the Attributes of an Entry.................35
--12.3.     Retrieving the Values of an Attribute.......................36
--12.4.     Retrieving the name of an entry.............................37
--13.    Encoded ASN.1 Value Manipulation...............................39
--13.1.     General.....................................................39
--13.2.     Encoding....................................................40
--13.3.     Encoding Example............................................42
--13.4.     Decoding....................................................43
--13.5.     Decoding Example............................................46
--14.    Security Considerations........................................48
--15.    Acknowledgements...............................................48
--16.    Bibliography...................................................49
--17.    Author's Addresses.............................................50
--18.    Appendix A - Sample LDAP API Code..............................50
--19.    Appendix B - Outstanding Issues................................52
--19.1.     Support for multithreaded applications......................52
--19.2.     Using Transport Layer Security (TLS)........................52
--19.3.     Client control for chasing referrals........................52
--19.4.     Potential confusion between hostname:port and IPv6 addresses52
--19.5.     Need to track SASL API standardization efforts..............53
--19.6.     Support for character sets other than UTF-8?................53
--19.7.     Use of UTF-8 with LDAPv2 servers............................53
--
--
--
--
--
--
--
--
diff --git a/rabbitmq-server/plugins-src/generate_app b/rabbitmq-server/plugins-src/generate_app
deleted file mode 100644 (file)
index fb0eb1e..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-main([InFile, OutFile | SrcDirs]) ->
-    Modules = [list_to_atom(filename:basename(F, ".erl")) ||
-                  SrcDir <- SrcDirs,
-                  F <- filelib:wildcard("*.erl", SrcDir)],
-    {ok, [{application, Application, Properties}]} = file:consult(InFile),
-    NewProperties =
-        case proplists:get_value(modules, Properties) of
-            [] -> lists:keyreplace(modules, 1, Properties, {modules, Modules});
-            _  -> Properties
-        end,
-    file:write_file(
-      OutFile,
-      io_lib:format("~p.~n", [{application, Application, NewProperties}])).
diff --git a/rabbitmq-server/plugins-src/generate_deps b/rabbitmq-server/plugins-src/generate_deps
deleted file mode 100644 (file)
index 9f8485b..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
--mode(compile).
-
-%% We expect the list of Erlang source and header files to arrive on
-%% stdin, with the entries colon-separated.
-main([TargetFile, EbinDir]) ->
-    ErlsAndHrls = [ string:strip(S,left) ||
-                      S <- string:tokens(io:get_line(""), ":\n")],
-    ErlFiles = [F || F <- ErlsAndHrls, lists:suffix(".erl", F)],
-    Modules = sets:from_list(
-                [list_to_atom(filename:basename(FileName, ".erl")) ||
-                    FileName <- ErlFiles]),
-    HrlFiles = [F || F <- ErlsAndHrls, lists:suffix(".hrl", F)],
-    IncludeDirs = lists:usort([filename:dirname(Path) || Path <- HrlFiles]),
-    Headers = sets:from_list(HrlFiles),
-    Deps = lists:foldl(
-             fun (Path, Deps1) ->
-                     dict:store(Path, detect_deps(IncludeDirs, EbinDir,
-                                                  Modules, Headers, Path),
-                                Deps1)
-             end, dict:new(), ErlFiles),
-    {ok, Hdl} = file:open(TargetFile, [write, delayed_write]),
-    dict:fold(
-      fun (_Path, [], ok) ->
-              ok;
-          (Path, Dep, ok) ->
-              Module = filename:basename(Path, ".erl"),
-              ok = file:write(Hdl, [EbinDir, "/", Module, ".beam: ",
-                                   Path]),
-              ok = sets:fold(fun (E, ok) -> file:write(Hdl, [" ", E]) end,
-                             ok, Dep),
-              file:write(Hdl, ["\n"])
-      end, ok, Deps),
-    ok = file:write(Hdl, [TargetFile, ": ", escript:script_name(), "\n"]),
-    ok = file:sync(Hdl),
-    ok = file:close(Hdl).
-
-detect_deps(IncludeDirs, EbinDir, Modules, Headers, Path) ->
-    {ok, Forms} = epp:parse_file(Path, IncludeDirs, [{use_specs, true}]),
-    lists:foldl(
-      fun ({attribute, _Line, Attribute, Behaviour}, Deps)
-          when Attribute =:= behaviour orelse Attribute =:= behavior ->
-              maybe_add_to_deps(EbinDir, Modules, Behaviour, Deps);
-          ({attribute, _Line, compile, {parse_transform, Transform}}, Deps) ->
-              maybe_add_to_deps(EbinDir, Modules, Transform, Deps);
-          ({attribute, _Line, file, {FileName, _LineNumber1}}, Deps) ->
-              case sets:is_element(FileName, Headers) of
-                  true  -> sets:add_element(FileName, Deps);
-                  false -> Deps
-              end;
-          (_Form, Deps) ->
-              Deps
-      end, sets:new(), Forms).
-
-maybe_add_to_deps(EbinDir, Modules, Module, Deps) ->
-    case sets:is_element(Module, Modules) of
-        true  -> sets:add_element(
-                   [EbinDir, "/", atom_to_list(Module), ".beam"], Deps);
-        false -> Deps
-    end.
diff --git a/rabbitmq-server/plugins-src/licensing/license_info_eldap-wrapper b/rabbitmq-server/plugins-src/licensing/license_info_eldap-wrapper
deleted file mode 100644 (file)
index 0a0e13c..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-Eldap is "Copyright (c) 2010, Torbjorn Tornkvist" and is covered by
-the MIT license.  It was downloaded from https://github.com/etnt/eldap
-
diff --git a/rabbitmq-server/plugins-src/licensing/license_info_mochiweb-wrapper b/rabbitmq-server/plugins-src/licensing/license_info_mochiweb-wrapper
deleted file mode 100644 (file)
index c72a6af..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-Mochiweb is "Copyright (c) 2007 Mochi Media, Inc." and is covered by
-the MIT license.  It was downloaded from
-http://github.com/mochi/mochiweb/
-
diff --git a/rabbitmq-server/plugins-src/licensing/license_info_webmachine-wrapper b/rabbitmq-server/plugins-src/licensing/license_info_webmachine-wrapper
deleted file mode 100644 (file)
index c00fb92..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-Webmachine is Copyright (c) Basho Technologies and is covered by the
-Apache License 2.0.  It was downloaded from http://webmachine.basho.com/
-
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/mochiweb-wrapper/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/10-build-on-R12B-5.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/10-build-on-R12B-5.patch
deleted file mode 100644 (file)
index af582a7..0000000
+++ /dev/null
@@ -1,303 +0,0 @@
-diff --git a/src/mochiglobal.erl b/src/mochiglobal.erl
-index ea645b0..6b20e41 100644
---- a/src/mochiglobal.erl
-+++ b/src/mochiglobal.erl
-@@ -6,12 +6,12 @@
- -author("Bob Ippolito <bob@mochimedia.com>").
- -export([get/1, get/2, put/2, delete/1]).
---spec get(atom()) -> any() | undefined.
-+%% -spec get(atom()) -> any() | undefined.
- %% @equiv get(K, undefined)
- get(K) ->
-     get(K, undefined).
---spec get(atom(), T) -> any() | T.
-+%% -spec get(atom(), T) -> any() | T.
- %% @doc Get the term for K or return Default.
- get(K, Default) ->
-     get(K, Default, key_to_module(K)).
-@@ -22,7 +22,7 @@ get(_K, Default, Mod) ->
-             Default
-     end.
---spec put(atom(), any()) -> ok.
-+%% -spec put(atom(), any()) -> ok.
- %% @doc Store term V at K, replaces an existing term if present.
- put(K, V) ->
-     put(K, V, key_to_module(K)).
-@@ -33,7 +33,7 @@ put(_K, V, Mod) ->
-     {module, Mod} = code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin),
-     ok.
---spec delete(atom()) -> boolean().
-+%% -spec delete(atom()) -> boolean().
- %% @doc Delete term stored at K, no-op if non-existent.
- delete(K) ->
-     delete(K, key_to_module(K)).
-@@ -42,21 +42,21 @@ delete(_K, Mod) ->
-     code:purge(Mod),
-     code:delete(Mod).
---spec key_to_module(atom()) -> atom().
-+%% -spec key_to_module(atom()) -> atom().
- key_to_module(K) ->
-     list_to_atom("mochiglobal:" ++ atom_to_list(K)).
---spec compile(atom(), any()) -> binary().
-+%% -spec compile(atom(), any()) -> binary().
- compile(Module, T) ->
-     {ok, Module, Bin} = compile:forms(forms(Module, T),
-                                       [verbose, report_errors]),
-     Bin.
---spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
-+%% -spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
- forms(Module, T) ->
-     [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)].
---spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
-+%% -spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
- term_to_abstract(Module, Getter, T) ->
-     [%% -module(Module).
-      erl_syntax:attribute(
-diff --git a/src/mochiutf8.erl b/src/mochiutf8.erl
-index 28f28c1..c9d2751 100644
---- a/src/mochiutf8.erl
-+++ b/src/mochiutf8.erl
-@@ -11,11 +11,11 @@
- %% External API
---type unichar_low() :: 0..16#d7ff.
---type unichar_high() :: 16#e000..16#10ffff.
---type unichar() :: unichar_low() | unichar_high().
-+%% -type unichar_low() :: 0..16#d7ff.
-+%% -type unichar_high() :: 16#e000..16#10ffff.
-+%% -type unichar() :: unichar_low() | unichar_high().
---spec codepoint_to_bytes(unichar()) -> binary().
-+%% -spec codepoint_to_bytes(unichar()) -> binary().
- %% @doc Convert a unicode codepoint to UTF-8 bytes.
- codepoint_to_bytes(C) when (C >= 16#00 andalso C =< 16#7f) ->
-     %% U+0000 - U+007F - 7 bits
-@@ -40,12 +40,12 @@ codepoint_to_bytes(C) when (C >= 16#010000 andalso C =< 16#10FFFF) ->
-       2#10:2, B1:6,
-       2#10:2, B0:6>>.
---spec codepoints_to_bytes([unichar()]) -> binary().
-+%% -spec codepoints_to_bytes([unichar()]) -> binary().
- %% @doc Convert a list of codepoints to a UTF-8 binary.
- codepoints_to_bytes(L) ->
-     <<<<(codepoint_to_bytes(C))/binary>> || C <- L>>.
---spec read_codepoint(binary()) -> {unichar(), binary(), binary()}.
-+%% -spec read_codepoint(binary()) -> {unichar(), binary(), binary()}.
- read_codepoint(Bin = <<2#0:1, C:7, Rest/binary>>) ->
-     %% U+0000 - U+007F - 7 bits
-     <<B:1/binary, _/binary>> = Bin,
-@@ -82,32 +82,32 @@ read_codepoint(Bin = <<2#11110:5, B3:3,
-             {C, B, Rest}
-     end.
---spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _.
-+%% -spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _.
- codepoint_foldl(F, Acc, <<>>) when is_function(F, 2) ->
-     Acc;
- codepoint_foldl(F, Acc, Bin) ->
-     {C, _, Rest} = read_codepoint(Bin),
-     codepoint_foldl(F, F(C, Acc), Rest).
---spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _.
-+%% -spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _.
- bytes_foldl(F, Acc, <<>>) when is_function(F, 2) ->
-     Acc;
- bytes_foldl(F, Acc, Bin) ->
-     {_, B, Rest} = read_codepoint(Bin),
-     bytes_foldl(F, F(B, Acc), Rest).
---spec bytes_to_codepoints(binary()) -> [unichar()].
-+%% -spec bytes_to_codepoints(binary()) -> [unichar()].
- bytes_to_codepoints(B) ->
-     lists:reverse(codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], B)).
---spec len(binary()) -> non_neg_integer().
-+%% -spec len(binary()) -> non_neg_integer().
- len(<<>>) ->
-     0;
- len(B) ->
-     {_, _, Rest} = read_codepoint(B),
-     1 + len(Rest).
---spec valid_utf8_bytes(B::binary()) -> binary().
-+%% -spec valid_utf8_bytes(B::binary()) -> binary().
- %% @doc Return only the bytes in B that represent valid UTF-8. Uses
- %%      the following recursive algorithm: skip one byte if B does not
- %%      follow UTF-8 syntax (a 1-4 byte encoding of some number),
-@@ -118,7 +118,7 @@ valid_utf8_bytes(B) when is_binary(B) ->
- %% Internal API
---spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary().
-+%% -spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary().
- %% @doc Return B, but skipping the 0-based indexes in L.
- binary_skip_bytes(B, []) ->
-     B;
-@@ -126,7 +126,7 @@ binary_skip_bytes(B, L) ->
-     binary_skip_bytes(B, L, 0, []).
- %% @private
---spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary().
-+%% -spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary().
- binary_skip_bytes(B, [], _N, Acc) ->
-     iolist_to_binary(lists:reverse([B | Acc]));
- binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) ->
-@@ -134,13 +134,13 @@ binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) ->
- binary_skip_bytes(<<C, RestB/binary>>, L, N, Acc) ->
-     binary_skip_bytes(RestB, L, 1 + N, [C | Acc]).
---spec invalid_utf8_indexes(binary()) -> [non_neg_integer()].
-+%% -spec invalid_utf8_indexes(binary()) -> [non_neg_integer()].
- %% @doc Return the 0-based indexes in B that are not valid UTF-8.
- invalid_utf8_indexes(B) ->
-     invalid_utf8_indexes(B, 0, []).
- %% @private.
---spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()].
-+%% -spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()].
- invalid_utf8_indexes(<<C, Rest/binary>>, N, Acc) when C < 16#80 ->
-     %% U+0000 - U+007F - 7 bits
-     invalid_utf8_indexes(Rest, 1 + N, Acc);
-diff --git a/src/mochiweb_charref.erl b/src/mochiweb_charref.erl
-index 193c7c7..665d0f9 100644
---- a/src/mochiweb_charref.erl
-+++ b/src/mochiweb_charref.erl
-@@ -11,7 +11,7 @@
- %%      codepoint, or return undefined on failure.
- %%      The input should not include an ampersand or semicolon.
- %%      charref("#38") = 38, charref("#x26") = 38, charref("amp") = 38.
---spec charref(binary() | string()) -> integer() | [integer()] | undefined.
-+%% -spec charref(binary() | string()) -> integer() | [integer()] | undefined.
- charref(B) when is_binary(B) ->
-     charref(binary_to_list(B));
- charref([$#, C | L]) when C =:= $x orelse C =:= $X ->
-diff --git a/src/mochiweb_http.erl b/src/mochiweb_http.erl
-index 931ecd0..ae6410f 100644
---- a/src/mochiweb_http.erl
-+++ b/src/mochiweb_http.erl
-@@ -121,12 +121,12 @@ call_body({M, F}, Req) ->
- call_body(Body, Req) ->
-     Body(Req).
---spec handle_invalid_request(term()) -> no_return().
-+%% -spec handle_invalid_request(term()) -> no_return().
- handle_invalid_request(Socket) ->
-     handle_invalid_request(Socket, {'GET', {abs_path, "/"}, {0,9}}, []),
-     exit(normal).
---spec handle_invalid_request(term(), term(), term()) -> no_return().
-+%% -spec handle_invalid_request(term(), term(), term()) -> no_return().
- handle_invalid_request(Socket, Request, RevHeaders) ->
-     Req = new_request(Socket, Request, RevHeaders),
-     Req:respond({400, [], []}),
-diff --git a/src/mochiweb_session.erl b/src/mochiweb_session.erl
-index ac5d66b..ddf7c46 100644
---- a/src/mochiweb_session.erl
-+++ b/src/mochiweb_session.erl
-@@ -21,11 +21,11 @@
- %% @doc Generates a secure encrypted binary convining all the parameters. The
- %% expiration time must be a 32-bit integer.
---spec generate_session_data(
--        ExpirationTime :: expiration_time(),
--        Data :: iolist(),
--        FSessionKey :: key_fun(),
--        ServerKey :: iolist()) -> binary().
-+%% -spec generate_session_data(
-+%%        ExpirationTime :: expiration_time(),
-+%%        Data :: iolist(),
-+%%        FSessionKey :: key_fun(),
-+%%        ServerKey :: iolist()) -> binary().
- generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey)
-   when is_integer(ExpirationTime), is_function(FSessionKey)->
-     BData = ensure_binary(Data),
-@@ -39,11 +39,11 @@ generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey)
- %% @doc Convenience wrapper for generate_session_data that returns a
- %% mochiweb cookie with "id" as the key, a max_age of 20000 seconds,
- %% and the current local time as local time.
---spec generate_session_cookie(
--        ExpirationTime :: expiration_time(),
--        Data :: iolist(),
--        FSessionKey :: key_fun(),
--        ServerKey :: iolist()) -> header().
-+%% -spec generate_session_cookie(
-+%%        ExpirationTime :: expiration_time(),
-+%%        Data :: iolist(),
-+%%        FSessionKey :: key_fun(),
-+%%        ServerKey :: iolist()) -> header().
- generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey)
-   when is_integer(ExpirationTime), is_function(FSessionKey)->
-     CookieData = generate_session_data(ExpirationTime, Data,
-@@ -55,13 +55,13 @@ generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey)
-                                 calendar:universal_time())}]).
- %% TODO: This return type is messy to express in the type system.
---spec check_session_cookie(
--        ECookie :: binary(),
--        ExpirationTime :: string(),
--        FSessionKey :: key_fun(),
--        ServerKey :: iolist()) ->
--    {Success :: boolean(),
--     ExpTimeAndData :: [integer() | binary()]}.
-+%% -spec check_session_cookie(
-+    %%     ECookie :: binary(),
-+    %%     ExpirationTime :: string(),
-+    %%     FSessionKey :: key_fun(),
-+    %%     ServerKey :: iolist()) ->
-+    %% {Success :: boolean(),
-+    %%  ExpTimeAndData :: [integer() | binary()]}.
- check_session_cookie(ECookie, ExpirationTime, FSessionKey, ServerKey)
-   when is_binary(ECookie), is_integer(ExpirationTime),
-        is_function(FSessionKey) ->
-@@ -83,7 +83,7 @@ check_session_cookie(_ECookie, _ExpirationTime, _FSessionKey, _ServerKey) ->
-     {false, []}.
- %% 'Constant' time =:= operator for binary, to mitigate timing attacks.
---spec eq(binary(), binary()) -> boolean().
-+%% -spec eq(binary(), binary()) -> boolean().
- eq(A, B) when is_binary(A) andalso is_binary(B) ->
-     eq(A, B, 0).
-@@ -94,27 +94,27 @@ eq(<<>>, <<>>, 0) ->
- eq(_As, _Bs, _Acc) ->
-     false.
---spec ensure_binary(iolist()) -> binary().
-+%% -spec ensure_binary(iolist()) -> binary().
- ensure_binary(B) when is_binary(B) ->
-     B;
- ensure_binary(L) when is_list(L) ->
-     iolist_to_binary(L).
---spec encrypt_data(binary(), binary()) -> binary().
-+%% -spec encrypt_data(binary(), binary()) -> binary().
- encrypt_data(Data, Key) ->
-     IV = crypto:rand_bytes(16),
-     Crypt = crypto:aes_cfb_128_encrypt(Key, IV, Data),
-     <<IV/binary, Crypt/binary>>.
---spec decrypt_data(binary(), binary()) -> binary().
-+%% -spec decrypt_data(binary(), binary()) -> binary().
- decrypt_data(<<IV:16/binary, Crypt/binary>>, Key) ->
-     crypto:aes_cfb_128_decrypt(Key, IV, Crypt).
---spec gen_key(iolist(), iolist()) -> binary().
-+%% -spec gen_key(iolist(), iolist()) -> binary().
- gen_key(ExpirationTime, ServerKey)->
-     crypto:md5_mac(ServerKey, [ExpirationTime]).
---spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary().
-+%% -spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary().
- gen_hmac(ExpirationTime, Data, SessionKey, Key) ->
-     crypto:sha_mac(Key, [ExpirationTime, Data, SessionKey]).
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/20-MAX_RECV_BODY.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/20-MAX_RECV_BODY.patch
deleted file mode 100644 (file)
index 2656fa2..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/src/mochiweb_request.erl b/src/mochiweb_request.erl
-index 5d89662..6765ab0 100644
---- a/src/mochiweb_request.erl
-+++ b/src/mochiweb_request.erl
-@@ -42,7 +42,7 @@
- -define(IDLE_TIMEOUT, 300000).
- % Maximum recv_body() length of 1MB
---define(MAX_RECV_BODY, (1024*1024)).
-+-define(MAX_RECV_BODY, 104857600).
- %% @spec get_header_value(K) -> undefined | Value
- %% @doc Get the value of a given request header.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/30-remove-crypto-ssl-dependencies.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/30-remove-crypto-ssl-dependencies.patch
deleted file mode 100644 (file)
index 0d5c85a..0000000
+++ /dev/null
@@ -1,104 +0,0 @@
-diff --git a/src/mochitemp.erl b/src/mochitemp.erl
-index dda7863..f64876d 100644
---- a/src/mochitemp.erl
-+++ b/src/mochitemp.erl
-@@ -1,7 +1,7 @@
- %% @author Bob Ippolito <bob@mochimedia.com>
- %% @copyright 2010 Mochi Media, Inc.
--%% @doc Create temporary files and directories. Requires crypto to be started.
-+%% @doc Create temporary files and directories.
- -module(mochitemp).
- -export([gettempdir/0]).
-@@ -87,7 +87,7 @@ rngchars(N) ->
-     [rngchar() | rngchars(N - 1)].
- rngchar() ->
--    rngchar(crypto:rand_uniform(0, tuple_size(?SAFE_CHARS))).
-+    rngchar(mochiweb_util:rand_uniform(0, tuple_size(?SAFE_CHARS))).
- rngchar(C) ->
-     element(1 + C, ?SAFE_CHARS).
-@@ -177,7 +177,6 @@ gettempdir_cwd_test() ->
-     ok.
- rngchars_test() ->
--    crypto:start(),
-     ?assertEqual(
-        "",
-        rngchars(0)),
-@@ -199,7 +198,6 @@ rngchar_test() ->
-     ok.
- mkdtemp_n_failonce_test() ->
--    crypto:start(),
-     D = mkdtemp(),
-     Path = filename:join([D, "testdir"]),
-     %% Toggle the existence of a dir so that it fails
-@@ -246,7 +244,6 @@ make_dir_fail_test() ->
-     ok.
- mkdtemp_test() ->
--    crypto:start(),
-     D = mkdtemp(),
-     ?assertEqual(
-        true,
-@@ -257,7 +254,6 @@ mkdtemp_test() ->
-     ok.
- rmtempdir_test() ->
--    crypto:start(),
-     D1 = mkdtemp(),
-     ?assertEqual(
-        true,
-diff --git a/src/mochiweb.app.src b/src/mochiweb.app.src
-index 8d75a3a..c98d8a0 100644
---- a/src/mochiweb.app.src
-+++ b/src/mochiweb.app.src
-@@ -5,5 +5,5 @@
-   {modules, []},
-   {registered, []},
-   {env, []},
--  {applications, [kernel, stdlib, crypto, inets, ssl, xmerl,
-+  {applications, [kernel, stdlib, inets, xmerl,
-                   compiler, syntax_tools]}]}.
-diff --git a/src/mochiweb_multipart.erl b/src/mochiweb_multipart.erl
-index a83a88c..a4857d6 100644
---- a/src/mochiweb_multipart.erl
-+++ b/src/mochiweb_multipart.erl
-@@ -38,7 +38,7 @@ parts_to_body([{Start, End, Body}], ContentType, Size) ->
-     {HeaderList, Body};
- parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
-     parts_to_multipart_body(BodyList, ContentType, Size,
--                            mochihex:to_hex(crypto:rand_bytes(8))).
-+                            mochihex:to_hex(mochiweb_util:rand_bytes(8))).
- %% @spec parts_to_multipart_body([bodypart()], ContentType::string(),
- %%                               Size::integer(), Boundary::string()) ->
-diff --git a/src/mochiweb_util.erl b/src/mochiweb_util.erl
-index 4d39990..a0bc2bc 100644
---- a/src/mochiweb_util.erl
-+++ b/src/mochiweb_util.erl
-@@ -13,7 +13,7 @@
- -export([record_to_proplist/2, record_to_proplist/3]).
- -export([safe_relative_path/1, partition/2]).
- -export([parse_qvalues/1, pick_accepted_encodings/3]).
---export([make_io/1]).
-+-export([make_io/1, rand_bytes/1, rand_uniform/2]).
- -define(PERCENT, 37).  % $\%
- -define(FULLSTOP, 46). % $\.
-@@ -581,6 +581,12 @@ make_io(Integer) when is_integer(Integer) ->
- make_io(Io) when is_list(Io); is_binary(Io) ->
-     Io.
-+rand_bytes(Count) ->
-+    list_to_binary([rand_uniform(0, 16#FF + 1) || _ <- lists:seq(1, Count)]).
-+
-+rand_uniform(Lo, Hi) ->
-+    random:uniform(Hi - Lo) + Lo - 1.
-+
- %%
- %% Tests
- %%
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/40-remove-compiler-syntax_tools-dependencies.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/40-remove-compiler-syntax_tools-dependencies.patch
deleted file mode 100644 (file)
index c9938e5..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-diff --git a/src/mochiglobal.erl b/src/mochiglobal.erl
-deleted file mode 100644
-index 6b20e41..0000000
---- a/src/mochiglobal.erl
-+++ /dev/null
-@@ -1,107 +0,0 @@
--%% @author Bob Ippolito <bob@mochimedia.com>
--%% @copyright 2010 Mochi Media, Inc.
--%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6)
--%%      <a href="http://www.erlang.org/pipermail/erlang-questions/2009-March/042503.html">[1]</a>.
---module(mochiglobal).
---author("Bob Ippolito <bob@mochimedia.com>").
---export([get/1, get/2, put/2, delete/1]).
--
--%% -spec get(atom()) -> any() | undefined.
--%% @equiv get(K, undefined)
--get(K) ->
--    get(K, undefined).
--
--%% -spec get(atom(), T) -> any() | T.
--%% @doc Get the term for K or return Default.
--get(K, Default) ->
--    get(K, Default, key_to_module(K)).
--
--get(_K, Default, Mod) ->
--    try Mod:term()
--    catch error:undef ->
--            Default
--    end.
--
--%% -spec put(atom(), any()) -> ok.
--%% @doc Store term V at K, replaces an existing term if present.
--put(K, V) ->
--    put(K, V, key_to_module(K)).
--
--put(_K, V, Mod) ->
--    Bin = compile(Mod, V),
--    code:purge(Mod),
--    {module, Mod} = code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin),
--    ok.
--
--%% -spec delete(atom()) -> boolean().
--%% @doc Delete term stored at K, no-op if non-existent.
--delete(K) ->
--    delete(K, key_to_module(K)).
--
--delete(_K, Mod) ->
--    code:purge(Mod),
--    code:delete(Mod).
--
--%% -spec key_to_module(atom()) -> atom().
--key_to_module(K) ->
--    list_to_atom("mochiglobal:" ++ atom_to_list(K)).
--
--%% -spec compile(atom(), any()) -> binary().
--compile(Module, T) ->
--    {ok, Module, Bin} = compile:forms(forms(Module, T),
--                                      [verbose, report_errors]),
--    Bin.
--
--%% -spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
--forms(Module, T) ->
--    [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)].
--
--%% -spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
--term_to_abstract(Module, Getter, T) ->
--    [%% -module(Module).
--     erl_syntax:attribute(
--       erl_syntax:atom(module),
--       [erl_syntax:atom(Module)]),
--     %% -export([Getter/0]).
--     erl_syntax:attribute(
--       erl_syntax:atom(export),
--       [erl_syntax:list(
--         [erl_syntax:arity_qualifier(
--            erl_syntax:atom(Getter),
--            erl_syntax:integer(0))])]),
--     %% Getter() -> T.
--     erl_syntax:function(
--       erl_syntax:atom(Getter),
--       [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])].
--
--%%
--%% Tests
--%%
---ifdef(TEST).
---include_lib("eunit/include/eunit.hrl").
--get_put_delete_test() ->
--    K = '$$test$$mochiglobal',
--    delete(K),
--    ?assertEqual(
--       bar,
--       get(K, bar)),
--    try
--        ?MODULE:put(K, baz),
--        ?assertEqual(
--           baz,
--           get(K, bar)),
--        ?MODULE:put(K, wibble),
--        ?assertEqual(
--           wibble,
--           ?MODULE:get(K))
--    after
--        delete(K)
--    end,
--    ?assertEqual(
--       bar,
--       get(K, bar)),
--    ?assertEqual(
--       undefined,
--       ?MODULE:get(K)),
--    ok.
---endif.
-diff --git a/src/mochiweb.app.src b/src/mochiweb.app.src
-index c98d8a0..4a6808e 100644
---- a/src/mochiweb.app.src
-+++ b/src/mochiweb.app.src
-@@ -5,5 +5,4 @@
-   {modules, []},
-   {registered, []},
-   {env, []},
--  {applications, [kernel, stdlib, inets, xmerl,
--                  compiler, syntax_tools]}]}.
-+  {applications, [kernel, stdlib, inets, xmerl]}]}.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/50-remove-json.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/50-remove-json.patch
deleted file mode 100644 (file)
index 8c7597f..0000000
+++ /dev/null
@@ -1,1255 +0,0 @@
-diff --git a/src/mochijson2.erl b/src/mochijson2.erl
-deleted file mode 100644
-index 2b8d16e..0000000
---- a/src/mochijson2.erl
-+++ /dev/null
-@@ -1,889 +0,0 @@
--%% @author Bob Ippolito <bob@mochimedia.com>
--%% @copyright 2007 Mochi Media, Inc.
--
--%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works
--%%      with binaries as strings, arrays as lists (without an {array, _})
--%%      wrapper and it only knows how to decode UTF-8 (and ASCII).
--%%
--%%      JSON terms are decoded as follows (javascript -> erlang):
--%%      <ul>
--%%          <li>{"key": "value"} ->
--%%              {struct, [{&lt;&lt;"key">>, &lt;&lt;"value">>}]}</li>
--%%          <li>["array", 123, 12.34, true, false, null] ->
--%%              [&lt;&lt;"array">>, 123, 12.34, true, false, null]
--%%          </li>
--%%      </ul>
--%%      <ul>
--%%          <li>Strings in JSON decode to UTF-8 binaries in Erlang</li>
--%%          <li>Objects decode to {struct, PropList}</li>
--%%          <li>Numbers decode to integer or float</li>
--%%          <li>true, false, null decode to their respective terms.</li>
--%%      </ul>
--%%      The encoder will accept the same format that the decoder will produce,
--%%      but will also allow additional cases for leniency:
--%%      <ul>
--%%          <li>atoms other than true, false, null will be considered UTF-8
--%%              strings (even as a proplist key)
--%%          </li>
--%%          <li>{json, IoList} will insert IoList directly into the output
--%%              with no validation
--%%          </li>
--%%          <li>{array, Array} will be encoded as Array
--%%              (legacy mochijson style)
--%%          </li>
--%%          <li>A non-empty raw proplist will be encoded as an object as long
--%%              as the first pair does not have an atom key of json, struct,
--%%              or array
--%%          </li>
--%%      </ul>
--
---module(mochijson2).
---author('bob@mochimedia.com').
---export([encoder/1, encode/1]).
---export([decoder/1, decode/1, decode/2]).
--
--%% This is a macro to placate syntax highlighters..
---define(Q, $\").
---define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset,
--                                 column=N+S#decoder.column}).
---define(INC_COL(S), S#decoder{offset=1+S#decoder.offset,
--                              column=1+S#decoder.column}).
---define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset,
--                               column=1,
--                               line=1+S#decoder.line}).
---define(INC_CHAR(S, C),
--        case C of
--            $\n ->
--                S#decoder{column=1,
--                          line=1+S#decoder.line,
--                          offset=1+S#decoder.offset};
--            _ ->
--                S#decoder{column=1+S#decoder.column,
--                          offset=1+S#decoder.offset}
--        end).
---define(IS_WHITESPACE(C),
--        (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
--
--%% @type json_string() = atom | binary()
--%% @type json_number() = integer() | float()
--%% @type json_array() = [json_term()]
--%% @type json_object() = {struct, [{json_string(), json_term()}]}
--%% @type json_eep18_object() = {[{json_string(), json_term()}]}
--%% @type json_iolist() = {json, iolist()}
--%% @type json_term() = json_string() | json_number() | json_array() |
--%%                     json_object() | json_eep18_object() | json_iolist()
--
---record(encoder, {handler=null,
--                  utf8=false}).
--
---record(decoder, {object_hook=null,
--                  offset=0,
--                  line=1,
--                  column=1,
--                  state=null}).
--
--%% @spec encoder([encoder_option()]) -> function()
--%% @doc Create an encoder/1 with the given options.
--%% @type encoder_option() = handler_option() | utf8_option()
--%% @type utf8_option() = boolean(). Emit unicode as utf8 (default - false)
--encoder(Options) ->
--    State = parse_encoder_options(Options, #encoder{}),
--    fun (O) -> json_encode(O, State) end.
--
--%% @spec encode(json_term()) -> iolist()
--%% @doc Encode the given as JSON to an iolist.
--encode(Any) ->
--    json_encode(Any, #encoder{}).
--
--%% @spec decoder([decoder_option()]) -> function()
--%% @doc Create a decoder/1 with the given options.
--decoder(Options) ->
--    State = parse_decoder_options(Options, #decoder{}),
--    fun (O) -> json_decode(O, State) end.
--
--%% @spec decode(iolist(), [{format, proplist | eep18 | struct}]) -> json_term()
--%% @doc Decode the given iolist to Erlang terms using the given object format
--%%      for decoding, where proplist returns JSON objects as [{binary(), json_term()}]
--%%      proplists, eep18 returns JSON objects as {[binary(), json_term()]}, and struct
--%%      returns them as-is.
--decode(S, Options) ->
--    json_decode(S, parse_decoder_options(Options, #decoder{})).
--
--%% @spec decode(iolist()) -> json_term()
--%% @doc Decode the given iolist to Erlang terms.
--decode(S) ->
--    json_decode(S, #decoder{}).
--
--%% Internal API
--
--parse_encoder_options([], State) ->
--    State;
--parse_encoder_options([{handler, Handler} | Rest], State) ->
--    parse_encoder_options(Rest, State#encoder{handler=Handler});
--parse_encoder_options([{utf8, Switch} | Rest], State) ->
--    parse_encoder_options(Rest, State#encoder{utf8=Switch}).
--
--parse_decoder_options([], State) ->
--    State;
--parse_decoder_options([{object_hook, Hook} | Rest], State) ->
--    parse_decoder_options(Rest, State#decoder{object_hook=Hook});
--parse_decoder_options([{format, Format} | Rest], State)
--  when Format =:= struct orelse Format =:= eep18 orelse Format =:= proplist ->
--    parse_decoder_options(Rest, State#decoder{object_hook=Format}).
--
--json_encode(true, _State) ->
--    <<"true">>;
--json_encode(false, _State) ->
--    <<"false">>;
--json_encode(null, _State) ->
--    <<"null">>;
--json_encode(I, _State) when is_integer(I) ->
--    integer_to_list(I);
--json_encode(F, _State) when is_float(F) ->
--    mochinum:digits(F);
--json_encode(S, State) when is_binary(S); is_atom(S) ->
--    json_encode_string(S, State);
--json_encode([{K, _}|_] = Props, State) when (K =/= struct andalso
--                                             K =/= array andalso
--                                             K =/= json) ->
--    json_encode_proplist(Props, State);
--json_encode({struct, Props}, State) when is_list(Props) ->
--    json_encode_proplist(Props, State);
--json_encode({Props}, State) when is_list(Props) ->
--    json_encode_proplist(Props, State);
--json_encode({}, State) ->
--    json_encode_proplist([], State);
--json_encode(Array, State) when is_list(Array) ->
--    json_encode_array(Array, State);
--json_encode({array, Array}, State) when is_list(Array) ->
--    json_encode_array(Array, State);
--json_encode({json, IoList}, _State) ->
--    IoList;
--json_encode(Bad, #encoder{handler=null}) ->
--    exit({json_encode, {bad_term, Bad}});
--json_encode(Bad, State=#encoder{handler=Handler}) ->
--    json_encode(Handler(Bad), State).
--
--json_encode_array([], _State) ->
--    <<"[]">>;
--json_encode_array(L, State) ->
--    F = fun (O, Acc) ->
--                [$,, json_encode(O, State) | Acc]
--        end,
--    [$, | Acc1] = lists:foldl(F, "[", L),
--    lists:reverse([$\] | Acc1]).
--
--json_encode_proplist([], _State) ->
--    <<"{}">>;
--json_encode_proplist(Props, State) ->
--    F = fun ({K, V}, Acc) ->
--                KS = json_encode_string(K, State),
--                VS = json_encode(V, State),
--                [$,, VS, $:, KS | Acc]
--        end,
--    [$, | Acc1] = lists:foldl(F, "{", Props),
--    lists:reverse([$\} | Acc1]).
--
--json_encode_string(A, State) when is_atom(A) ->
--    L = atom_to_list(A),
--    case json_string_is_safe(L) of
--        true ->
--            [?Q, L, ?Q];
--        false ->
--            json_encode_string_unicode(xmerl_ucs:from_utf8(L), State, [?Q])
--    end;
--json_encode_string(B, State) when is_binary(B) ->
--    case json_bin_is_safe(B) of
--        true ->
--            [?Q, B, ?Q];
--        false ->
--            json_encode_string_unicode(xmerl_ucs:from_utf8(B), State, [?Q])
--    end;
--json_encode_string(I, _State) when is_integer(I) ->
--    [?Q, integer_to_list(I), ?Q];
--json_encode_string(L, State) when is_list(L) ->
--    case json_string_is_safe(L) of
--        true ->
--            [?Q, L, ?Q];
--        false ->
--            json_encode_string_unicode(L, State, [?Q])
--    end.
--
--json_string_is_safe([]) ->
--    true;
--json_string_is_safe([C | Rest]) ->
--    case C of
--        ?Q ->
--            false;
--        $\\ ->
--            false;
--        $\b ->
--            false;
--        $\f ->
--            false;
--        $\n ->
--            false;
--        $\r ->
--            false;
--        $\t ->
--            false;
--        C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
--            false;
--        C when C < 16#7f ->
--            json_string_is_safe(Rest);
--        _ ->
--            false
--    end.
--
--json_bin_is_safe(<<>>) ->
--    true;
--json_bin_is_safe(<<C, Rest/binary>>) ->
--    case C of
--        ?Q ->
--            false;
--        $\\ ->
--            false;
--        $\b ->
--            false;
--        $\f ->
--            false;
--        $\n ->
--            false;
--        $\r ->
--            false;
--        $\t ->
--            false;
--        C when C >= 0, C < $\s; C >= 16#7f ->
--            false;
--        C when C < 16#7f ->
--            json_bin_is_safe(Rest)
--    end.
--
--json_encode_string_unicode([], _State, Acc) ->
--    lists:reverse([$\" | Acc]);
--json_encode_string_unicode([C | Cs], State, Acc) ->
--    Acc1 = case C of
--               ?Q ->
--                   [?Q, $\\ | Acc];
--               %% Escaping solidus is only useful when trying to protect
--               %% against "</script>" injection attacks which are only
--               %% possible when JSON is inserted into a HTML document
--               %% in-line. mochijson2 does not protect you from this, so
--               %% if you do insert directly into HTML then you need to
--               %% uncomment the following case or escape the output of encode.
--               %%
--               %% $/ ->
--               %%    [$/, $\\ | Acc];
--               %%
--               $\\ ->
--                   [$\\, $\\ | Acc];
--               $\b ->
--                   [$b, $\\ | Acc];
--               $\f ->
--                   [$f, $\\ | Acc];
--               $\n ->
--                   [$n, $\\ | Acc];
--               $\r ->
--                   [$r, $\\ | Acc];
--               $\t ->
--                   [$t, $\\ | Acc];
--               C when C >= 0, C < $\s ->
--                   [unihex(C) | Acc];
--               C when C >= 16#7f, C =< 16#10FFFF, State#encoder.utf8 ->
--                   [xmerl_ucs:to_utf8(C) | Acc];
--               C when  C >= 16#7f, C =< 16#10FFFF, not State#encoder.utf8 ->
--                   [unihex(C) | Acc];
--               C when C < 16#7f ->
--                   [C | Acc];
--               _ ->
--                   exit({json_encode, {bad_char, C}})
--           end,
--    json_encode_string_unicode(Cs, State, Acc1).
--
--hexdigit(C) when C >= 0, C =< 9 ->
--    C + $0;
--hexdigit(C) when C =< 15 ->
--    C + $a - 10.
--
--unihex(C) when C < 16#10000 ->
--    <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
--    Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
--    [$\\, $u | Digits];
--unihex(C) when C =< 16#10FFFF ->
--    N = C - 16#10000,
--    S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
--    S2 = 16#dc00 bor (N band 16#3ff),
--    [unihex(S1), unihex(S2)].
--
--json_decode(L, S) when is_list(L) ->
--    json_decode(iolist_to_binary(L), S);
--json_decode(B, S) ->
--    {Res, S1} = decode1(B, S),
--    {eof, _} = tokenize(B, S1#decoder{state=trim}),
--    Res.
--
--decode1(B, S=#decoder{state=null}) ->
--    case tokenize(B, S#decoder{state=any}) of
--        {{const, C}, S1} ->
--            {C, S1};
--        {start_array, S1} ->
--            decode_array(B, S1);
--        {start_object, S1} ->
--            decode_object(B, S1)
--    end.
--
--make_object(V, #decoder{object_hook=N}) when N =:= null orelse N =:= struct ->
--    V;
--make_object({struct, P}, #decoder{object_hook=eep18}) ->
--    {P};
--make_object({struct, P}, #decoder{object_hook=proplist}) ->
--    P;
--make_object(V, #decoder{object_hook=Hook}) ->
--    Hook(V).
--
--decode_object(B, S) ->
--    decode_object(B, S#decoder{state=key}, []).
--
--decode_object(B, S=#decoder{state=key}, Acc) ->
--    case tokenize(B, S) of
--        {end_object, S1} ->
--            V = make_object({struct, lists:reverse(Acc)}, S1),
--            {V, S1#decoder{state=null}};
--        {{const, K}, S1} ->
--            {colon, S2} = tokenize(B, S1),
--            {V, S3} = decode1(B, S2#decoder{state=null}),
--            decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc])
--    end;
--decode_object(B, S=#decoder{state=comma}, Acc) ->
--    case tokenize(B, S) of
--        {end_object, S1} ->
--            V = make_object({struct, lists:reverse(Acc)}, S1),
--            {V, S1#decoder{state=null}};
--        {comma, S1} ->
--            decode_object(B, S1#decoder{state=key}, Acc)
--    end.
--
--decode_array(B, S) ->
--    decode_array(B, S#decoder{state=any}, []).
--
--decode_array(B, S=#decoder{state=any}, Acc) ->
--    case tokenize(B, S) of
--        {end_array, S1} ->
--            {lists:reverse(Acc), S1#decoder{state=null}};
--        {start_array, S1} ->
--            {Array, S2} = decode_array(B, S1),
--            decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
--        {start_object, S1} ->
--            {Array, S2} = decode_object(B, S1),
--            decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
--        {{const, Const}, S1} ->
--            decode_array(B, S1#decoder{state=comma}, [Const | Acc])
--    end;
--decode_array(B, S=#decoder{state=comma}, Acc) ->
--    case tokenize(B, S) of
--        {end_array, S1} ->
--            {lists:reverse(Acc), S1#decoder{state=null}};
--        {comma, S1} ->
--            decode_array(B, S1#decoder{state=any}, Acc)
--    end.
--
--tokenize_string(B, S=#decoder{offset=O}) ->
--    case tokenize_string_fast(B, O) of
--        {escape, O1} ->
--            Length = O1 - O,
--            S1 = ?ADV_COL(S, Length),
--            <<_:O/binary, Head:Length/binary, _/binary>> = B,
--            tokenize_string(B, S1, lists:reverse(binary_to_list(Head)));
--        O1 ->
--            Length = O1 - O,
--            <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B,
--            {{const, String}, ?ADV_COL(S, Length + 1)}
--    end.
--
--tokenize_string_fast(B, O) ->
--    case B of
--        <<_:O/binary, ?Q, _/binary>> ->
--            O;
--        <<_:O/binary, $\\, _/binary>> ->
--            {escape, O};
--        <<_:O/binary, C1, _/binary>> when C1 < 128 ->
--            tokenize_string_fast(B, 1 + O);
--        <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
--                C2 >= 128, C2 =< 191 ->
--            tokenize_string_fast(B, 2 + O);
--        <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
--                C2 >= 128, C2 =< 191,
--                C3 >= 128, C3 =< 191 ->
--            tokenize_string_fast(B, 3 + O);
--        <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
--                C2 >= 128, C2 =< 191,
--                C3 >= 128, C3 =< 191,
--                C4 >= 128, C4 =< 191 ->
--            tokenize_string_fast(B, 4 + O);
--        _ ->
--            throw(invalid_utf8)
--    end.
--
--tokenize_string(B, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, ?Q, _/binary>> ->
--            {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)};
--        <<_:O/binary, "\\\"", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]);
--        <<_:O/binary, "\\\\", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]);
--        <<_:O/binary, "\\/", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]);
--        <<_:O/binary, "\\b", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]);
--        <<_:O/binary, "\\f", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]);
--        <<_:O/binary, "\\n", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]);
--        <<_:O/binary, "\\r", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]);
--        <<_:O/binary, "\\t", _/binary>> ->
--            tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]);
--        <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> ->
--            C = erlang:list_to_integer([C3, C2, C1, C0], 16),
--            if C > 16#D7FF, C < 16#DC00 ->
--                %% coalesce UTF-16 surrogate pair
--                <<"\\u", D3, D2, D1, D0, _/binary>> = Rest,
--                D = erlang:list_to_integer([D3,D2,D1,D0], 16),
--                [CodePoint] = xmerl_ucs:from_utf16be(<<C:16/big-unsigned-integer,
--                    D:16/big-unsigned-integer>>),
--                Acc1 = lists:reverse(xmerl_ucs:to_utf8(CodePoint), Acc),
--                tokenize_string(B, ?ADV_COL(S, 12), Acc1);
--            true ->
--                Acc1 = lists:reverse(xmerl_ucs:to_utf8(C), Acc),
--                tokenize_string(B, ?ADV_COL(S, 6), Acc1)
--            end;
--        <<_:O/binary, C1, _/binary>> when C1 < 128 ->
--            tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]);
--        <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
--                C2 >= 128, C2 =< 191 ->
--            tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]);
--        <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
--                C2 >= 128, C2 =< 191,
--                C3 >= 128, C3 =< 191 ->
--            tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]);
--        <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
--                C2 >= 128, C2 =< 191,
--                C3 >= 128, C3 =< 191,
--                C4 >= 128, C4 =< 191 ->
--            tokenize_string(B, ?ADV_COL(S, 4), [C4, C3, C2, C1 | Acc]);
--        _ ->
--            throw(invalid_utf8)
--    end.
--
--tokenize_number(B, S) ->
--    case tokenize_number(B, sign, S, []) of
--        {{int, Int}, S1} ->
--            {{const, list_to_integer(Int)}, S1};
--        {{float, Float}, S1} ->
--            {{const, list_to_float(Float)}, S1}
--    end.
--
--tokenize_number(B, sign, S=#decoder{offset=O}, []) ->
--    case B of
--        <<_:O/binary, $-, _/binary>> ->
--            tokenize_number(B, int, ?INC_COL(S), [$-]);
--        _ ->
--            tokenize_number(B, int, S, [])
--    end;
--tokenize_number(B, int, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, $0, _/binary>> ->
--            tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]);
--        <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 ->
--            tokenize_number(B, int1, ?INC_COL(S), [C | Acc])
--    end;
--tokenize_number(B, int1, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
--            tokenize_number(B, int1, ?INC_COL(S), [C | Acc]);
--        _ ->
--            tokenize_number(B, frac, S, Acc)
--    end;
--tokenize_number(B, frac, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 ->
--            tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
--        <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
--            tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
--        _ ->
--            {{int, lists:reverse(Acc)}, S}
--    end;
--tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
--            tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]);
--        <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
--            tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]);
--        _ ->
--            {{float, lists:reverse(Acc)}, S}
--    end;
--tokenize_number(B, esign, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ ->
--            tokenize_number(B, eint, ?INC_COL(S), [C | Acc]);
--        _ ->
--            tokenize_number(B, eint, S, Acc)
--    end;
--tokenize_number(B, eint, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
--            tokenize_number(B, eint1, ?INC_COL(S), [C | Acc])
--    end;
--tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) ->
--    case B of
--        <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
--            tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]);
--        _ ->
--            {{float, lists:reverse(Acc)}, S}
--    end.
--
--tokenize(B, S=#decoder{offset=O}) ->
--    case B of
--        <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
--            tokenize(B, ?INC_CHAR(S, C));
--        <<_:O/binary, "{", _/binary>> ->
--            {start_object, ?INC_COL(S)};
--        <<_:O/binary, "}", _/binary>> ->
--            {end_object, ?INC_COL(S)};
--        <<_:O/binary, "[", _/binary>> ->
--            {start_array, ?INC_COL(S)};
--        <<_:O/binary, "]", _/binary>> ->
--            {end_array, ?INC_COL(S)};
--        <<_:O/binary, ",", _/binary>> ->
--            {comma, ?INC_COL(S)};
--        <<_:O/binary, ":", _/binary>> ->
--            {colon, ?INC_COL(S)};
--        <<_:O/binary, "null", _/binary>> ->
--            {{const, null}, ?ADV_COL(S, 4)};
--        <<_:O/binary, "true", _/binary>> ->
--            {{const, true}, ?ADV_COL(S, 4)};
--        <<_:O/binary, "false", _/binary>> ->
--            {{const, false}, ?ADV_COL(S, 5)};
--        <<_:O/binary, "\"", _/binary>> ->
--            tokenize_string(B, ?INC_COL(S));
--        <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9)
--                                         orelse C =:= $- ->
--            tokenize_number(B, S);
--        <<_:O/binary>> ->
--            trim = S#decoder.state,
--            {eof, S}
--    end.
--%%
--%% Tests
--%%
---ifdef(TEST).
---include_lib("eunit/include/eunit.hrl").
--
--
--%% testing constructs borrowed from the Yaws JSON implementation.
--
--%% Create an object from a list of Key/Value pairs.
--
--obj_new() ->
--    {struct, []}.
--
--is_obj({struct, Props}) ->
--    F = fun ({K, _}) when is_binary(K) -> true end,
--    lists:all(F, Props).
--
--obj_from_list(Props) ->
--    Obj = {struct, Props},
--    ?assert(is_obj(Obj)),
--    Obj.
--
--%% Test for equivalence of Erlang terms.
--%% Due to arbitrary order of construction, equivalent objects might
--%% compare unequal as erlang terms, so we need to carefully recurse
--%% through aggregates (tuples and objects).
--
--equiv({struct, Props1}, {struct, Props2}) ->
--    equiv_object(Props1, Props2);
--equiv(L1, L2) when is_list(L1), is_list(L2) ->
--    equiv_list(L1, L2);
--equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
--equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2;
--equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true.
--
--%% Object representation and traversal order is unknown.
--%% Use the sledgehammer and sort property lists.
--
--equiv_object(Props1, Props2) ->
--    L1 = lists:keysort(1, Props1),
--    L2 = lists:keysort(1, Props2),
--    Pairs = lists:zip(L1, L2),
--    true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
--                             equiv(K1, K2) and equiv(V1, V2)
--                     end, Pairs).
--
--%% Recursively compare tuple elements for equivalence.
--
--equiv_list([], []) ->
--    true;
--equiv_list([V1 | L1], [V2 | L2]) ->
--    equiv(V1, V2) andalso equiv_list(L1, L2).
--
--decode_test() ->
--    [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>),
--    <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]).
--
--e2j_vec_test() ->
--    test_one(e2j_test_vec(utf8), 1).
--
--test_one([], _N) ->
--    %% io:format("~p tests passed~n", [N-1]),
--    ok;
--test_one([{E, J} | Rest], N) ->
--    %% io:format("[~p] ~p ~p~n", [N, E, J]),
--    true = equiv(E, decode(J)),
--    true = equiv(E, decode(encode(E))),
--    test_one(Rest, 1+N).
--
--e2j_test_vec(utf8) ->
--    [
--     {1, "1"},
--     {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes
--     {-1, "-1"},
--     {-3.1416, "-3.14160"},
--     {12.0e10, "1.20000e+11"},
--     {1.234E+10, "1.23400e+10"},
--     {-1.234E-10, "-1.23400e-10"},
--     {10.0, "1.0e+01"},
--     {123.456, "1.23456E+2"},
--     {10.0, "1e1"},
--     {<<"foo">>, "\"foo\""},
--     {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""},
--     {<<"">>, "\"\""},
--     {<<"\n\n\n">>, "\"\\n\\n\\n\""},
--     {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
--     {obj_new(), "{}"},
--     {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"},
--     {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]),
--      "{\"foo\":\"bar\",\"baz\":123}"},
--     {[], "[]"},
--     {[[]], "[[]]"},
--     {[1, <<"foo">>], "[1,\"foo\"]"},
--
--     %% json array in a json object
--     {obj_from_list([{<<"foo">>, [123]}]),
--      "{\"foo\":[123]}"},
--
--     %% json object in a json object
--     {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]),
--      "{\"foo\":{\"bar\":true}}"},
--
--     %% fold evaluation order
--     {obj_from_list([{<<"foo">>, []},
--                     {<<"bar">>, obj_from_list([{<<"baz">>, true}])},
--                     {<<"alice">>, <<"bob">>}]),
--      "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
--
--     %% json object in a json array
--     {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null],
--      "[-123,\"foo\",{\"bar\":[]},null]"}
--    ].
--
--%% test utf8 encoding
--encoder_utf8_test() ->
--    %% safe conversion case (default)
--    [34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] =
--        encode(<<1,"\321\202\320\265\321\201\321\202">>),
--
--    %% raw utf8 output (optional)
--    Enc = mochijson2:encoder([{utf8, true}]),
--    [34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] =
--        Enc(<<1,"\321\202\320\265\321\201\321\202">>).
--
--input_validation_test() ->
--    Good = [
--        {16#00A3, <<?Q, 16#C2, 16#A3, ?Q>>}, %% pound
--        {16#20AC, <<?Q, 16#E2, 16#82, 16#AC, ?Q>>}, %% euro
--        {16#10196, <<?Q, 16#F0, 16#90, 16#86, 16#96, ?Q>>} %% denarius
--    ],
--    lists:foreach(fun({CodePoint, UTF8}) ->
--        Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)),
--        Expect = decode(UTF8)
--    end, Good),
--
--    Bad = [
--        %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
--        <<?Q, 16#80, ?Q>>,
--        %% missing continuations, last byte in each should be 80-BF
--        <<?Q, 16#C2, 16#7F, ?Q>>,
--        <<?Q, 16#E0, 16#80,16#7F, ?Q>>,
--        <<?Q, 16#F0, 16#80, 16#80, 16#7F, ?Q>>,
--        %% we don't support code points > 10FFFF per RFC 3629
--        <<?Q, 16#F5, 16#80, 16#80, 16#80, ?Q>>,
--        %% escape characters trigger a different code path
--        <<?Q, $\\, $\n, 16#80, ?Q>>
--    ],
--    lists:foreach(
--      fun(X) ->
--              ok = try decode(X) catch invalid_utf8 -> ok end,
--              %% could be {ucs,{bad_utf8_character_code}} or
--              %%          {json_encode,{bad_char,_}}
--              {'EXIT', _} = (catch encode(X))
--      end, Bad).
--
--inline_json_test() ->
--    ?assertEqual(<<"\"iodata iodata\"">>,
--                 iolist_to_binary(
--                   encode({json, [<<"\"iodata">>, " iodata\""]}))),
--    ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]},
--                 decode(
--                   encode({struct,
--                           [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))),
--    ok.
--
--big_unicode_test() ->
--    UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)),
--    ?assertEqual(
--       <<"\"\\ud834\\udd20\"">>,
--       iolist_to_binary(encode(UTF8Seq))),
--    ?assertEqual(
--       UTF8Seq,
--       decode(iolist_to_binary(encode(UTF8Seq)))),
--    ok.
--
--custom_decoder_test() ->
--    ?assertEqual(
--       {struct, [{<<"key">>, <<"value">>}]},
--       (decoder([]))("{\"key\": \"value\"}")),
--    F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end,
--    ?assertEqual(
--       win,
--       (decoder([{object_hook, F}]))("{\"key\": \"value\"}")),
--    ok.
--
--atom_test() ->
--    %% JSON native atoms
--    [begin
--         ?assertEqual(A, decode(atom_to_list(A))),
--         ?assertEqual(iolist_to_binary(atom_to_list(A)),
--                      iolist_to_binary(encode(A)))
--     end || A <- [true, false, null]],
--    %% Atom to string
--    ?assertEqual(
--       <<"\"foo\"">>,
--       iolist_to_binary(encode(foo))),
--    ?assertEqual(
--       <<"\"\\ud834\\udd20\"">>,
--       iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))),
--    ok.
--
--key_encode_test() ->
--    %% Some forms are accepted as keys that would not be strings in other
--    %% cases
--    ?assertEqual(
--       <<"{\"foo\":1}">>,
--       iolist_to_binary(encode({struct, [{foo, 1}]}))),
--    ?assertEqual(
--       <<"{\"foo\":1}">>,
--       iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))),
--    ?assertEqual(
--       <<"{\"foo\":1}">>,
--       iolist_to_binary(encode({struct, [{"foo", 1}]}))),
--      ?assertEqual(
--       <<"{\"foo\":1}">>,
--       iolist_to_binary(encode([{foo, 1}]))),
--    ?assertEqual(
--       <<"{\"foo\":1}">>,
--       iolist_to_binary(encode([{<<"foo">>, 1}]))),
--    ?assertEqual(
--       <<"{\"foo\":1}">>,
--       iolist_to_binary(encode([{"foo", 1}]))),
--    ?assertEqual(
--       <<"{\"\\ud834\\udd20\":1}">>,
--       iolist_to_binary(
--         encode({struct, [{[16#0001d120], 1}]}))),
--    ?assertEqual(
--       <<"{\"1\":1}">>,
--       iolist_to_binary(encode({struct, [{1, 1}]}))),
--    ok.
--
--unsafe_chars_test() ->
--    Chars = "\"\\\b\f\n\r\t",
--    [begin
--         ?assertEqual(false, json_string_is_safe([C])),
--         ?assertEqual(false, json_bin_is_safe(<<C>>)),
--         ?assertEqual(<<C>>, decode(encode(<<C>>)))
--     end || C <- Chars],
--    ?assertEqual(
--       false,
--       json_string_is_safe([16#0001d120])),
--    ?assertEqual(
--       false,
--       json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))),
--    ?assertEqual(
--       [16#0001d120],
--       xmerl_ucs:from_utf8(
--         binary_to_list(
--           decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))),
--    ?assertEqual(
--       false,
--       json_string_is_safe([16#110000])),
--    ?assertEqual(
--       false,
--       json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))),
--    %% solidus can be escaped but isn't unsafe by default
--    ?assertEqual(
--       <<"/">>,
--       decode(<<"\"\\/\"">>)),
--    ok.
--
--int_test() ->
--    ?assertEqual(0, decode("0")),
--    ?assertEqual(1, decode("1")),
--    ?assertEqual(11, decode("11")),
--    ok.
--
--large_int_test() ->
--    ?assertEqual(<<"-2147483649214748364921474836492147483649">>,
--        iolist_to_binary(encode(-2147483649214748364921474836492147483649))),
--    ?assertEqual(<<"2147483649214748364921474836492147483649">>,
--        iolist_to_binary(encode(2147483649214748364921474836492147483649))),
--    ok.
--
--float_test() ->
--    ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))),
--    ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))),
--    ok.
--
--handler_test() ->
--    ?assertEqual(
--       {'EXIT',{json_encode,{bad_term,{x,y}}}},
--       catch encode({x,y})),
--    F = fun ({x,y}) -> [] end,
--    ?assertEqual(
--       <<"[]">>,
--       iolist_to_binary((encoder([{handler, F}]))({x, y}))),
--    ok.
--
--encode_empty_test_() ->
--    [{A, ?_assertEqual(<<"{}">>, iolist_to_binary(encode(B)))}
--     || {A, B} <- [{"eep18 {}", {}},
--                   {"eep18 {[]}", {[]}},
--                   {"{struct, []}", {struct, []}}]].
--
--encode_test_() ->
--    P = [{<<"k">>, <<"v">>}],
--    JSON = iolist_to_binary(encode({struct, P})),
--    [{atom_to_list(F),
--      ?_assertEqual(JSON, iolist_to_binary(encode(decode(JSON, [{format, F}]))))}
--     || F <- [struct, eep18, proplist]].
--
--format_test_() ->
--    P = [{<<"k">>, <<"v">>}],
--    JSON = iolist_to_binary(encode({struct, P})),
--    [{atom_to_list(F),
--      ?_assertEqual(A, decode(JSON, [{format, F}]))}
--     || {F, A} <- [{struct, {struct, P}},
--                   {eep18, {P}},
--                   {proplist, P}]].
--
---endif.
-diff --git a/src/mochinum.erl b/src/mochinum.erl
-deleted file mode 100644
-index c52b15c..0000000
---- a/src/mochinum.erl
-+++ /dev/null
-@@ -1,354 +0,0 @@
--%% @copyright 2007 Mochi Media, Inc.
--%% @author Bob Ippolito <bob@mochimedia.com>
--
--%% @doc Useful numeric algorithms for floats that cover some deficiencies
--%% in the math module. More interesting is digits/1, which implements
--%% the algorithm from:
--%% http://www.cs.indiana.edu/~burger/fp/index.html
--%% See also "Printing Floating-Point Numbers Quickly and Accurately"
--%% in Proceedings of the SIGPLAN '96 Conference on Programming Language
--%% Design and Implementation.
--
---module(mochinum).
---author("Bob Ippolito <bob@mochimedia.com>").
---export([digits/1, frexp/1, int_pow/2, int_ceil/1]).
--
--%% IEEE 754 Float exponent bias
---define(FLOAT_BIAS, 1022).
---define(MIN_EXP, -1074).
---define(BIG_POW, 4503599627370496).
--
--%% External API
--
--%% @spec digits(number()) -> string()
--%% @doc  Returns a string that accurately represents the given integer or float
--%%       using a conservative amount of digits. Great for generating
--%%       human-readable output, or compact ASCII serializations for floats.
--digits(N) when is_integer(N) ->
--    integer_to_list(N);
--digits(0.0) ->
--    "0.0";
--digits(Float) ->
--    {Frac1, Exp1} = frexp_int(Float),
--    [Place0 | Digits0] = digits1(Float, Exp1, Frac1),
--    {Place, Digits} = transform_digits(Place0, Digits0),
--    R = insert_decimal(Place, Digits),
--    case Float < 0 of
--        true ->
--            [$- | R];
--        _ ->
--            R
--    end.
--
--%% @spec frexp(F::float()) -> {Frac::float(), Exp::float()}
--%% @doc  Return the fractional and exponent part of an IEEE 754 double,
--%%       equivalent to the libc function of the same name.
--%%       F = Frac * pow(2, Exp).
--frexp(F) ->
--    frexp1(unpack(F)).
--
--%% @spec int_pow(X::integer(), N::integer()) -> Y::integer()
--%% @doc  Moderately efficient way to exponentiate integers.
--%%       int_pow(10, 2) = 100.
--int_pow(_X, 0) ->
--    1;
--int_pow(X, N) when N > 0 ->
--    int_pow(X, N, 1).
--
--%% @spec int_ceil(F::float()) -> integer()
--%% @doc  Return the ceiling of F as an integer. The ceiling is defined as
--%%       F when F == trunc(F);
--%%       trunc(F) when F &lt; 0;
--%%       trunc(F) + 1 when F &gt; 0.
--int_ceil(X) ->
--    T = trunc(X),
--    case (X - T) of
--        Pos when Pos > 0 -> T + 1;
--        _ -> T
--    end.
--
--
--%% Internal API
--
--int_pow(X, N, R) when N < 2 ->
--    R * X;
--int_pow(X, N, R) ->
--    int_pow(X * X, N bsr 1, case N band 1 of 1 -> R * X; 0 -> R end).
--
--insert_decimal(0, S) ->
--    "0." ++ S;
--insert_decimal(Place, S) when Place > 0 ->
--    L = length(S),
--    case Place - L of
--         0 ->
--            S ++ ".0";
--        N when N < 0 ->
--            {S0, S1} = lists:split(L + N, S),
--            S0 ++ "." ++ S1;
--        N when N < 6 ->
--            %% More places than digits
--            S ++ lists:duplicate(N, $0) ++ ".0";
--        _ ->
--            insert_decimal_exp(Place, S)
--    end;
--insert_decimal(Place, S) when Place > -6 ->
--    "0." ++ lists:duplicate(abs(Place), $0) ++ S;
--insert_decimal(Place, S) ->
--    insert_decimal_exp(Place, S).
--
--insert_decimal_exp(Place, S) ->
--    [C | S0] = S,
--    S1 = case S0 of
--             [] ->
--                 "0";
--             _ ->
--                 S0
--         end,
--    Exp = case Place < 0 of
--              true ->
--                  "e-";
--              false ->
--                  "e+"
--          end,
--    [C] ++ "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)).
--
--
--digits1(Float, Exp, Frac) ->
--    Round = ((Frac band 1) =:= 0),
--    case Exp >= 0 of
--        true ->
--            BExp = 1 bsl Exp,
--            case (Frac =/= ?BIG_POW) of
--                true ->
--                    scale((Frac * BExp * 2), 2, BExp, BExp,
--                          Round, Round, Float);
--                false ->
--                    scale((Frac * BExp * 4), 4, (BExp * 2), BExp,
--                          Round, Round, Float)
--            end;
--        false ->
--            case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of
--                true ->
--                    scale((Frac * 2), 1 bsl (1 - Exp), 1, 1,
--                          Round, Round, Float);
--                false ->
--                    scale((Frac * 4), 1 bsl (2 - Exp), 2, 1,
--                          Round, Round, Float)
--            end
--    end.
--
--scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) ->
--    Est = int_ceil(math:log10(abs(Float)) - 1.0e-10),
--    %% Note that the scheme implementation uses a 326 element look-up table
--    %% for int_pow(10, N) where we do not.
--    case Est >= 0 of
--        true ->
--            fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est,
--                  LowOk, HighOk);
--        false ->
--            Scale = int_pow(10, -Est),
--            fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est,
--                  LowOk, HighOk)
--    end.
--
--fixup(R, S, MPlus, MMinus, K, LowOk, HighOk) ->
--    TooLow = case HighOk of
--                 true ->
--                     (R + MPlus) >= S;
--                 false ->
--                     (R + MPlus) > S
--             end,
--    case TooLow of
--        true ->
--            [(K + 1) | generate(R, S, MPlus, MMinus, LowOk, HighOk)];
--        false ->
--            [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, HighOk)]
--    end.
--
--generate(R0, S, MPlus, MMinus, LowOk, HighOk) ->
--    D = R0 div S,
--    R = R0 rem S,
--    TC1 = case LowOk of
--              true ->
--                  R =< MMinus;
--              false ->
--                  R < MMinus
--          end,
--    TC2 = case HighOk of
--              true ->
--                  (R + MPlus) >= S;
--              false ->
--                  (R + MPlus) > S
--          end,
--    case TC1 of
--        false ->
--            case TC2 of
--                false ->
--                    [D | generate(R * 10, S, MPlus * 10, MMinus * 10,
--                                  LowOk, HighOk)];
--                true ->
--                    [D + 1]
--            end;
--        true ->
--            case TC2 of
--                false ->
--                    [D];
--                true ->
--                    case R * 2 < S of
--                        true ->
--                            [D];
--                        false ->
--                            [D + 1]
--                    end
--            end
--    end.
--
--unpack(Float) ->
--    <<Sign:1, Exp:11, Frac:52>> = <<Float:64/float>>,
--    {Sign, Exp, Frac}.
--
--frexp1({_Sign, 0, 0}) ->
--    {0.0, 0};
--frexp1({Sign, 0, Frac}) ->
--    Exp = log2floor(Frac),
--    <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, (Frac-1):52>>,
--    {Frac1, -(?FLOAT_BIAS) - 52 + Exp};
--frexp1({Sign, Exp, Frac}) ->
--    <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, Frac:52>>,
--    {Frac1, Exp - ?FLOAT_BIAS}.
--
--log2floor(Int) ->
--    log2floor(Int, 0).
--
--log2floor(0, N) ->
--    N;
--log2floor(Int, N) ->
--    log2floor(Int bsr 1, 1 + N).
--
--
--transform_digits(Place, [0 | Rest]) ->
--    transform_digits(Place, Rest);
--transform_digits(Place, Digits) ->
--    {Place, [$0 + D || D <- Digits]}.
--
--
--frexp_int(F) ->
--    case unpack(F) of
--        {_Sign, 0, Frac} ->
--            {Frac, ?MIN_EXP};
--        {_Sign, Exp, Frac} ->
--            {Frac + (1 bsl 52), Exp - 53 - ?FLOAT_BIAS}
--    end.
--
--%%
--%% Tests
--%%
---ifdef(TEST).
---include_lib("eunit/include/eunit.hrl").
--
--int_ceil_test() ->
--    ?assertEqual(1, int_ceil(0.0001)),
--    ?assertEqual(0, int_ceil(0.0)),
--    ?assertEqual(1, int_ceil(0.99)),
--    ?assertEqual(1, int_ceil(1.0)),
--    ?assertEqual(-1, int_ceil(-1.5)),
--    ?assertEqual(-2, int_ceil(-2.0)),
--    ok.
--
--int_pow_test() ->
--    ?assertEqual(1, int_pow(1, 1)),
--    ?assertEqual(1, int_pow(1, 0)),
--    ?assertEqual(1, int_pow(10, 0)),
--    ?assertEqual(10, int_pow(10, 1)),
--    ?assertEqual(100, int_pow(10, 2)),
--    ?assertEqual(1000, int_pow(10, 3)),
--    ok.
--
--digits_test() ->
--    ?assertEqual("0",
--                 digits(0)),
--    ?assertEqual("0.0",
--                 digits(0.0)),
--    ?assertEqual("1.0",
--                 digits(1.0)),
--    ?assertEqual("-1.0",
--                 digits(-1.0)),
--    ?assertEqual("0.1",
--                 digits(0.1)),
--    ?assertEqual("0.01",
--                 digits(0.01)),
--    ?assertEqual("0.001",
--                 digits(0.001)),
--    ?assertEqual("1.0e+6",
--                 digits(1000000.0)),
--    ?assertEqual("0.5",
--                 digits(0.5)),
--    ?assertEqual("4503599627370496.0",
--                 digits(4503599627370496.0)),
--    %% small denormalized number
--    %% 4.94065645841246544177e-324 =:= 5.0e-324
--    <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
--    ?assertEqual("5.0e-324",
--                 digits(SmallDenorm)),
--    ?assertEqual(SmallDenorm,
--                 list_to_float(digits(SmallDenorm))),
--    %% large denormalized number
--    %% 2.22507385850720088902e-308
--    <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
--    ?assertEqual("2.225073858507201e-308",
--                 digits(BigDenorm)),
--    ?assertEqual(BigDenorm,
--                 list_to_float(digits(BigDenorm))),
--    %% small normalized number
--    %% 2.22507385850720138309e-308
--    <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
--    ?assertEqual("2.2250738585072014e-308",
--                 digits(SmallNorm)),
--    ?assertEqual(SmallNorm,
--                 list_to_float(digits(SmallNorm))),
--    %% large normalized number
--    %% 1.79769313486231570815e+308
--    <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
--    ?assertEqual("1.7976931348623157e+308",
--                 digits(LargeNorm)),
--    ?assertEqual(LargeNorm,
--                 list_to_float(digits(LargeNorm))),
--    %% issue #10 - mochinum:frexp(math:pow(2, -1074)).
--    ?assertEqual("5.0e-324",
--                 digits(math:pow(2, -1074))),
--    ok.
--
--frexp_test() ->
--    %% zero
--    ?assertEqual({0.0, 0}, frexp(0.0)),
--    %% one
--    ?assertEqual({0.5, 1}, frexp(1.0)),
--    %% negative one
--    ?assertEqual({-0.5, 1}, frexp(-1.0)),
--    %% small denormalized number
--    %% 4.94065645841246544177e-324
--    <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
--    ?assertEqual({0.5, -1073}, frexp(SmallDenorm)),
--    %% large denormalized number
--    %% 2.22507385850720088902e-308
--    <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
--    ?assertEqual(
--       {0.99999999999999978, -1022},
--       frexp(BigDenorm)),
--    %% small normalized number
--    %% 2.22507385850720138309e-308
--    <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
--    ?assertEqual({0.5, -1021}, frexp(SmallNorm)),
--    %% large normalized number
--    %% 1.79769313486231570815e+308
--    <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
--    ?assertEqual(
--        {0.99999999999999989, 1024},
--        frexp(LargeNorm)),
--    %% issue #10 - mochinum:frexp(math:pow(2, -1074)).
--    ?assertEqual(
--       {0.5, -1073},
--       frexp(math:pow(2, -1074))),
--    ok.
--
---endif.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/Makefile b/rabbitmq-server/plugins-src/mochiweb-wrapper/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/hash.mk b/rabbitmq-server/plugins-src/mochiweb-wrapper/hash.mk
deleted file mode 100644 (file)
index d1cebfa..0000000
+++ /dev/null
@@ -1 +0,0 @@
-UPSTREAM_SHORT_HASH:=680dba8
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/license_info b/rabbitmq-server/plugins-src/mochiweb-wrapper/license_info
deleted file mode 100644 (file)
index c72a6af..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-Mochiweb is "Copyright (c) 2007 Mochi Media, Inc." and is covered by
-the MIT license.  It was downloaded from
-http://github.com/mochi/mochiweb/
-
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.done b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.travis.yml b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.travis.yml
deleted file mode 100644 (file)
index 43dad1a..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-language: erlang
-notifications:
-  email: false
-otp_release:
-  - R15B02
-  - R15B03
-  - R16B
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/CHANGES.md b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/CHANGES.md
deleted file mode 100644 (file)
index 06a8b5f..0000000
+++ /dev/null
@@ -1,91 +0,0 @@
-Version 2.7.0 released XXXX-XX-XX
-
-* `mochiweb_socket_server:stop/1` is now a synchronous
-  call instead of an asynchronous cast
-* `mochiweb_html:parse_tokens/1` (and `parse/1`) will now create a
-  html element to wrap documents that have a HTML5 doctype
-  (`<!doctype html>`) but no html element 
-  https://github.com/mochi/mochiweb/issues/110
-
-Version 2.6.0 released 2013-04-15
-
-* Enable R15B gen_tcp workaround only on R15B
-  https://github.com/mochi/mochiweb/pull/107
-
-Version 2.5.0 released 2013-03-04
-
-* Replace now() with os:timestamp() in acceptor (optimization)
-  https://github.com/mochi/mochiweb/pull/102
-* New mochiweb_session module for managing session cookies.
-  NOTE: this module is only supported on R15B02 and later!
-  https://github.com/mochi/mochiweb/pull/94
-* New mochiweb_base64url module for base64url encoding
-  (URL and Filename safe alphabet, see RFC 4648).
-* Fix rebar.config in mochiwebapp_skel to use {branch, "master"}
-  https://github.com/mochi/mochiweb/issues/105
-  
-Version 2.4.2 released 2013-02-05
-
-* Fixed issue in mochiweb_response introduced in v2.4.0
-  https://github.com/mochi/mochiweb/pull/100
-
-Version 2.4.1 released 2013-01-30
-
-* Fixed issue in mochiweb_request introduced in v2.4.0
-  https://github.com/mochi/mochiweb/issues/97
-* Fixed issue in mochifmt_records introduced in v2.4.0
-  https://github.com/mochi/mochiweb/issues/96
-
-Version 2.4.0 released 2013-01-23
-
-* Switch from parameterized modules to explicit tuple module calls for
-  R16 compatibility (#95)
-* Fix for mochiweb_acceptor crash with extra-long HTTP headers under
-  R15B02 (#91)
-* Fix case in handling range headers (#85)
-* Handle combined Content-Length header (#88)
-* Windows security fix for `safe_relative_path`, any path with a
-  backslash on any platform is now considered unsafe (#92)
-
-Version 2.3.2 released 2012-07-27
-
-* Case insensitive match for "Connection: close" (#81)
-
-Version 2.3.1 released 2012-03-31
-
-* Fix edoc warnings (#63)
-* Fix mochiweb_html handling of invalid charref sequences (unescaped &) (#69).
-* Add a manual garbage collection between requests to avoid worst case behavior
-  on keep-alive sockets.
-* Fix dst cookie bug (#73)
-* Removed unnecessary template_dir option, see
-  https://github.com/basho/rebar/issues/203
-
-Version 2.3.0 released 2011-10-14
-
-* Handle ssl_closed message in mochiweb_http (#59)
-* Added support for new MIME types (otf, eot, m4v, svg, svgz, ttc, ttf,
-  vcf, webm, webp, woff) (#61)
-* Updated mochiweb_charref to support all HTML5 entities. Note that
-  if you are using this module directly, the spec has changed to return
-  `[integer()]` for some entities. (#64)
-
-Version 2.2.1 released 2011-08-31
-
-* Removed `mochiweb_skel` module from the pre-rebar era
-
-Version 2.2.0 released 2011-08-29
-
-* Added new `mochiweb_http:start_link/1` and
-  `mochiweb_socket_server:start_link/1` APIs to explicitly start linked
-  servers. Also added `{link, false}` option to the `start/1` variants
-  to explicitly start unlinked. This is in expectation that we will
-  eventually change the default behavior of `start/1` to be unlinked as you
-  would expect it to. See https://github.com/mochi/mochiweb/issues/58 for
-  discussion.
-
-Version 2.1.0 released 2011-08-29
-
-* Added new `mochijson2:decode/2` with `{format, struct | proplist | eep18}`
-  options for easy decoding to various proplist formats. Also added encoding
-  support for eep18 style objects.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/Makefile b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/Makefile
deleted file mode 100644 (file)
index 9de1944..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-
-PREFIX:=../
-DEST:=$(PREFIX)$(PROJECT)
-
-REBAR=./rebar
-
-all:
-       @$(REBAR) get-deps compile
-
-edoc:
-       @$(REBAR) doc
-
-test:
-       @rm -rf .eunit
-       @mkdir -p .eunit
-       @$(REBAR) skip_deps=true eunit
-
-clean:
-       @$(REBAR) clean
-
-build_plt:
-       @$(REBAR) build-plt
-
-dialyzer:
-       @$(REBAR) dialyze
-
-app:
-       @$(REBAR) create template=mochiwebapp dest=$(DEST) appid=$(PROJECT)
-
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar
deleted file mode 100755 (executable)
index 8082f14..0000000
Binary files a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar and /dev/null differ
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/new_mochiweb.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/new_mochiweb.erl
deleted file mode 100755 (executable)
index f49ed39..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env escript
-%% -*- mode: erlang -*-
--export([main/1]).
-
-%% External API
-
-main(_) ->
-    usage().
-
-%% Internal API
-
-usage() ->
-    io:format(
-        "new_mochiweb.erl has been replaced by a rebar template!\n"
-        "\n"
-        "To create a new mochiweb using project:\n"
-        "   make app PROJECT=project_name\n"
-        "\n"
-        "To create a new mochiweb using project in a specific directory:\n"
-        "   make app PROJECT=project_name PREFIX=$HOME/projects/\n"
-        "\n"
-    ),
-    halt(1).
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_std.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_std.erl
deleted file mode 100644 (file)
index ea68c4a..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2008 Mochi Media, Inc.
-
-%% @doc Template module for a mochifmt formatter.
-
--module(mochifmt_std).
--author('bob@mochimedia.com').
--export([new/0, format/3, get_value/3, format_field/3, get_field/3, convert_field/3]).
-
-new() ->
-    {?MODULE}.
-
-format(Format, Args, {?MODULE}=THIS) ->
-    mochifmt:format(Format, Args, THIS).
-
-get_field(Key, Args, {?MODULE}=THIS) ->
-    mochifmt:get_field(Key, Args, THIS).
-
-convert_field(Key, Args, {?MODULE}) ->
-    mochifmt:convert_field(Key, Args).
-
-get_value(Key, Args, {?MODULE}) ->
-    mochifmt:get_value(Key, Args).
-
-format_field(Arg, Format, {?MODULE}=THIS) ->
-    mochifmt:format_field(Arg, Format, THIS).
-
-%%
-%% Tests
-%%
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
--endif.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.app.src b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.app.src
deleted file mode 100644 (file)
index 4a6808e..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-%% This is generated from src/mochiweb.app.src
-{application, mochiweb,
- [{description, "MochiMedia Web Server"},
-  {vsn, "2.7.0"},
-  {modules, []},
-  {registered, []},
-  {env, []},
-  {applications, [kernel, stdlib, inets, xmerl]}]}.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.erl
deleted file mode 100644 (file)
index f597c73..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Start and stop the MochiWeb server.
-
--module(mochiweb).
--author('bob@mochimedia.com').
-
--export([new_request/1, new_response/1]).
--export([all_loaded/0, all_loaded/1, reload/0]).
--export([ensure_started/1]).
-
-reload() ->
-    [c:l(Module) || Module <- all_loaded()].
-
-all_loaded() ->
-    all_loaded(filename:dirname(code:which(?MODULE))).
-
-all_loaded(Base) when is_atom(Base) ->
-    [];
-all_loaded(Base) ->
-    FullBase = Base ++ "/",
-    F = fun ({_Module, Loaded}, Acc) when is_atom(Loaded) ->
-                Acc;
-            ({Module, Loaded}, Acc) ->
-                case lists:prefix(FullBase, Loaded) of
-                    true ->
-                        [Module | Acc];
-                    false ->
-                        Acc
-                end
-        end,
-    lists:foldl(F, [], code:all_loaded()).
-
-
-%% @spec new_request({Socket, Request, Headers}) -> MochiWebRequest
-%% @doc Return a mochiweb_request data structure.
-new_request({Socket, {Method, {abs_path, Uri}, Version}, Headers}) ->
-    mochiweb_request:new(Socket,
-                         Method,
-                         Uri,
-                         Version,
-                         mochiweb_headers:make(Headers));
-% this case probably doesn't "exist".
-new_request({Socket, {Method, {absoluteURI, _Protocol, _Host, _Port, Uri},
-                      Version}, Headers}) ->
-    mochiweb_request:new(Socket,
-                         Method,
-                         Uri,
-                         Version,
-                         mochiweb_headers:make(Headers));
-%% Request-URI is "*"
-%% From http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
-new_request({Socket, {Method, '*'=Uri, Version}, Headers}) ->
-    mochiweb_request:new(Socket,
-                         Method,
-                         Uri,
-                         Version,
-                         mochiweb_headers:make(Headers)).
-
-%% @spec new_response({Request, integer(), Headers}) -> MochiWebResponse
-%% @doc Return a mochiweb_response data structure.
-new_response({Request, Code, Headers}) ->
-    mochiweb_response:new(Request,
-                          Code,
-                          mochiweb_headers:make(Headers)).
-
-%% @spec ensure_started(App::atom()) -> ok
-%% @doc Start the given App if it has not been started already.
-ensure_started(App) ->
-    case application:start(App) of
-        ok ->
-            ok;
-        {error, {already_started, App}} ->
-            ok
-    end.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_acceptor.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_acceptor.erl
deleted file mode 100644 (file)
index ebbaf45..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2010 Mochi Media, Inc.
-
-%% @doc MochiWeb acceptor.
-
--module(mochiweb_acceptor).
--author('bob@mochimedia.com').
-
--include("internal.hrl").
-
--export([start_link/3, init/3]).
-
-start_link(Server, Listen, Loop) ->
-    proc_lib:spawn_link(?MODULE, init, [Server, Listen, Loop]).
-
-init(Server, Listen, Loop) ->
-    T1 = os:timestamp(),
-    case catch mochiweb_socket:accept(Listen) of
-        {ok, Socket} ->
-            gen_server:cast(Server, {accepted, self(), timer:now_diff(os:timestamp(), T1)}),
-            call_loop(Loop, Socket);
-        {error, closed} ->
-            exit(normal);
-        {error, timeout} ->
-            init(Server, Listen, Loop);
-        {error, esslaccept} ->
-            exit(normal);
-        Other ->
-            error_logger:error_report(
-              [{application, mochiweb},
-               "Accept failed error",
-               lists:flatten(io_lib:format("~p", [Other]))]),
-            exit({error, accept_failed})
-    end.
-
-call_loop({M, F}, Socket) ->
-    M:F(Socket);
-call_loop({M, F, [A1]}, Socket) ->
-    M:F(Socket, A1);
-call_loop({M, F, A}, Socket) ->
-    erlang:apply(M, F, [Socket | A]);
-call_loop(Loop, Socket) ->
-    Loop(Socket).
-
-%%
-%% Tests
-%%
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
--endif.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_echo.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_echo.erl
deleted file mode 100644 (file)
index e145840..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Simple and stupid echo server to demo mochiweb_socket_server.
-
--module(mochiweb_echo).
--author('bob@mochimedia.com').
--export([start/0, stop/0, loop/1]).
-
-stop() ->
-    mochiweb_socket_server:stop(?MODULE).
-
-start() ->
-    mochiweb_socket_server:start([{link, false} | options()]).
-
-options() ->
-    [{name, ?MODULE},
-     {port, 6789},
-     {ip, "127.0.0.1"},
-     {max, 1},
-     {loop, {?MODULE, loop}}].
-
-loop(Socket) ->
-    case mochiweb_socket:recv(Socket, 0, 30000) of
-        {ok, Data} ->
-            case mochiweb_socket:send(Socket, Data) of
-                ok ->
-                    loop(Socket);
-                _ ->
-                    exit(normal)
-            end;
-        _Other ->
-            exit(normal)
-    end.
-
-%%
-%% Tests
-%%
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
--endif.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request_tests.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request_tests.erl
deleted file mode 100644 (file)
index b40c867..0000000
+++ /dev/null
@@ -1,182 +0,0 @@
--module(mochiweb_request_tests).
-
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
-
-accepts_content_type_test() ->
-    Req1 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "multipart/related"}])),
-    ?assertEqual(true, Req1:accepts_content_type("multipart/related")),
-    ?assertEqual(true, Req1:accepts_content_type(<<"multipart/related">>)),
-
-    Req2 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html"}])),
-    ?assertEqual(false, Req2:accepts_content_type("multipart/related")),
-
-    Req3 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html, multipart/*"}])),
-    ?assertEqual(true, Req3:accepts_content_type("multipart/related")),
-
-    Req4 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html, multipart/*; q=0.0"}])),
-    ?assertEqual(false, Req4:accepts_content_type("multipart/related")),
-
-    Req5 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html, multipart/*; q=0"}])),
-    ?assertEqual(false, Req5:accepts_content_type("multipart/related")),
-
-    Req6 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html, */*; q=0.0"}])),
-    ?assertEqual(false, Req6:accepts_content_type("multipart/related")),
-
-    Req7 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "multipart/*; q=0.0, */*"}])),
-    ?assertEqual(false, Req7:accepts_content_type("multipart/related")),
-
-    Req8 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "*/*; q=0.0, multipart/*"}])),
-    ?assertEqual(true, Req8:accepts_content_type("multipart/related")),
-
-    Req9 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "*/*; q=0.0, multipart/related"}])),
-    ?assertEqual(true, Req9:accepts_content_type("multipart/related")),
-
-    Req10 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html; level=1"}])),
-    ?assertEqual(true, Req10:accepts_content_type("text/html;level=1")),
-
-    Req11 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html; level=1, text/html"}])),
-    ?assertEqual(true, Req11:accepts_content_type("text/html")),
-
-    Req12 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html; level=1; q=0.0, text/html"}])),
-    ?assertEqual(false, Req12:accepts_content_type("text/html;level=1")),
-
-    Req13 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html; level=1; q=0.0, text/html"}])),
-    ?assertEqual(false, Req13:accepts_content_type("text/html; level=1")),
-
-    Req14 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html;level=1;q=0.1, text/html"}])),
-    ?assertEqual(true, Req14:accepts_content_type("text/html; level=1")).
-
-accepted_encodings_test() ->
-    Req1 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-                                mochiweb_headers:make([])),
-    ?assertEqual(["identity"],
-                 Req1:accepted_encodings(["gzip", "identity"])),
-
-    Req2 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept-Encoding", "gzip, deflate"}])),
-    ?assertEqual(["gzip", "identity"],
-                 Req2:accepted_encodings(["gzip", "identity"])),
-
-    Req3 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept-Encoding", "gzip;q=0.5, deflate"}])),
-    ?assertEqual(["deflate", "gzip", "identity"],
-                 Req3:accepted_encodings(["gzip", "deflate", "identity"])),
-
-    Req4 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept-Encoding", "identity, *;q=0"}])),
-    ?assertEqual(["identity"],
-                 Req4:accepted_encodings(["gzip", "deflate", "identity"])),
-
-    Req5 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept-Encoding", "gzip; q=0.1, *;q=0"}])),
-    ?assertEqual(["gzip"],
-                 Req5:accepted_encodings(["gzip", "deflate", "identity"])),
-
-    Req6 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept-Encoding", "gzip; q=, *;q=0"}])),
-    ?assertEqual(bad_accept_encoding_value,
-                 Req6:accepted_encodings(["gzip", "deflate", "identity"])),
-
-    Req7 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept-Encoding", "gzip;q=2.0, *;q=0"}])),
-    ?assertEqual(bad_accept_encoding_value,
-                 Req7:accepted_encodings(["gzip", "identity"])),
-
-    Req8 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept-Encoding", "deflate, *;q=0.0"}])),
-    ?assertEqual([],
-                 Req8:accepted_encodings(["gzip", "identity"])).
-
-accepted_content_types_test() ->
-    Req1 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html"}])),
-    ?assertEqual(["text/html"],
-        Req1:accepted_content_types(["text/html", "application/json"])),
-
-    Req2 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html, */*;q=0"}])),
-    ?assertEqual(["text/html"],
-        Req2:accepted_content_types(["text/html", "application/json"])),
-
-    Req3 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/*, */*;q=0"}])),
-    ?assertEqual(["text/html"],
-        Req3:accepted_content_types(["text/html", "application/json"])),
-
-    Req4 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/*;q=0.8, */*;q=0.5"}])),
-    ?assertEqual(["text/html", "application/json"],
-        Req4:accepted_content_types(["application/json", "text/html"])),
-
-    Req5 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/*;q=0.8, */*;q=0.5"}])),
-    ?assertEqual(["text/html", "application/json"],
-        Req5:accepted_content_types(["text/html", "application/json"])),
-
-    Req6 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/*;q=0.5, */*;q=0.5"}])),
-    ?assertEqual(["application/json", "text/html"],
-        Req6:accepted_content_types(["application/json", "text/html"])),
-
-    Req7 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make(
-            [{"Accept", "text/html;q=0.5, application/json;q=0.5"}])),
-    ?assertEqual(["application/json", "text/html"],
-        Req7:accepted_content_types(["application/json", "text/html"])),
-
-    Req8 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/html"}])),
-    ?assertEqual([],
-        Req8:accepted_content_types(["application/json"])),
-
-    Req9 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1},
-        mochiweb_headers:make([{"Accept", "text/*;q=0.9, text/html;q=0.5, */*;q=0.7"}])),
-    ?assertEqual(["application/json", "text/html"],
-        Req9:accepted_content_types(["text/html", "application/json"])).
-
-should_close_test() ->
-    F = fun (V, H) ->
-                (mochiweb_request:new(
-                   nil, 'GET', "/", V,
-                   mochiweb_headers:make(H)
-                  )):should_close()
-        end,
-    ?assertEqual(
-       true,
-       F({1, 1}, [{"Connection", "close"}])),
-    ?assertEqual(
-       true,
-       F({1, 0}, [{"Connection", "close"}])),
-    ?assertEqual(
-       true,
-       F({1, 1}, [{"Connection", "ClOSe"}])),
-    ?assertEqual(
-       false,
-       F({1, 1}, [{"Connection", "closer"}])),
-    ?assertEqual(
-       false,
-       F({1, 1}, [])),
-    ?assertEqual(
-       true,
-       F({1, 0}, [])),
-    ?assertEqual(
-       false,
-       F({1, 0}, [{"Connection", "Keep-Alive"}])),
-    ok.
-
--endif.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket.erl
deleted file mode 100644 (file)
index 76b018c..0000000
+++ /dev/null
@@ -1,84 +0,0 @@
-%% @copyright 2010 Mochi Media, Inc.
-
-%% @doc MochiWeb socket - wrapper for plain and ssl sockets.
-
--module(mochiweb_socket).
-
--export([listen/4, accept/1, recv/3, send/2, close/1, port/1, peername/1,
-         setopts/2, type/1]).
-
--define(ACCEPT_TIMEOUT, 2000).
-
-listen(Ssl, Port, Opts, SslOpts) ->
-    case Ssl of
-        true ->
-            case ssl:listen(Port, Opts ++ SslOpts) of
-                {ok, ListenSocket} ->
-                    {ok, {ssl, ListenSocket}};
-                {error, _} = Err ->
-                    Err
-            end;
-        false ->
-            gen_tcp:listen(Port, Opts)
-    end.
-
-accept({ssl, ListenSocket}) ->
-    % There's a bug in ssl:transport_accept/2 at the moment, which is the
-    % reason for the try...catch block. Should be fixed in OTP R14.
-    try ssl:transport_accept(ListenSocket) of
-        {ok, Socket} ->
-            case ssl:ssl_accept(Socket) of
-                ok ->
-                    {ok, {ssl, Socket}};
-                {error, _} = Err ->
-                    Err
-            end;
-        {error, _} = Err ->
-            Err
-    catch
-        error:{badmatch, {error, Reason}} ->
-            {error, Reason}
-    end;
-accept(ListenSocket) ->
-    gen_tcp:accept(ListenSocket, ?ACCEPT_TIMEOUT).
-
-recv({ssl, Socket}, Length, Timeout) ->
-    ssl:recv(Socket, Length, Timeout);
-recv(Socket, Length, Timeout) ->
-    gen_tcp:recv(Socket, Length, Timeout).
-
-send({ssl, Socket}, Data) ->
-    ssl:send(Socket, Data);
-send(Socket, Data) ->
-    gen_tcp:send(Socket, Data).
-
-close({ssl, Socket}) ->
-    ssl:close(Socket);
-close(Socket) ->
-    gen_tcp:close(Socket).
-
-port({ssl, Socket}) ->
-    case ssl:sockname(Socket) of
-        {ok, {_, Port}} ->
-            {ok, Port};
-        {error, _} = Err ->
-            Err
-    end;
-port(Socket) ->
-    inet:port(Socket).
-
-peername({ssl, Socket}) ->
-    ssl:peername(Socket);
-peername(Socket) ->
-    inet:peername(Socket).
-
-setopts({ssl, Socket}, Opts) ->
-    ssl:setopts(Socket, Opts);
-setopts(Socket, Opts) ->
-    inet:setopts(Socket, Opts).
-
-type({ssl, _}) ->
-    ssl;
-type(_) ->
-    plain.
-
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/start-dev.sh b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/start-dev.sh
deleted file mode 100755 (executable)
index fb7c45e..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-# NOTE: mustache templates need \\ because they are not awesome.
-exec erl -pa ebin edit deps/*/ebin -boot start_sasl \\
-    -sname {{appid}}_dev \\
-    -s {{appid}} \\
-    -s reloader
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_base64url_tests.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_base64url_tests.erl
deleted file mode 100644 (file)
index 69f276a..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
--module(mochiweb_base64url_tests).
--include_lib("eunit/include/eunit.hrl").
-
-id(X) ->
-    ?assertEqual(
-       X,
-       mochiweb_base64url:decode(mochiweb_base64url:encode(X))),
-    ?assertEqual(
-       X,
-       mochiweb_base64url:decode(
-         binary_to_list(mochiweb_base64url:encode(binary_to_list(X))))).
-
-random_binary(Short,Long) ->
-    << <<(random:uniform(256) - 1)>>
-     || _ <- lists:seq(1, Short + random:uniform(1 + Long - Short) - 1) >>.
-
-empty_test() ->
-    id(<<>>).
-
-onechar_test() ->
-    [id(<<C>>) || C <- lists:seq(0,255)],
-    ok.
-
-nchar_test() ->
-    %% 1000 tests of 2-6 char strings
-    [id(B) || _ <- lists:seq(1,1000), B <- [random_binary(2, 6)]],
-    ok.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_html_tests.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_html_tests.erl
deleted file mode 100644 (file)
index 3d35400..0000000
+++ /dev/null
@@ -1,589 +0,0 @@
--module(mochiweb_html_tests).
--include_lib("eunit/include/eunit.hrl").
-
-to_html_test() ->
-    ?assertEqual(
-       <<"<html><head><title>hey!</title></head><body><p class=\"foo\">what's up<br /></p><div>sucka</div>RAW!<!-- comment! --></body></html>">>,
-       iolist_to_binary(
-         mochiweb_html:to_html({html, [],
-                  [{<<"head">>, [],
-                    [{title, <<"hey!">>}]},
-                   {body, [],
-                    [{p, [{class, foo}], [<<"what's">>, <<" up">>, {br}]},
-                     {'div', <<"sucka">>},
-                     {'=', <<"RAW!">>},
-                     {comment, <<" comment! ">>}]}]}))),
-    ?assertEqual(
-       <<"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">">>,
-       iolist_to_binary(
-         mochiweb_html:to_html({doctype,
-                  [<<"html">>, <<"PUBLIC">>,
-                   <<"-//W3C//DTD XHTML 1.0 Transitional//EN">>,
-                   <<"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">>]}))),
-    ?assertEqual(
-       <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>,
-       iolist_to_binary(
-         mochiweb_html:to_html({<<"html">>,[],
-                  [{pi, <<"xml:namespace">>,
-                    [{<<"prefix">>,<<"o">>},
-                     {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]}))),
-    ok.
-
-escape_test() ->
-    ?assertEqual(
-       <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
-       mochiweb_html:escape(<<"&quot;\"word ><<up!&quot;">>)),
-    ?assertEqual(
-       <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
-       mochiweb_html:escape("&quot;\"word ><<up!&quot;")),
-    ?assertEqual(
-       <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
-       mochiweb_html:escape('&quot;\"word ><<up!&quot;')),
-    ok.
-
-escape_attr_test() ->
-    ?assertEqual(
-       <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
-       mochiweb_html:escape_attr(<<"&quot;\"word ><<up!&quot;">>)),
-    ?assertEqual(
-       <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
-       mochiweb_html:escape_attr("&quot;\"word ><<up!&quot;")),
-    ?assertEqual(
-       <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
-       mochiweb_html:escape_attr('&quot;\"word ><<up!&quot;')),
-    ?assertEqual(
-       <<"12345">>,
-       mochiweb_html:escape_attr(12345)),
-    ?assertEqual(
-       <<"1.5">>,
-       mochiweb_html:escape_attr(1.5)),
-    ok.
-
-tokens_test() ->
-    ?assertEqual(
-       [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
-                                {<<"wibble">>, <<"wibble">>},
-                                {<<"alice">>, <<"bob">>}], true}],
-       mochiweb_html:tokens(<<"<foo bar=baz wibble='wibble' alice=\"bob\"/>">>)),
-    ?assertEqual(
-       [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
-                                {<<"wibble">>, <<"wibble">>},
-                                {<<"alice">>, <<"bob">>}], true}],
-       mochiweb_html:tokens(<<"<foo bar=baz wibble='wibble' alice=bob/>">>)),
-    ?assertEqual(
-       [{comment, <<"[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]">>}],
-       mochiweb_html:tokens(<<"<!--[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]-->">>)),
-    ?assertEqual(
-       [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
-        {data, <<" A= B <= C ">>, false},
-        {end_tag, <<"script">>}],
-       mochiweb_html:tokens(<<"<script type=\"text/javascript\"> A= B <= C </script>">>)),
-    ?assertEqual(
-       [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
-        {data, <<" A= B <= C ">>, false},
-        {end_tag, <<"script">>}],
-       mochiweb_html:tokens(<<"<script type =\"text/javascript\"> A= B <= C </script>">>)),
-    ?assertEqual(
-       [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
-        {data, <<" A= B <= C ">>, false},
-        {end_tag, <<"script">>}],
-       mochiweb_html:tokens(<<"<script type = \"text/javascript\"> A= B <= C </script>">>)),
-    ?assertEqual(
-       [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
-        {data, <<" A= B <= C ">>, false},
-        {end_tag, <<"script">>}],
-       mochiweb_html:tokens(<<"<script type= \"text/javascript\"> A= B <= C </script>">>)),
-    ?assertEqual(
-       [{start_tag, <<"textarea">>, [], false},
-        {data, <<"<html></body>">>, false},
-        {end_tag, <<"textarea">>}],
-       mochiweb_html:tokens(<<"<textarea><html></body></textarea>">>)),
-    ?assertEqual(
-       [{start_tag, <<"textarea">>, [], false},
-        {data, <<"<html></body></textareaz>">>, false}],
-       mochiweb_html:tokens(<<"<textarea ><html></body></textareaz>">>)),
-    ?assertEqual(
-       [{pi, <<"xml:namespace">>,
-         [{<<"prefix">>,<<"o">>},
-          {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
-       mochiweb_html:tokens(<<"<?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?>">>)),
-    ?assertEqual(
-       [{pi, <<"xml:namespace">>,
-         [{<<"prefix">>,<<"o">>},
-          {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
-       mochiweb_html:tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office \n?>">>)),
-    ?assertEqual(
-       [{pi, <<"xml:namespace">>,
-         [{<<"prefix">>,<<"o">>},
-          {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
-       mochiweb_html:tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office">>)),
-    ?assertEqual(
-       [{data, <<"<">>, false}],
-       mochiweb_html:tokens(<<"&lt;">>)),
-    ?assertEqual(
-       [{data, <<"not html ">>, false},
-        {data, <<"< at all">>, false}],
-       mochiweb_html:tokens(<<"not html < at all">>)),
-    ok.
-
-parse_test() ->
-    D0 = <<"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">
-<html>
- <head>
-   <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">
-   <title>Foo</title>
-   <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/rel/dojo/resources/dojo.css\" media=\"screen\">
-   <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/foo.css\" media=\"screen\">
-   <!--[if lt IE 7]>
-   <style type=\"text/css\">
-     .no_ie { display: none; }
-   </style>
-   <![endif]-->
-   <link rel=\"icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
-   <link rel=\"shortcut icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
- </head>
- <body id=\"home\" class=\"tundra\"><![CDATA[&lt;<this<!-- is -->CDATA>&gt;]]></body>
-</html>">>,
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"head">>, [],
-          [{<<"meta">>,
-            [{<<"http-equiv">>,<<"Content-Type">>},
-             {<<"content">>,<<"text/html; charset=UTF-8">>}],
-            []},
-           {<<"title">>,[],[<<"Foo">>]},
-           {<<"link">>,
-            [{<<"rel">>,<<"stylesheet">>},
-             {<<"type">>,<<"text/css">>},
-             {<<"href">>,<<"/static/rel/dojo/resources/dojo.css">>},
-             {<<"media">>,<<"screen">>}],
-            []},
-           {<<"link">>,
-            [{<<"rel">>,<<"stylesheet">>},
-             {<<"type">>,<<"text/css">>},
-             {<<"href">>,<<"/static/foo.css">>},
-             {<<"media">>,<<"screen">>}],
-            []},
-           {comment,<<"[if lt IE 7]>\n   <style type=\"text/css\">\n     .no_ie { display: none; }\n   </style>\n   <![endif]">>},
-           {<<"link">>,
-            [{<<"rel">>,<<"icon">>},
-             {<<"href">>,<<"/static/images/favicon.ico">>},
-             {<<"type">>,<<"image/x-icon">>}],
-            []},
-           {<<"link">>,
-            [{<<"rel">>,<<"shortcut icon">>},
-             {<<"href">>,<<"/static/images/favicon.ico">>},
-             {<<"type">>,<<"image/x-icon">>}],
-            []}]},
-         {<<"body">>,
-          [{<<"id">>,<<"home">>},
-           {<<"class">>,<<"tundra">>}],
-          [<<"&lt;<this<!-- is -->CDATA>&gt;">>]}]},
-       mochiweb_html:parse(D0)),
-    ?assertEqual(
-       {<<"html">>,[],
-        [{pi, <<"xml:namespace">>,
-          [{<<"prefix">>,<<"o">>},
-           {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]},
-       mochiweb_html:parse(
-         <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>)),
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"dd">>, [], [<<"foo">>]},
-         {<<"dt">>, [], [<<"bar">>]}]},
-       mochiweb_html:parse(<<"<html><dd>foo<dt>bar</html>">>)),
-    %% Singleton sadness
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"link">>, [], []},
-         <<"foo">>,
-         {<<"br">>, [], []},
-         <<"bar">>]},
-       mochiweb_html:parse(<<"<html><link>foo<br>bar</html>">>)),
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"link">>, [], [<<"foo">>,
-                           {<<"br">>, [], []},
-                           <<"bar">>]}]},
-       mochiweb_html:parse(<<"<html><link>foo<br>bar</link></html>">>)),
-    %% Case insensitive tags
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"head">>, [], [<<"foo">>,
-                           {<<"br">>, [], []},
-                           <<"BAR">>]},
-         {<<"body">>, [{<<"class">>, <<"">>}, {<<"bgcolor">>, <<"#Aa01fF">>}], []}
-        ]},
-       mochiweb_html:parse(<<"<html><Head>foo<bR>BAR</head><body Class=\"\" bgcolor=\"#Aa01fF\"></BODY></html>">>)),
-    ok.
-
-exhaustive_is_singleton_test() ->
-    T = mochiweb_cover:clause_lookup_table(mochiweb_html, is_singleton),
-    [?assertEqual(V, mochiweb_html:is_singleton(K)) || {K, V} <- T].
-
-tokenize_attributes_test() ->
-    ?assertEqual(
-       {<<"foo">>,
-        [{<<"bar">>, <<"b\"az">>},
-         {<<"wibble">>, <<"wibble">>},
-         {<<"taco", 16#c2, 16#a9>>, <<"bell">>},
-         {<<"quux">>, <<"quux">>}],
-        []},
-       mochiweb_html:parse(<<"<foo bar=\"b&quot;az\" wibble taco&copy;=bell quux">>)),
-    ok.
-
-tokens2_test() ->
-    D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org</link><description>Bob's Rants</description></channel>">>,
-    ?assertEqual(
-       [{start_tag,<<"channel">>,[],false},
-        {start_tag,<<"title">>,[],false},
-        {data,<<"from __future__ import *">>,false},
-        {end_tag,<<"title">>},
-        {start_tag,<<"link">>,[],true},
-        {data,<<"http://bob.pythonmac.org">>,false},
-        {end_tag,<<"link">>},
-        {start_tag,<<"description">>,[],false},
-        {data,<<"Bob's Rants">>,false},
-        {end_tag,<<"description">>},
-        {end_tag,<<"channel">>}],
-       mochiweb_html:tokens(D0)),
-    ok.
-
-to_tokens_test() ->
-    ?assertEqual(
-       [{start_tag, <<"p">>, [{class, 1}], false},
-        {end_tag, <<"p">>}],
-       mochiweb_html:to_tokens({p, [{class, 1}], []})),
-    ?assertEqual(
-       [{start_tag, <<"p">>, [], false},
-        {end_tag, <<"p">>}],
-       mochiweb_html:to_tokens({p})),
-    ?assertEqual(
-       [{'=', <<"data">>}],
-       mochiweb_html:to_tokens({'=', <<"data">>})),
-    ?assertEqual(
-       [{comment, <<"comment">>}],
-       mochiweb_html:to_tokens({comment, <<"comment">>})),
-    %% This is only allowed in sub-tags:
-    %% {p, [{"class", "foo"}]} as {p, [{"class", "foo"}], []}
-    %% On the outside it's always treated as follows:
-    %% {p, [], [{"class", "foo"}]} as {p, [], [{"class", "foo"}]}
-    ?assertEqual(
-       [{start_tag, <<"html">>, [], false},
-        {start_tag, <<"p">>, [{class, 1}], false},
-        {end_tag, <<"p">>},
-        {end_tag, <<"html">>}],
-       mochiweb_html:to_tokens({html, [{p, [{class, 1}]}]})),
-    ok.
-
-parse2_test() ->
-    D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org<br>foo</link><description>Bob's Rants</description></channel>">>,
-    ?assertEqual(
-       {<<"channel">>,[],
-        [{<<"title">>,[],[<<"from __future__ import *">>]},
-         {<<"link">>,[],[
-                         <<"http://bob.pythonmac.org">>,
-                         {<<"br">>,[],[]},
-                         <<"foo">>]},
-         {<<"description">>,[],[<<"Bob's Rants">>]}]},
-       mochiweb_html:parse(D0)),
-    ok.
-
-parse_tokens_test() ->
-    D0 = [{doctype,[<<"HTML">>,<<"PUBLIC">>,<<"-//W3C//DTD HTML 4.01 Transitional//EN">>]},
-          {data,<<"\n">>,true},
-          {start_tag,<<"html">>,[],false}],
-    ?assertEqual(
-       {<<"html">>, [], []},
-       mochiweb_html:parse_tokens(D0)),
-    D1 = D0 ++ [{end_tag, <<"html">>}],
-    ?assertEqual(
-       {<<"html">>, [], []},
-       mochiweb_html:parse_tokens(D1)),
-    D2 = D0 ++ [{start_tag, <<"body">>, [], false}],
-    ?assertEqual(
-       {<<"html">>, [], [{<<"body">>, [], []}]},
-       mochiweb_html:parse_tokens(D2)),
-    D3 = D0 ++ [{start_tag, <<"head">>, [], false},
-                {end_tag, <<"head">>},
-                {start_tag, <<"body">>, [], false}],
-    ?assertEqual(
-       {<<"html">>, [], [{<<"head">>, [], []}, {<<"body">>, [], []}]},
-       mochiweb_html:parse_tokens(D3)),
-    D4 = D3 ++ [{data,<<"\n">>,true},
-                {start_tag,<<"div">>,[{<<"class">>,<<"a">>}],false},
-                {start_tag,<<"a">>,[{<<"name">>,<<"#anchor">>}],false},
-                {end_tag,<<"a">>},
-                {end_tag,<<"div">>},
-                {start_tag,<<"div">>,[{<<"class">>,<<"b">>}],false},
-                {start_tag,<<"div">>,[{<<"class">>,<<"c">>}],false},
-                {end_tag,<<"div">>},
-                {end_tag,<<"div">>}],
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"head">>, [], []},
-         {<<"body">>, [],
-          [{<<"div">>, [{<<"class">>, <<"a">>}], [{<<"a">>, [{<<"name">>, <<"#anchor">>}], []}]},
-           {<<"div">>, [{<<"class">>, <<"b">>}], [{<<"div">>, [{<<"class">>, <<"c">>}], []}]}
-          ]}]},
-       mochiweb_html:parse_tokens(D4)),
-    D5 = [{start_tag,<<"html">>,[],false},
-          {data,<<"\n">>,true},
-          {data,<<"boo">>,false},
-          {data,<<"hoo">>,false},
-          {data,<<"\n">>,true},
-          {end_tag,<<"html">>}],
-    ?assertEqual(
-       {<<"html">>, [], [<<"\nboohoo\n">>]},
-       mochiweb_html:parse_tokens(D5)),
-    D6 = [{start_tag,<<"html">>,[],false},
-          {data,<<"\n">>,true},
-          {data,<<"\n">>,true},
-          {end_tag,<<"html">>}],
-    ?assertEqual(
-       {<<"html">>, [], []},
-       mochiweb_html:parse_tokens(D6)),
-    D7 = [{start_tag,<<"html">>,[],false},
-          {start_tag,<<"ul">>,[],false},
-          {start_tag,<<"li">>,[],false},
-          {data,<<"word">>,false},
-          {start_tag,<<"li">>,[],false},
-          {data,<<"up">>,false},
-          {end_tag,<<"li">>},
-          {start_tag,<<"li">>,[],false},
-          {data,<<"fdsa">>,false},
-          {start_tag,<<"br">>,[],true},
-          {data,<<"asdf">>,false},
-          {end_tag,<<"ul">>},
-          {end_tag,<<"html">>}],
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"ul">>, [],
-          [{<<"li">>, [], [<<"word">>]},
-           {<<"li">>, [], [<<"up">>]},
-           {<<"li">>, [], [<<"fdsa">>,{<<"br">>, [], []}, <<"asdf">>]}]}]},
-       mochiweb_html:parse_tokens(D7)),
-    ok.
-
-destack_test() ->
-    ?assertEqual(
-       {<<"a">>, [], []},
-       mochiweb_html:destack([{<<"a">>, [], []}])),
-    ?assertEqual(
-       {<<"a">>, [], [{<<"b">>, [], []}]},
-       mochiweb_html:destack([{<<"b">>, [], []}, {<<"a">>, [], []}])),
-    ?assertEqual(
-       {<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]},
-       mochiweb_html:destack(
-         [{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}])),
-    ?assertEqual(
-       [{<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]}],
-       mochiweb_html:destack(
-         <<"b">>,
-         [{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}])),
-    ?assertEqual(
-       [{<<"b">>, [], [{<<"c">>, [], []}]}, {<<"a">>, [], []}],
-       mochiweb_html:destack(
-         <<"c">>,
-         [{<<"c">>, [], []}, {<<"b">>, [], []},{<<"a">>, [], []}])),
-    ok.
-
-doctype_test() ->
-    ?assertEqual(
-       {<<"html">>,[],[{<<"head">>,[],[]}]},
-       mochiweb_html:parse("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
-                           "<html><head></head></body></html>")),
-    %% http://code.google.com/p/mochiweb/issues/detail?id=52
-    ?assertEqual(
-       {<<"html">>,[],[{<<"head">>,[],[]}]},
-       mochiweb_html:parse("<html>"
-                           "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
-                           "<head></head></body></html>")),
-    %% http://github.com/mochi/mochiweb/pull/13
-    ?assertEqual(
-       {<<"html">>,[],[{<<"head">>,[],[]}]},
-       mochiweb_html:parse("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\"/>"
-                           "<html>"
-                           "<head></head></body></html>")),
-    ok.
-
-dumb_br_test() ->
-    %% http://code.google.com/p/mochiweb/issues/detail?id=71
-    ?assertEqual(
-       {<<"div">>,[],[{<<"br">>, [], []}, {<<"br">>, [], []}, <<"z">>]},
-       mochiweb_html:parse("<div><br/><br/>z</br/></br/></div>")),
-    ?assertEqual(
-       {<<"div">>,[],[{<<"br">>, [], []}, {<<"br">>, [], []}, <<"z">>]},
-       mochiweb_html:parse("<div><br><br>z</br/></br/></div>")),
-    ?assertEqual(
-       {<<"div">>,[],[{<<"br">>, [], []}, {<<"br">>, [], []}, <<"z">>, {<<"br">>, [], []}, {<<"br">>, [], []}]},
-       mochiweb_html:parse("<div><br><br>z<br/><br/></div>")),
-    ?assertEqual(
-       {<<"div">>,[],[{<<"br">>, [], []}, {<<"br">>, [], []}, <<"z">>]},
-       mochiweb_html:parse("<div><br><br>z</br></br></div>")).
-
-
-php_test() ->
-    %% http://code.google.com/p/mochiweb/issues/detail?id=71
-    ?assertEqual(
-       [{pi, <<"php\n">>}],
-       mochiweb_html:tokens(
-         "<?php\n?>")),
-    ?assertEqual(
-       {<<"div">>, [], [{pi, <<"php\n">>}]},
-       mochiweb_html:parse(
-         "<div><?php\n?></div>")),
-    ok.
-
-parse_unquoted_attr_test() ->
-    D0 = <<"<html><img src=/images/icon.png/></html>">>,
-    ?assertEqual(
-        {<<"html">>,[],[
-            { <<"img">>, [ { <<"src">>, <<"/images/icon.png">> } ], [] }
-        ]},
-        mochiweb_html:parse(D0)),
-
-    D1 = <<"<html><img src=/images/icon.png></img></html>">>,
-        ?assertEqual(
-            {<<"html">>,[],[
-                { <<"img">>, [ { <<"src">>, <<"/images/icon.png">> } ], [] }
-            ]},
-            mochiweb_html:parse(D1)),
-
-    D2 = <<"<html><img src=/images/icon&gt;.png width=100></img></html>">>,
-        ?assertEqual(
-            {<<"html">>,[],[
-                { <<"img">>, [ { <<"src">>, <<"/images/icon>.png">> }, { <<"width">>, <<"100">> } ], [] }
-            ]},
-            mochiweb_html:parse(D2)),
-    ok.
-
-parse_quoted_attr_test() ->
-    D0 = <<"<html><img src='/images/icon.png'></html>">>,
-    ?assertEqual(
-        {<<"html">>,[],[
-            { <<"img">>, [ { <<"src">>, <<"/images/icon.png">> } ], [] }
-        ]},
-        mochiweb_html:parse(D0)),
-
-    D1 = <<"<html><img src=\"/images/icon.png'></html>">>,
-    ?assertEqual(
-        {<<"html">>,[],[
-            { <<"img">>, [ { <<"src">>, <<"/images/icon.png'></html>">> } ], [] }
-        ]},
-        mochiweb_html:parse(D1)),
-
-    D2 = <<"<html><img src=\"/images/icon&gt;.png\"></html>">>,
-    ?assertEqual(
-        {<<"html">>,[],[
-            { <<"img">>, [ { <<"src">>, <<"/images/icon>.png">> } ], [] }
-        ]},
-        mochiweb_html:parse(D2)),
-
-    %% Quoted attributes can contain whitespace and newlines
-    D3 = <<"<html><a href=\"#\" onclick=\"javascript: test(1,\ntrue);\"></html>">>,
-    ?assertEqual(
-        {<<"html">>,[],[
-            { <<"a">>, [ { <<"href">>, <<"#">> }, {<<"onclick">>, <<"javascript: test(1,\ntrue);">>} ], [] }
-        ]},
-        mochiweb_html:parse(D3)),
-    ok.
-
-parse_missing_attr_name_test() ->
-    D0 = <<"<html =black></html>">>,
-    ?assertEqual(
-        {<<"html">>, [ { <<"=">>, <<"=">> }, { <<"black">>, <<"black">> } ], [] },
-       mochiweb_html:parse(D0)),
-    ok.
-
-parse_broken_pi_test() ->
-        D0 = <<"<html><?xml:namespace prefix = o ns = \"urn:schemas-microsoft-com:office:office\" /></html>">>,
-        ?assertEqual(
-                {<<"html">>, [], [
-                        { pi, <<"xml:namespace">>, [ { <<"prefix">>, <<"o">> },
-                                                     { <<"ns">>, <<"urn:schemas-microsoft-com:office:office">> } ] }
-                ] },
-                mochiweb_html:parse(D0)),
-        ok.
-
-parse_funny_singletons_test() ->
-        D0 = <<"<html><input><input>x</input></input></html>">>,
-        ?assertEqual(
-                {<<"html">>, [], [
-                        { <<"input">>, [], [] },
-                        { <<"input">>, [], [ <<"x">> ] }
-                ] },
-                mochiweb_html:parse(D0)),
-        ok.
-
-to_html_singleton_test() ->
-    D0 = <<"<link />">>,
-    T0 = {<<"link">>,[],[]},
-    ?assertEqual(D0, iolist_to_binary(mochiweb_html:to_html(T0))),
-
-    D1 = <<"<head><link /></head>">>,
-    T1 = {<<"head">>,[],[{<<"link">>,[],[]}]},
-    ?assertEqual(D1, iolist_to_binary(mochiweb_html:to_html(T1))),
-
-    D2 = <<"<head><link /><link /></head>">>,
-    T2 = {<<"head">>,[],[{<<"link">>,[],[]}, {<<"link">>,[],[]}]},
-    ?assertEqual(D2, iolist_to_binary(mochiweb_html:to_html(T2))),
-
-    %% Make sure singletons are converted to singletons.
-    D3 = <<"<head><link /></head>">>,
-    T3 = {<<"head">>,[],[{<<"link">>,[],[<<"funny">>]}]},
-    ?assertEqual(D3, iolist_to_binary(mochiweb_html:to_html(T3))),
-
-    D4 = <<"<link />">>,
-    T4 = {<<"link">>,[],[<<"funny">>]},
-    ?assertEqual(D4, iolist_to_binary(mochiweb_html:to_html(T4))),
-
-    ok.
-
-parse_amp_test_() ->
-    [?_assertEqual(
-       {<<"html">>,[],
-        [{<<"body">>,[{<<"onload">>,<<"javascript:A('1&2')">>}],[]}]},
-       mochiweb_html:parse("<html><body onload=\"javascript:A('1&2')\"></body></html>")),
-     ?_assertEqual(
-        {<<"html">>,[],
-         [{<<"body">>,[{<<"onload">>,<<"javascript:A('1& 2')">>}],[]}]},
-        mochiweb_html:parse("<html><body onload=\"javascript:A('1& 2')\"></body></html>")),
-     ?_assertEqual(
-        {<<"html">>,[],
-         [{<<"body">>,[],[<<"& ">>]}]},
-        mochiweb_html:parse("<html><body>& </body></html>")),
-     ?_assertEqual(
-        {<<"html">>,[],
-         [{<<"body">>,[],[<<"&">>]}]},
-        mochiweb_html:parse("<html><body>&</body></html>"))].
-
-parse_unescaped_lt_test() ->
-    D1 = <<"<div> < < <a href=\"/\">Back</a></div>">>,
-    ?assertEqual(
-        {<<"div">>, [], [<<" < < ">>, {<<"a">>, [{<<"href">>, <<"/">>}],
-                                       [<<"Back">>]}]},
-        mochiweb_html:parse(D1)),
-
-    D2 = <<"<div> << <a href=\"/\">Back</a></div>">>,
-    ?assertEqual(
-        {<<"div">>, [], [<<" << ">>, {<<"a">>, [{<<"href">>, <<"/">>}],
-                                      [<<"Back">>]}]},
-    mochiweb_html:parse(D2)).
-
-html5_doctype_test() ->
-    ?assertEqual(
-       [{doctype,[<<"html">>]},
-        {start_tag,<<"head">>,[],false},
-        {end_tag,<<"head">>},
-        {start_tag,<<"body">>,[],false},
-        {end_tag,<<"body">>}],
-       mochiweb_html:tokens("<!doctype html><head></head><body></body>")).
-
-implicit_html_test() ->
-    %% https://github.com/mochi/mochiweb/issues/110
-    ?assertEqual(
-       {<<"html">>, [],
-        [{<<"head">>, [], []},
-         {<<"body">>, [], []}]},
-       mochiweb_html:parse("<!doctype html><head></head><body></body>")).
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_http_tests.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_http_tests.erl
deleted file mode 100644 (file)
index 0003451..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
--module(mochiweb_http_tests).
--include_lib("eunit/include/eunit.hrl").
-
--ifdef(gen_tcp_r15b_workaround).
--define(SHOULD_HAVE_BUG, true).
--else.
--define(SHOULD_HAVE_BUG, false).
--endif.
-
-has_acceptor_bug_test_() ->
-    {setup,
-     fun start_server/0,
-     fun mochiweb_http:stop/1,
-     fun has_acceptor_bug_tests/1}.
-
-start_server() ->
-    application:start(inets),
-    {ok, Pid} = mochiweb_http:start_link([{port, 0},
-                                          {loop, fun responder/1}]),
-    Pid.
-
-has_acceptor_bug_tests(Server) ->
-    Port = mochiweb_socket_server:get(Server, port),
-    [{"1000 should be fine even with the bug",
-      ?_assertEqual(false, has_bug(Port, 1000))},
-     {"10000 should trigger the bug if present",
-      ?_assertEqual(?SHOULD_HAVE_BUG, has_bug(Port, 10000))}].
-
-responder(Req) ->
-    Req:respond({200,
-                 [{"Content-Type", "text/html"}],
-                 ["<html><body>Hello</body></html>"]}).
-
-has_bug(Port, Len) ->
-  case
-    httpc:request(get, {"http://127.0.0.1:" ++ integer_to_list(Port) ++ "/",
-                        [{"X-Random", lists:duplicate(Len, $a)}]}, [], [])
-  of
-      {error, socket_closed_remotely} ->
-          true;
-      {ok, {{"HTTP/1.1", 200, "OK"}, _, "<html><body>Hello</body></html>"}} ->
-          false;
-      {ok, {{"HTTP/1.1", 400, "Bad Request"}, _, []}} ->
-          false
-  end.
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_tests.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_tests.erl
deleted file mode 100644 (file)
index 15cb06a..0000000
+++ /dev/null
@@ -1,199 +0,0 @@
--module(mochiweb_tests).
--include_lib("eunit/include/eunit.hrl").
-
--record(treq, {path, body= <<>>, xreply= <<>>}).
-
-ssl_cert_opts() ->
-    EbinDir = filename:dirname(code:which(?MODULE)),
-    CertDir = filename:join([EbinDir, "..", "support", "test-materials"]),
-    CertFile = filename:join(CertDir, "test_ssl_cert.pem"),
-    KeyFile = filename:join(CertDir, "test_ssl_key.pem"),
-    [{certfile, CertFile}, {keyfile, KeyFile}].
-
-with_server(Transport, ServerFun, ClientFun) ->
-    ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}],
-    ServerOpts = case Transport of
-        plain ->
-            ServerOpts0;
-        ssl ->
-            ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}]
-    end,
-    {ok, Server} = mochiweb_http:start_link(ServerOpts),
-    Port = mochiweb_socket_server:get(Server, port),
-    Res = (catch ClientFun(Transport, Port)),
-    mochiweb_http:stop(Server),
-    Res.
-
-request_test() ->
-    R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []),
-    "/foo/bar/baz wibble quux" = R:get(path),
-    ok.
-
--define(LARGE_TIMEOUT, 60).
-
-single_http_GET_test() ->
-    do_GET(plain, 1).
-
-single_https_GET_test() ->
-    do_GET(ssl, 1).
-
-multiple_http_GET_test() ->
-    do_GET(plain, 3).
-
-multiple_https_GET_test() ->
-    do_GET(ssl, 3).
-
-hundred_http_GET_test_() -> % note the underscore
-    {timeout, ?LARGE_TIMEOUT,
-     fun() -> ?assertEqual(ok, do_GET(plain,100)) end}.
-
-hundred_https_GET_test_() -> % note the underscore
-    {timeout, ?LARGE_TIMEOUT,
-     fun() -> ?assertEqual(ok, do_GET(ssl,100)) end}.
-
-single_128_http_POST_test() ->
-    do_POST(plain, 128, 1).
-
-single_128_https_POST_test() ->
-    do_POST(ssl, 128, 1).
-
-single_2k_http_POST_test() ->
-    do_POST(plain, 2048, 1).
-
-single_2k_https_POST_test() ->
-    do_POST(ssl, 2048, 1).
-
-single_100k_http_POST_test() ->
-    do_POST(plain, 102400, 1).
-
-single_100k_https_POST_test() ->
-    do_POST(ssl, 102400, 1).
-
-multiple_100k_http_POST_test() ->
-    do_POST(plain, 102400, 3).
-
-multiple_100K_https_POST_test() ->
-    do_POST(ssl, 102400, 3).
-
-hundred_128_http_POST_test_() -> % note the underscore
-    {timeout, ?LARGE_TIMEOUT,
-     fun() -> ?assertEqual(ok, do_POST(plain, 128, 100)) end}.
-
-hundred_128_https_POST_test_() -> % note the underscore
-    {timeout, ?LARGE_TIMEOUT,
-     fun() -> ?assertEqual(ok, do_POST(ssl, 128, 100)) end}.
-
-do_GET(Transport, Times) ->
-    PathPrefix = "/whatever/",
-    ReplyPrefix = "You requested: ",
-    ServerFun = fun (Req) ->
-                        Reply = ReplyPrefix ++ Req:get(path),
-                        Req:ok({"text/plain", Reply})
-                end,
-    TestReqs = [begin
-                    Path = PathPrefix ++ integer_to_list(N),
-                    ExpectedReply = list_to_binary(ReplyPrefix ++ Path),
-                    #treq{path=Path, xreply=ExpectedReply}
-                end || N <- lists:seq(1, Times)],
-    ClientFun = new_client_fun('GET', TestReqs),
-    ok = with_server(Transport, ServerFun, ClientFun),
-    ok.
-
-do_POST(Transport, Size, Times) ->
-    ServerFun = fun (Req) ->
-                        Body = Req:recv_body(),
-                        Headers = [{"Content-Type", "application/octet-stream"}],
-                        Req:respond({201, Headers, Body})
-                end,
-    TestReqs = [begin
-                    Path = "/stuff/" ++ integer_to_list(N),
-                    Body = crypto:rand_bytes(Size),
-                    #treq{path=Path, body=Body, xreply=Body}
-                end || N <- lists:seq(1, Times)],
-    ClientFun = new_client_fun('POST', TestReqs),
-    ok = with_server(Transport, ServerFun, ClientFun),
-    ok.
-
-new_client_fun(Method, TestReqs) ->
-    fun (Transport, Port) ->
-            client_request(Transport, Port, Method, TestReqs)
-    end.
-
-client_request(Transport, Port, Method, TestReqs) ->
-    Opts = [binary, {active, false}, {packet, http}],
-    SockFun = case Transport of
-        plain ->
-            {ok, Socket} = gen_tcp:connect("127.0.0.1", Port, Opts),
-            fun (recv) ->
-                    gen_tcp:recv(Socket, 0);
-                ({recv, Length}) ->
-                    gen_tcp:recv(Socket, Length);
-                ({send, Data}) ->
-                    gen_tcp:send(Socket, Data);
-                ({setopts, L}) ->
-                    inet:setopts(Socket, L)
-            end;
-        ssl ->
-            {ok, Socket} = ssl:connect("127.0.0.1", Port, [{ssl_imp, new} | Opts]),
-            fun (recv) ->
-                    ssl:recv(Socket, 0);
-                ({recv, Length}) ->
-                    ssl:recv(Socket, Length);
-                ({send, Data}) ->
-                    ssl:send(Socket, Data);
-                ({setopts, L}) ->
-                    ssl:setopts(Socket, L)
-            end
-    end,
-    client_request(SockFun, Method, TestReqs).
-
-client_request(SockFun, _Method, []) ->
-    {the_end, {error, closed}} = {the_end, SockFun(recv)},
-    ok;
-client_request(SockFun, Method,
-               [#treq{path=Path, body=Body, xreply=ExReply} | Rest]) ->
-    Request = [atom_to_list(Method), " ", Path, " HTTP/1.1\r\n",
-               client_headers(Body, Rest =:= []),
-               "\r\n",
-               Body],
-    ok = SockFun({send, Request}),
-    case Method of
-        'GET' ->
-            {ok, {http_response, {1,1}, 200, "OK"}} = SockFun(recv);
-        'POST' ->
-            {ok, {http_response, {1,1}, 201, "Created"}} = SockFun(recv)
-    end,
-    ok = SockFun({setopts, [{packet, httph}]}),
-    {ok, {http_header, _, 'Server', _, "MochiWeb" ++ _}} = SockFun(recv),
-    {ok, {http_header, _, 'Date', _, _}} = SockFun(recv),
-    {ok, {http_header, _, 'Content-Type', _, _}} = SockFun(recv),
-    {ok, {http_header, _, 'Content-Length', _, ConLenStr}} = SockFun(recv),
-    ContentLength = list_to_integer(ConLenStr),
-    {ok, http_eoh} = SockFun(recv),
-    ok = SockFun({setopts, [{packet, raw}]}),
-    {payload, ExReply} = {payload, drain_reply(SockFun, ContentLength, <<>>)},
-    ok = SockFun({setopts, [{packet, http}]}),
-    client_request(SockFun, Method, Rest).
-
-client_headers(Body, IsLastRequest) ->
-    ["Host: localhost\r\n",
-     case Body of
-        <<>> ->
-            "";
-        _ ->
-            ["Content-Type: application/octet-stream\r\n",
-             "Content-Length: ", integer_to_list(byte_size(Body)), "\r\n"]
-     end,
-     case IsLastRequest of
-         true ->
-             "Connection: close\r\n";
-         false ->
-             ""
-     end].
-
-drain_reply(_SockFun, 0, Acc) ->
-    Acc;
-drain_reply(SockFun, Length, Acc) ->
-    Sz = erlang:min(Length, 1024),
-    {ok, B} = SockFun({recv, Sz}),
-    drain_reply(SockFun, Length - Sz, <<Acc/bytes, B/bytes>>).
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/package.mk b/rabbitmq-server/plugins-src/mochiweb-wrapper/package.mk
deleted file mode 100644 (file)
index dbbe133..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-APP_NAME:=mochiweb
-
-UPSTREAM_GIT:=https://github.com/rabbitmq/mochiweb.git
-UPSTREAM_REVISION:=680dba8a8a0dd8ee18d03bf814cfb2340bf3bbff
-RETAIN_ORIGINAL_VERSION:=true
-WRAPPER_PATCHES:=10-build-on-R12B-5.patch \
-                20-MAX_RECV_BODY.patch \
-                30-remove-crypto-ssl-dependencies.patch \
-                40-remove-compiler-syntax_tools-dependencies.patch \
-                50-remove-json.patch
-
-# internal.hrl is used by webmachine
-UPSTREAM_INCLUDE_DIRS+=$(CLONE_DIR)/src
-
-ORIGINAL_APP_FILE:=$(CLONE_DIR)/$(APP_NAME).app
-DO_NOT_GENERATE_APP_FILE=true
-
-define package_rules
-
-$(CLONE_DIR)/src/$(APP_NAME).app.src: $(CLONE_DIR)/.done
-
-$(ORIGINAL_APP_FILE): $(CLONE_DIR)/src/$(APP_NAME).app.src
-       cp $(CLONE_DIR)/src/$(APP_NAME).app.src $(ORIGINAL_APP_FILE)
-
-$(PACKAGE_DIR)+clean::
-       rm -rf $(ORIGINAL_APP_FILE)
-
-# This rule is run *before* the one in do_package.mk
-$(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done::
-       cp $(CLONE_DIR)/LICENSE $(PACKAGE_DIR)/LICENSE-MIT-Mochi
-
-$(CLONE_DIR)/ebin/mochifmt_records.beam: $(CLONE_DIR)/ebin/pmod_pt.beam
-
-$(CLONE_DIR)/ebin/mochifmt_std.beam: $(CLONE_DIR)/ebin/pmod_pt.beam
-
-$(CLONE_DIR)/ebin/mochifmt_request.beam: $(CLONE_DIR)/ebin/pmod_pt.beam
-
-$(CLONE_DIR)/ebin/mochifmt_response.beam: $(CLONE_DIR)/ebin/pmod_pt.beam
-
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/Makefile b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/package.mk b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/package.mk
deleted file mode 100644 (file)
index 0a1b2ea..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-RELEASABLE:=true
-APP_NAME=rabbitmq_amqp1_0
-DEPS:=rabbitmq-server rabbitmq-erlang-client
-STANDALONE_TEST_COMMANDS:=eunit:test(rabbit_amqp1_0_test,[verbose])
-WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/swiftmq/run-tests.sh
-
-FRAMING_HRL=$(PACKAGE_DIR)/include/rabbit_amqp1_0_framing.hrl
-FRAMING_ERL=$(PACKAGE_DIR)/src/rabbit_amqp1_0_framing0.erl
-CODEGEN=$(PACKAGE_DIR)/codegen.py
-CODEGEN_SPECS=$(PACKAGE_DIR)/spec/messaging.xml $(PACKAGE_DIR)/spec/security.xml $(PACKAGE_DIR)/spec/transport.xml $(PACKAGE_DIR)/spec/transactions.xml
-
-INCLUDE_HRLS+=$(FRAMING_HRL)
-SOURCE_ERLS+=$(FRAMING_ERL)
-
-define package_rules
-
-$(FRAMING_ERL): $(CODEGEN) $(CODEGEN_SPECS)
-       $(CODEGEN) erl $(CODEGEN_SPECS) > $$@
-
-$(FRAMING_HRL): $(CODEGEN) $(CODEGEN_SPECS)
-       $(CODEGEN) hrl $(CODEGEN_SPECS) > $$@
-
-$(PACKAGE_DIR)+clean::
-       rm -f $(FRAMING_HRL) $(FRAMING_ERL)
-
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/lib-java/junit.jar b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/lib-java/junit.jar
deleted file mode 100644 (file)
index 674d71e..0000000
Binary files a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/lib-java/junit.jar and /dev/null differ
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/Makefile b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/Makefile
deleted file mode 100644 (file)
index 0266a00..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-PROTON_VER=0.7
-PROTON_DIR=qpid-proton-$(PROTON_VER)
-PROTON_TARBALL=$(PROTON_DIR).tar.gz
-PROTON_URL=http://www.mirrorservice.org/sites/ftp.apache.org/qpid/proton/$(PROTON_VER)/$(PROTON_TARBALL)
-
-.PHONY: test
-
-test: build/lib
-       ant test
-
-build/lib: $(PROTON_TARBALL)
-       mkdir -p build/tmp
-       tar xvz -C build/tmp -f $(PROTON_TARBALL)
-       cd build/tmp/$(PROTON_DIR)/proton-j && mvn package
-       mkdir -p build/lib
-       cp build/tmp/$(PROTON_DIR)/proton-j/target/proton-j-$(PROTON_VER).jar build/lib
-       cp ../lib-java/*.jar build/lib
-
-clean:
-       rm -rf build $(PROTON_TARBALL)
-
-$(PROTON_TARBALL):
-       wget $(PROTON_URL)
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/build.xml b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/build.xml
deleted file mode 100644 (file)
index a5c50d4..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<project name="RabbitMQ AMQP 1.0 tests using Proton client" default="test">
-    <target name="test-build">
-        <mkdir dir="build/classes"/>
-
-        <javac srcdir="test" destdir="build/classes" debug="true">
-            <classpath>
-                <fileset dir="build/lib">
-                    <include name="**/*.jar"/>
-                </fileset>
-            </classpath>
-        </javac>
-    </target>
-
-    <target name="test" depends="test-build">
-        <mkdir dir="build/test-output"/>
-
-        <junit printSummary="withOutAndErr" fork="yes" failureproperty="test.failed">
-            <classpath>
-                <fileset dir="build/lib">
-                    <include name="**/*.jar"/>
-                </fileset>
-                <pathelement location="build/classes"/>
-            </classpath>
-            <formatter type="plain"/>
-            <test todir="build/test-output" name="com.rabbitmq.amqp1_0.tests.proton.ProtonTests"/>
-        </junit>
-        <fail message="Tests failed" if="test.failed" />
-    </target>
-</project>
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java
deleted file mode 100644 (file)
index a375900..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.rabbitmq.amqp1_0.tests.proton;
-
-import junit.framework.TestCase;
-import org.apache.qpid.proton.amqp.Binary;
-import org.apache.qpid.proton.amqp.messaging.Data;
-import org.apache.qpid.proton.message.Message;
-import org.apache.qpid.proton.message.impl.MessageImpl;
-import org.apache.qpid.proton.messenger.Messenger;
-import org.apache.qpid.proton.messenger.impl.MessengerImpl;
-
-public class ProtonTests extends TestCase {
-    public static final String ADDRESS = "amqp://localhost/amqp-1.0-test";
-    // This uses deprecated classes, yes. I took them from the examples provided...
-
-    public void testRoundTrip() throws Exception {
-        Messenger mng = new MessengerImpl();
-        mng.start();
-        Message msg = new MessageImpl();
-        msg.setAddress(ADDRESS);
-        msg.setSubject("hello");
-        msg.setContentType("application/octet-stream");
-        msg.setBody(new Data(new Binary("hello world".getBytes())));
-        mng.put(msg);
-        mng.send();
-
-        mng.subscribe(ADDRESS);
-        mng.recv();
-        Message msg2 = mng.get();
-        assertEquals(msg.getSubject(), msg2.getSubject());
-        assertEquals(msg.getContentType(), msg2.getContentType());
-        assertEquals(msg.getBody().toString(), msg2.getBody().toString());
-        mng.stop();
-    }
-}
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/src/rabbit_amqp1_0_test.erl b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/src/rabbit_amqp1_0_test.erl
deleted file mode 100644 (file)
index 2be29a2..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_amqp1_0_test).
-
--include("rabbit_amqp1_0.hrl").
--include_lib("eunit/include/eunit.hrl").
-
--import(rabbit_amqp1_0_util, [serial_add/2, serial_diff/2, serial_compare/2]).
-
-serial_arithmetic_test() ->
-    ?assertEqual(1, serial_add(0, 1)),
-    ?assertEqual(16#7fffffff, serial_add(0, 16#7fffffff)),
-    ?assertEqual(0, serial_add(16#ffffffff, 1)),
-    %% Cannot add more than 2 ^ 31 - 1
-    ?assertExit({out_of_bound_serial_addition, _, _},
-                serial_add(200, 16#80000000)),
-    ?assertEqual(1, serial_diff(1, 0)),
-    ?assertEqual(2, serial_diff(1, 16#ffffffff)),
-    ?assertEqual(-2, serial_diff(16#ffffffff, 1)),
-    ?assertExit({indeterminate_serial_diff, _, _},
-                serial_diff(0, 16#80000000)),
-    ?assertExit({indeterminate_serial_diff, _, _},
-                serial_diff(16#ffffffff, 16#7fffffff)),
-    passed.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/Makefile b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/Makefile
deleted file mode 100644 (file)
index 3a1c639..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-CLIENT_DIR=swiftmq_9_7_1_client
-CLIENT_PKG=$(CLIENT_DIR).tar.gz
-
-.PHONY: test
-
-test: build/lib
-       ant test
-
-build/lib: $(CLIENT_PKG)
-       mkdir -p build/tmp
-       tar -zx -f $(CLIENT_PKG) -C build/tmp
-       mkdir -p build/lib
-       mv build/tmp/$(CLIENT_DIR)/jars/*.jar build/lib
-       rm -rf build/tmp
-       cp ../lib-java/*.jar build/lib
-       (cd ../../../rabbitmq-java-client && ant dist)
-       cp ../../../rabbitmq-java-client/build/dist/rabbitmq-client.jar build/lib
-
-$(CLIENT_PKG):
-       @echo
-       @echo You need $(CLIENT_PKG) to run these tests. Unfortunately we can\'t
-       @echo redistribute it. Obtain it from the SwiftMQ website and place it
-       @echo in $(shell pwd).
-       @echo
-       @false
-
-clean:
-       rm -rf build
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/build.xml b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/build.xml
deleted file mode 100644 (file)
index 2152708..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<project name="RabbitMQ AMQP 1.0 tests using SwiftMQ client" default="test">
-    <target name="test-build">
-        <mkdir dir="build/classes"/>
-
-        <javac srcdir="test" destdir="build/classes" debug="true">
-            <classpath>
-                <fileset dir="build/lib">
-                    <include name="**/*.jar"/>
-                </fileset>
-            </classpath>
-        </javac>
-    </target>
-
-    <target name="test" depends="test-build">
-        <mkdir dir="build/test-output"/>
-
-        <junit printSummary="withOutAndErr" fork="yes" failureproperty="test.failed">
-            <classpath>
-                <fileset dir="build/lib">
-                    <include name="**/*.jar"/>
-                </fileset>
-                <pathelement location="build/classes"/>
-            </classpath>
-            <formatter type="plain"/>
-            <test todir="build/test-output" name="com.rabbitmq.amqp1_0.tests.swiftmq.SwiftMQTests"/>
-        </junit>
-        <fail message="Tests failed" if="test.failed" />
-    </target>
-</project>
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/run-tests.sh b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/run-tests.sh
deleted file mode 100755 (executable)
index 70fab43..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh -e
-make -C $(dirname $0) test
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java
deleted file mode 100644 (file)
index 2db131f..0000000
+++ /dev/null
@@ -1,385 +0,0 @@
-package com.rabbitmq.amqp1_0.tests.swiftmq;
-
-import com.rabbitmq.client.*;
-import com.swiftmq.amqp.AMQPContext;
-import com.swiftmq.amqp.v100.client.*;
-import com.swiftmq.amqp.v100.client.Connection;
-import com.swiftmq.amqp.v100.client.Consumer;
-import com.swiftmq.amqp.v100.generated.messaging.message_format.*;
-import com.swiftmq.amqp.v100.generated.messaging.message_format.Properties;
-import com.swiftmq.amqp.v100.messaging.AMQPMessage;
-import com.swiftmq.amqp.v100.types.*;
-import junit.framework.TestCase;
-
-import java.io.ByteArrayOutputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.util.*;
-
-public class SwiftMQTests extends TestCase {
-    private static final String host = "localhost";
-    private static final int port = 5672;
-    private static final int INBOUND_WINDOW = 100;
-    private static final int OUTBOUND_WINDOW = 100;
-    private static final int CONSUMER_LINK_CREDIT = 200;
-    private static final String QUEUE = "/queue/test";
-
-    private AMQPMessage msg() {
-        AMQPMessage m = new AMQPMessage();
-        m.addData(data());
-        return m;
-    }
-
-    private Data data() {
-        return new Data("Hello World".getBytes());
-    }
-
-    public void testRoundTrip() throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        Producer p = s.createProducer(QUEUE, QoS.AT_LEAST_ONCE);
-        p.send(msg());
-        p.close(); // Settlement happens here
-        Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        AMQPMessage m = c.receive();
-        m.accept();
-        assertEquals(1, m.getData().size());
-        assertEquals(data(), m.getData().get(0));
-        conn.close();
-    }
-
-    public void testMessageFragmentation()
-            throws UnsupportedProtocolVersionException, AMQPException, AuthenticationException, IOException {
-        fragmentation(512L,  512);
-        fragmentation(512L,  600);
-        fragmentation(512L,  1024);
-        fragmentation(1024L, 1024);
-    }
-
-    public void fragmentation(long FrameSize, int PayloadSize)
-            throws UnsupportedProtocolVersionException, AMQPException, AuthenticationException, IOException {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.setMaxFrameSize(FrameSize);
-        conn.connect();
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-
-        Producer p = s.createProducer(QUEUE, QoS.AT_LEAST_ONCE);
-        AMQPMessage msg = new AMQPMessage();
-        msg.addData(new Data(new byte [PayloadSize]));
-        p.send(msg);
-        p.close();
-
-        Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        AMQPMessage m = c.receive();
-        m.accept();
-        c.close();
-        assertEquals(PayloadSize, m.getData().get(0).getValue().length);
-        conn.close();
-    }
-
-    public void testMessageAnnotations() throws Exception {
-        decorationTest(new DecorationProtocol() {
-            @Override
-            public void decorateMessage(AMQPMessage msg, Map<AMQPString, AMQPType> m) throws IOException {
-                msg.setMessageAnnotations(new MessageAnnotations(m));
-            }
-            @Override
-            public Map<AMQPType, AMQPType> getDecoration(AMQPMessage msg) throws IOException {
-                return msg.getMessageAnnotations().getValue();
-            }
-        }, annotationMap());
-    }
-
-    public void testFooter() throws Exception {
-        decorationTest(new DecorationProtocol() {
-            @Override
-            public void decorateMessage(AMQPMessage msg, Map<AMQPString, AMQPType> m) throws IOException {
-                msg.setFooter(new Footer(m));
-            }
-            @Override
-            public Map<AMQPType, AMQPType> getDecoration(AMQPMessage msg) throws IOException {
-                return msg.getFooter().getValue();
-            }
-        }, annotationMap());
-    }
-
-    public void testDataTypes() throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        Producer p = s.createProducer(QUEUE, QoS.AT_LEAST_ONCE);
-        AMQPMessage msg = new AMQPMessage();
-
-        List<AMQPType> al = new ArrayList<AMQPType>();
-        al.add(new AMQPBoolean(true));
-        al.add(new AMQPByte(Byte.MAX_VALUE));
-        al.add(new AMQPChar(Character.CURRENCY_SYMBOL));
-        al.add(new AMQPDecimal64(BigDecimal.TEN));
-        al.add(new AMQPDouble(Double.NaN));
-        al.add(new AMQPInt(Integer.MIN_VALUE));
-        al.add(new AMQPNull());
-        al.add(new AMQPString("\uFFF9"));
-        al.add(new AMQPSymbol(new String(new char[256])));
-        al.add(new AMQPTimestamp(Long.MAX_VALUE));
-        al.add(new AMQPUuid(System.currentTimeMillis(), Long.MIN_VALUE));
-        al.add(new AMQPUnsignedShort(0));
-        al.add(new AMQPArray(AMQPBoolean.FALSE.getCode(), new AMQPBoolean[]{}));
-        al.add(new AmqpSequence(new ArrayList<AMQPType>()));
-        AmqpSequence seq = new AmqpSequence(al);
-        AmqpValue val = new AmqpValue(seq);
-        msg.setAmqpValue(val);
-
-        p.send(msg);
-        p.close();
-        Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        AMQPMessage recvMsg = c.receive();
-        recvMsg.accept();
-
-        assertEquals(val.getValue().getValueString(), recvMsg.getAmqpValue().getValue().getValueString());
-        conn.close();
-    }
-
-    public void testAtMostOnce() throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        Producer p = s.createProducer(QUEUE, QoS.AT_MOST_ONCE);
-        p.send(msg());
-        p.close();
-
-        Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_MOST_ONCE, false, null);
-        AMQPMessage m = c.receive();
-        assertTrue(m.isSettled());
-
-        s.close();
-        s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_MOST_ONCE, false, null);
-        assertNull(get(c));
-        conn.close();
-    }
-
-    public void testReject() throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        Producer p = s.createProducer(QUEUE, QoS.AT_LEAST_ONCE);
-        p.send(msg());
-        p.close();
-
-        Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        AMQPMessage m = c.receive();
-        m.reject();
-        assertNull(get(c));
-        conn.close();
-    }
-
-    public void testRedelivery() throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        Producer p = s.createProducer(QUEUE, QoS.AT_MOST_ONCE);
-        p.send(msg());
-        p.close();
-
-        Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        AMQPMessage m1 = c.receive();
-        assertTrue(m1.getHeader().getFirstAcquirer().getValue());
-        assertFalse(m1.isSettled());
-
-        s.close();
-        s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        AMQPMessage m2 = c.receive();
-        m2.accept();
-
-        assertTrue(compareMessageData(m1, m2));
-        assertFalse(m2.getHeader().getFirstAcquirer().getValue());
-        assertNull(get(c));
-        conn.close();
-    }
-
-    public void testRouting() throws Exception {
-        route("test",                      QUEUE,                  "",         true);
-        route(QUEUE,                      "test",                  "",         true);
-        route("test",                     "test",                  "",         true);
-
-        route("/topic/#.c.*",              "/topic/a.b.c.d",        "",        true);
-        route("/topic/#.c.*",              "/exchange/amq.topic",   "a.b.c.d", true);
-        route("/exchange/amq.topic/#.y.*", "/topic/w.x.y.z",        "",        true);
-        route("/exchange/amq.topic/#.y.*", "/exchange/amq.topic",   "w.x.y.z", true);
-
-        route("/exchange/amq.fanout/",     "/exchange/amq.fanout",  "",        true);
-        route("/exchange/amq.direct/",     "/exchange/amq.direct",  "",        true);
-        route("/exchange/amq.direct/a",    "/exchange/amq.direct",  "a",       true);
-
-        route("/amq/queue/test",           QUEUE,                   "",        true);
-        route(QUEUE,                       "/amq/queue/test",       "",        true);
-        route("/amq/queue/test",           "/amq/queue/test",       "",        true);
-
-        route("/exchange/amq.direct/b",    "/exchange/amq.direct",  "a",       false);
-        route(QUEUE,                       "/exchange/amq.fanout",  "",        false);
-        route(QUEUE,                       "/exchange/amq.headers", "",        false);
-        emptyQueue(QUEUE);
-    }
-
-    public void testRoutingInvalidRoutes() throws Exception {
-        ConnectionFactory factory = new ConnectionFactory();
-        com.rabbitmq.client.Connection connection = factory.newConnection();
-        Channel channel = connection.createChannel();
-        channel.queueDeclare("transient", false, false, false, null);
-        connection.close();
-
-        for (String dest : Arrays.asList("/exchange/missing", "/queue/transient", "/fruit/orange")) {
-            routeInvalidSource(dest);
-            routeInvalidTarget(dest);
-        }
-    }
-
-    private void emptyQueue(String q) throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        Consumer c = s.createConsumer(q, CONSUMER_LINK_CREDIT, QoS.AT_MOST_ONCE, false, null);
-        AMQPMessage m;
-        while ((m = get(c)) != null);
-        conn.close();
-    }
-
-    // Whatever Consumer.receiveNoWait() does, it does not involve the drain
-    // flag, so it's clearly more a case of "have any messages arrived?" rather
-    // than "has the queue got any messages?" Therefore we have an icky timeout
-    // to give the server time to deliver messages. Really we want a way to use
-    // drain...
-    private AMQPMessage get(Consumer c) {
-        return c.receive(100);
-    }
-
-    private void route(String consumerSource, String producerTarget, String routingKey, boolean succeed) throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-
-        Consumer c = s.createConsumer(consumerSource, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        Producer p = s.createProducer(producerTarget, QoS.AT_LEAST_ONCE);
-        AMQPMessage msg = msg();
-        AmqpValue sentinel = new AmqpValue(new AMQPDouble(Math.random()));
-        msg.setAmqpValue(sentinel);
-        Properties props = new Properties();
-        props.setSubject(new AMQPString(routingKey));
-        msg.setProperties(props);
-        p.send(msg);
-
-        if (succeed) {
-            AMQPMessage m = c.receive();
-            assertNotNull(m);
-            assertEquals(sentinel.getValue().getValueString(), m.getAmqpValue().getValue().getValueString());
-            m.accept();
-        } else {
-            assertNull(get(c));
-        }
-        c.close();
-        p.close();
-        conn.close();
-    }
-
-    private void routeInvalidSource(String consumerSource) throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        try {
-            Consumer c = s.createConsumer(consumerSource, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-            c.close();
-            fail("Source '" + consumerSource + "' should fail");
-        }
-        catch (Exception e) {
-            // no-op
-        }
-        finally {
-            conn.close();
-        }
-    }
-
-    private void routeInvalidTarget(String producerTarget) throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        try {
-            Producer p = s.createProducer(producerTarget, QoS.AT_LEAST_ONCE);
-            p.close();
-            fail("Target '" + producerTarget + "' should fail");
-        }
-        catch (Exception e) {
-            // no-op
-        }
-        finally {
-            conn.close();
-        }
-    }
-
-    // TODO: generalise to a comparison of all immutable parts of messages
-    private boolean compareMessageData(AMQPMessage m1, AMQPMessage m2) throws IOException {
-        ByteArrayOutputStream b1 = new ByteArrayOutputStream();
-        ByteArrayOutputStream b2 = new ByteArrayOutputStream();
-
-        m1.getData().get(0).writeContent(new DataOutputStream(b1));
-        m2.getData().get(0).writeContent(new DataOutputStream(b2));
-        return Arrays.equals(b1.toByteArray(), b2.toByteArray());
-    }
-
-    private void decorationTest(DecorationProtocol d, Map<AMQPString, AMQPType> map) throws Exception {
-        AMQPContext ctx = new AMQPContext(AMQPContext.CLIENT);
-        Connection conn = new Connection(ctx, host, port, false);
-        conn.connect();
-        Session s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW);
-        Producer p = s.createProducer(QUEUE, QoS.AT_LEAST_ONCE);
-        AMQPMessage msg = msg();
-
-        d.decorateMessage(msg, map);
-        p.send(msg);
-        p.close();
-        Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null);
-        AMQPMessage recvMsg = c.receive();
-        recvMsg.accept();
-
-        compareMaps(map, d.getDecoration(recvMsg));
-        conn.close();
-    }
-
-    private void compareMaps(Map<AMQPString, AMQPType> m1, Map<AMQPType, AMQPType> m2){
-        Set e1 = m1.entrySet();
-        Set e2 = m2.entrySet();
-        assertTrue(e1.containsAll(e2));
-        assertTrue(e2.containsAll(e1));
-    }
-
-    private Map<AMQPString, AMQPType> annotationMap() throws IOException {
-        Map<AMQPString, AMQPType> annotations = new HashMap<AMQPString, AMQPType>();
-        // the spec allows keys to be symbol or ulong only, but the library only allows string
-        annotations.put(new AMQPString("key1"), new AMQPString("value1"));
-        annotations.put(new AMQPString("key2"), new AMQPString("value2"));
-        return annotations;
-    }
-
-    private interface DecorationProtocol {
-        void decorateMessage(AMQPMessage msg, Map<AMQPString, AMQPType> m) throws IOException;
-        Map<AMQPType, AMQPType> getDecoration(AMQPMessage _) throws IOException;
-    }
-
-}
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.travis.yml
deleted file mode 100644 (file)
index 6b022a8..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-sudo: true
-language: erlang
-notifications:
-  email:
-    - alerts@rabbitmq.com
-addons:
-  apt:
-    packages:
-      - slapd
-      - ldap-utils
-      - xsltproc
-otp_release:
-  - "R16B03-1"
-  - "17.5"
-  - "18.0"
-install:
-  - if [ ! -d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi
-  - cd $HOME/rabbitmq-public-umbrella
-  - make co
-  - make up
-services:
-  - slapd
-before_script:
-  - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG"
-  - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]}
-  - rm -rf ${TEST_DIR}
-  - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR}
-  - cd ${TEST_DIR}
-  - ./example/setup.sh
-script: make test
-before_cache:
-  - rm -rf ${TEST_DIR}
-  - cd $HOME
-cache:
-  apt: true
-  directories:
-    - $HOME/rabbitmq-public-umbrella
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/Makefile b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README
deleted file mode 100644 (file)
index 278cda8..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-Requirements
-============
-
-You can build and install it like any other plugin (see
-http://www.rabbitmq.com/plugin-development.html).
-
-Documentation
-=============
-
-See http://www.rabbitmq.com/ldap.html
-
-Limitations
-===========
-
-Currently this plugin is rather chatty with LDAP connections when
-doing authorisation over LDAP - every time RabbitMQ needs to do an
-authorisation query it starts a new LDAP connection. However, RabbitMQ
-does have a per-channel authorisation cache, so this is not too awful.
-
-There might need to be more types of queries.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-authorisation b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-authorisation
deleted file mode 100644 (file)
index 6e0abe0..0000000
+++ /dev/null
@@ -1 +0,0 @@
-See http://www.rabbitmq.com/ldap.html
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-tests b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-tests
deleted file mode 100644 (file)
index eac53c7..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-The tests *require* a locally installed LDAP server with some
-predefined objects inside. If there's no LDAP server running on port
-389, they will be skipped.
-
-On Debian / Ubuntu you can just:
-
-$ ./example/setup.sh
-$ make test
-
- - but be aware that this will wipe out your local OpenLDAP installation.
-
-Poke around in example/ if using any other distro, you can probably
-make it work.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/etc/rabbit-test.config b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/etc/rabbit-test.config
deleted file mode 100644 (file)
index b65d9c4..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-%% -*- erlang -*-
-[{rabbit, [{default_vhost, <<"test">>}]},
- {rabbitmq_auth_backend_ldap,
-  [ {servers,            ["localhost"]},
-    {user_dn_pattern,    "cn=${username},ou=People,dc=example,dc=com"},
-    {other_bind,         anon},
-    {use_ssl,            false},
-    {port,               389},
-    {log,                true},
-    {tag_queries,        [{administrator, {constant, false}}]},
-    {vhost_access_query, {exists, "ou=${vhost},ou=vhosts,dc=example,dc=com"}},
-    {resource_access_query,
-     {for, [{resource, exchange,
-             {for, [{permission, configure,
-                     {in_group, "cn=wheel,ou=groups,dc=example,dc=com"}
-                    },
-                    {permission, write, {constant, true}},
-                    {permission, read,
-                     {match, {string, "${name}"},
-                             {string, "^xch-${username}-.*"}}
-                    }
-                   ]}},
-            {resource, queue,
-             {for, [{permission, configure,
-                     {match, {attribute, "${user_dn}", "description"},
-                             {string, "can-declare-queues"}}
-                    },
-                    {permission, write, {constant, true}},
-                    {permission, read,
-                     {'or',
-                      [{'and',
-                        [{equals, "${name}", "test1"},
-                         {equals, "${username}", "Simon MacMullen"}]},
-                       {'and',
-                        [{equals, "${name}", "test2"},
-                         {'not', {equals, "${username}", "Mike Bridgen"}}]}
-                      ]}}
-                   ]}}
-            ]}},
-    {tag_queries, [{administrator, {constant, false}},
-                   {management,    {constant, false}}]}
-  ]}
-].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/README b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/README
deleted file mode 100644 (file)
index d2969ac..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-This is a very simple example, designed to be set up with the modern
-Debian / Ubuntu packaging of OpenLDAP. Running setup.sh after "apt-get
-install slapd" will wipe out any existing LDAP database and get you:
-
-* A domain
-* An admin user
-* A couple of normal users
-* A group containing the users
-* An OU representing a vhost
-
-These correspond to the examples mentioned in the documentation.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/groups.ldif b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/groups.ldif
deleted file mode 100644 (file)
index 5a5c8d0..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-dn: ou=groups,dc=example,dc=com
-objectclass:organizationalunit
-ou: groups
-
-dn: cn=wheel,ou=groups,dc=example,dc=com
-objectclass: groupOfNames
-cn: wheel
-member: cn=Simon MacMullen,ou=people,dc=example,dc=com
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/people.ldif b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/people.ldif
deleted file mode 100644 (file)
index 444879f..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-dn: dc=example,dc=com
-objectClass: top
-objectClass: dcObject
-objectclass: organization
-o: example.com
-dc: example
-description: Example
-
-dn: ou=people,dc=example,dc=com
-objectClass: organizationalUnit
-ou: people
-
-dn: cn=Simon MacMullen,ou=people,dc=example,dc=com
-objectClass: person
-cn: Simon MacMullen
-sn: MacMullen
-userPassword: password
-description: can-declare-queues
-
-dn: cn=Mike Bridgen,ou=people,dc=example,dc=com
-objectClass: person
-cn: Mike Bridgen
-sn: Bridgen
-userPassword: password
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/rabbit.ldif b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/rabbit.ldif
deleted file mode 100644 (file)
index e43eac9..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-dn: ou=vhosts,dc=example,dc=com
-objectClass: organizationalUnit
-ou: vhosts
-
-dn: ou=test,ou=vhosts,dc=example,dc=com
-objectClass: top
-objectClass: organizationalUnit
-ou: test
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/setup.sh b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/setup.sh
deleted file mode 100755 (executable)
index bca4dcb..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh -e
-
-# Based on instructions found at
-# http://ubuntuforums.org/showthread.php?p=8161118#post8161118
-# - yes that does seem to be the most authoritative place.
-
-sudo apt-get --yes purge slapd
-sudo rm -rf /var/lib/ldap
-sudo apt-get --yes install slapd ldap-utils
-sleep 1
-
-DIR=$(dirname $0)
-
-sudo ldapadd -Y EXTERNAL -H ldapi:/// -f ${DIR}/global.ldif
-ldapadd -x -D cn=admin,dc=example,dc=com -w admin -f ${DIR}/people.ldif
-ldapadd -x -D cn=admin,dc=example,dc=com -w admin -f ${DIR}/groups.ldif
-ldapadd -x -D cn=admin,dc=example,dc=com -w admin -f ${DIR}/rabbit.ldif
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/package.mk b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/package.mk
deleted file mode 100644 (file)
index 02c22ee..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-server rabbitmq-erlang-client eldap-wrapper
-
-ifeq ($(shell nc -z localhost 389 && echo true),true)
-WITH_BROKER_TEST_COMMANDS:=eunit:test([rabbit_auth_backend_ldap_unit_test,rabbit_auth_backend_ldap_test],[verbose])
-WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/etc/rabbit-test
-else
-$(warning Not running LDAP tests; no LDAP server found on localhost)
-endif
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_test.erl b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_test.erl
deleted file mode 100644 (file)
index c340d68..0000000
+++ /dev/null
@@ -1,250 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_auth_backend_ldap_test).
-
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--define(SIMON_NAME, "Simon MacMullen").
--define(MIKEB_NAME, "Mike Bridgen").
--define(VHOST, "test").
-
--define(SIMON, #amqp_params_network{username     = << ?SIMON_NAME >>,
-                                    password     = <<"password">>,
-                                    virtual_host = << ?VHOST >>}).
-
--define(MIKEB, #amqp_params_network{username     = << ?MIKEB_NAME >>,
-                                    password     = <<"password">>,
-                                    virtual_host = << ?VHOST >>}).
-
-%%--------------------------------------------------------------------
-
-ldap_only_test_() ->
-    { setup,
-      fun () -> ok = application:set_env(rabbit, auth_backends,
-          [rabbit_auth_backend_ldap]) end,
-      fun (_) -> ok = application:unset_env(rabbit, auth_backends) end,
-      [ {"LDAP Login", login()},
-        {"LDAP In group", in_group()},
-        {"LDAP Constant", const()},
-        {"LDAP String match", string_match()},
-        {"LDAP Boolean check", boolean_logic()},
-        {"LDAP Tags", tag_check([])}
-    ]}.
-
-ldap_and_internal_test_() ->
-    { setup,
-      fun () ->
-          ok = application:set_env(rabbit, auth_backends,
-              [{rabbit_auth_backend_ldap, rabbit_auth_backend_internal}]),
-          ok = control_action(add_user, [ ?SIMON_NAME, ""]),
-          ok = control_action(set_permissions, [ ?SIMON_NAME, "prefix-.*", "prefix-.*", "prefix-.*"]),
-          ok = control_action(set_user_tags, [ ?SIMON_NAME, "management", "foo"]),
-          ok = control_action(add_user, [ ?MIKEB_NAME, ""]),
-          ok = control_action(set_permissions, [ ?MIKEB_NAME, "", "", ""])
-      end,
-      fun (_) ->
-          ok = application:unset_env(rabbit, auth_backends),
-          ok = control_action(delete_user, [ ?SIMON_NAME ]),
-          ok = control_action(delete_user, [ ?MIKEB_NAME ])
-      end,
-      [ {"LDAP&Internal Login", login()},
-        {"LDAP&Internal Permissions", permission_match()},
-        {"LDAP&Internal Tags", tag_check([management, foo])}
-    ]}.
-
-internal_followed_ldap_and_internal_test_() ->
-    { setup,
-      fun () ->
-          ok = application:set_env(rabbit, auth_backends,
-              [rabbit_auth_backend_internal, {rabbit_auth_backend_ldap, rabbit_auth_backend_internal}]),
-          ok = control_action(add_user, [ ?SIMON_NAME, ""]),
-          ok = control_action(set_permissions, [ ?SIMON_NAME, "prefix-.*", "prefix-.*", "prefix-.*"]),
-          ok = control_action(set_user_tags, [ ?SIMON_NAME, "management", "foo"]),
-          ok = control_action(add_user, [ ?MIKEB_NAME, ""]),
-          ok = control_action(set_permissions, [ ?MIKEB_NAME, "", "", ""])
-      end,
-      fun (_) ->
-          ok = application:unset_env(rabbit, auth_backends),
-          ok = control_action(delete_user, [ ?SIMON_NAME ]),
-          ok = control_action(delete_user, [ ?MIKEB_NAME ])
-      end,
-      [ {"Internal, LDAP&Internal Login", login()},
-        {"Internal, LDAP&Internal Permissions", permission_match()},
-        {"Internal, LDAP&Internal Tags", tag_check([management, foo])}
-    ]}.
-
-
-%%--------------------------------------------------------------------
-
-login() ->
-    [test_login(Env, L, case {LGood, EnvGood} of
-                            {good, good} -> fun succ/1;
-                            _            -> fun fail/1
-                        end) || {LGood, L}     <- logins(),
-                                {EnvGood, Env} <- login_envs()].
-
-logins() ->
-    [{bad, #amqp_params_network{}},
-     {bad, #amqp_params_network{username = <<"Simon MacMullen">>}},
-     {bad, #amqp_params_network{username = <<"Simon MacMullen">>,
-                                password = <<"password">>}},
-     {good, ?SIMON},
-     {good, ?MIKEB}].
-
-login_envs() ->
-    [{good, base_login_env()},
-     {good, dn_lookup_pre_bind_env()},
-     {good, other_bind_admin_env()},
-     {good, other_bind_anon_env()},
-     {bad, other_bind_broken_env()}].
-
-base_login_env() ->
-    [{user_dn_pattern,    "cn=${username},ou=People,dc=example,dc=com"},
-     {dn_lookup_attribute, none},
-     {dn_lookup_base,      none},
-     {dn_lookup_bind,      as_user},
-     {other_bind,          as_user}].
-
-%% TODO configure OpenLDAP to allow a dn_lookup_post_bind_env()
-dn_lookup_pre_bind_env() ->
-    [{user_dn_pattern,    "${username}"},
-     {dn_lookup_attribute, "cn"},
-     {dn_lookup_base,      "OU=People,DC=example,DC=com"},
-     {dn_lookup_bind,      {"cn=admin,dc=example,dc=com", "admin"}}].
-
-other_bind_admin_env() ->
-    [{other_bind, {"cn=admin,dc=example,dc=com", "admin"}}].
-
-other_bind_anon_env() ->
-    [{other_bind, anon}].
-
-other_bind_broken_env() ->
-    [{other_bind, {"cn=admin,dc=example,dc=com", "admi"}}].
-
-test_login(Env, Login, ResultFun) ->
-    ?_test(try
-               set_env(Env),
-               ResultFun(Login)
-           after
-               set_env(base_login_env())
-           end).
-
-set_env(Env) ->
-    [application:set_env(rabbitmq_auth_backend_ldap, K, V) || {K, V} <- Env].
-
-succ(Login) -> ?assertMatch({ok, _}, amqp_connection:start(Login)).
-fail(Login) -> ?assertMatch({error, _}, amqp_connection:start(Login)).
-
-%%--------------------------------------------------------------------
-
-in_group() ->
-    X = [#'exchange.declare'{exchange = <<"test">>}],
-    test_resource_funs([{?SIMON, X, ok},
-                         {?MIKEB, X, fail}]).
-
-const() ->
-    Q = [#'queue.declare'{queue = <<"test">>}],
-    test_resource_funs([{?SIMON, Q, ok},
-                        {?MIKEB, Q, fail}]).
-
-string_match() ->
-    B = fun(N) ->
-                [#'exchange.declare'{exchange = N},
-                 #'queue.declare'{queue = <<"test">>},
-                 #'queue.bind'{exchange = N, queue = <<"test">>}]
-        end,
-    test_resource_funs([{?SIMON, B(<<"xch-Simon MacMullen-abc123">>), ok},
-                        {?SIMON, B(<<"abc123">>),                     fail},
-                        {?SIMON, B(<<"xch-Someone Else-abc123">>),    fail}]).
-
-boolean_logic() ->
-    Q1 = [#'queue.declare'{queue = <<"test1">>},
-          #'basic.consume'{queue = <<"test1">>}],
-    Q2 = [#'queue.declare'{queue = <<"test2">>},
-          #'basic.consume'{queue = <<"test2">>}],
-    [test_resource_fun(PTR) || PTR <- [{?SIMON, Q1, ok},
-                                       {?SIMON, Q2, ok},
-                                       {?MIKEB, Q1, fail},
-                                       {?MIKEB, Q2, fail}]].
-
-permission_match() ->
-    B = fun(N) ->
-                [#'exchange.declare'{exchange = N},
-                 #'queue.declare'{queue = <<"prefix-test">>},
-                 #'queue.bind'{exchange = N, queue = <<"prefix-test">>}]
-        end,
-    test_resource_funs([{?SIMON, B(<<"prefix-abc123">>),              ok},
-                        {?SIMON, B(<<"abc123">>),                     fail},
-                        {?SIMON, B(<<"xch-Simon MacMullen-abc123">>), fail}]).
-
-tag_check(Tags) ->
-    fun() ->
-            {ok, User} = rabbit_access_control:check_user_pass_login(
-                        << ?SIMON_NAME >>, <<"password">>),
-            ?assertEqual(Tags, User#user.tags)
-    end.
-
-
-%%--------------------------------------------------------------------
-
-test_resource_funs(PTRs) -> [test_resource_fun(PTR) || PTR <- PTRs].
-
-test_resource_fun({Person, Things, Result}) ->
-    fun() ->
-            {ok, Conn} = amqp_connection:start(Person),
-            {ok, Ch} = amqp_connection:open_channel(Conn),
-            ?assertEqual(Result,
-                         try
-                             [amqp_channel:call(Ch, T) || T <- Things],
-                             amqp_connection:close(Conn),
-                             ok
-                         catch exit:_ -> fail
-                         end)
-    end.
-
-control_action(Command, Args) ->
-    control_action(Command, node(), Args, default_options()).
-
-control_action(Command, Args, NewOpts) ->
-    control_action(Command, node(), Args,
-                   expand_options(default_options(), NewOpts)).
-
-control_action(Command, Node, Args, Opts) ->
-    case catch rabbit_control_main:action(
-                 Command, Node, Args, Opts,
-                 fun (Format, Args1) ->
-                         io:format(Format ++ " ...~n", Args1)
-                 end) of
-        ok ->
-            io:format("done.~n"),
-            ok;
-        Other ->
-            io:format("failed.~n"),
-            Other
-    end.
-
-default_options() -> [{"-p", ?VHOST}, {"-q", "false"}].
-
-expand_options(As, Bs) ->
-    lists:foldl(fun({K, _}=A, R) ->
-                        case proplists:is_defined(K, R) of
-                            true -> R;
-                            false -> [A | R]
-                        end
-                end, Bs, As).
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl
deleted file mode 100644 (file)
index 47223f9..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_auth_backend_ldap_unit_test).
-
--include_lib("eunit/include/eunit.hrl").
-
-fill_test() ->
-    F = fun(Fmt, Args, Res) ->
-                ?assertEqual(Res, rabbit_auth_backend_ldap_util:fill(Fmt, Args))
-        end,
-    F("x${username}x", [{username,  "ab"}],     "xabx"),
-    F("x${username}x", [{username,  ab}],       "xabx"),
-    F("x${username}x", [{username,  <<"ab">>}], "xabx"),
-    F("x${username}x", [{username,  ""}],       "xx"),
-    F("x${username}x", [{fusername, "ab"}],     "x${username}x"),
-    F("x${usernamex",  [{username,  "ab"}],     "x${usernamex"),
-    F("x${username}x", [{username,  "a\\b"}],   "xa\\bx"),
-    F("x${username}x", [{username,  "a&b"}],    "xa&bx"),
-    ok.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/Makefile b/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/package.mk b/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/package.mk
deleted file mode 100644 (file)
index baa4c03..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-server rabbitmq-erlang-client
diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.travis.yml
deleted file mode 100644 (file)
index 09fbd63..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-sudo: false
-language: erlang
-addons:
-  apt:
-    packages:
-      - xsltproc
-otp_release:
-  - R16B03-1
-  - 17.5
-  - 18.0
-install:
-  - if [ ! -d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi
-  - cd $HOME/rabbitmq-public-umbrella
-  - make co
-  - make up
-before_script:
-  - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG"
-  - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]}
-  - rm -rf ${TEST_DIR}
-  - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR}
-  - cd ${TEST_DIR}
-script: make test
-before_cache:
-  - rm -rf ${TEST_DIR}
-  - cd $HOME
-cache:
-  apt: true
-  directories:
-    - $HOME/rabbitmq-public-umbrella
-notifications:
-  email:
-    - alerts@rabbitmq.com
diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/Makefile b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/README.md b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/README.md
deleted file mode 100644 (file)
index 2554d53..0000000
+++ /dev/null
@@ -1,137 +0,0 @@
-# RabbitMQ Consistent Hash Exchange Type
-
-This plugin adds a consistent-hash exchange type to RabbitMQ.
-
-In various scenarios, you may wish to ensure that messages sent to an
-exchange are consistently and equally distributed across a number of
-different queues based on the routing key of the message (or a
-nominated header, see "Routing on a header" below). You could arrange
-for this to occur yourself by using a direct or topic exchange,
-binding queues to that exchange and then publishing messages to that
-exchange that match the various binding keys.
-
-However, arranging things this way can be problematic:
-
-1. It is difficult to ensure that all queues bound to the exchange
-will receive a (roughly) equal number of messages without baking in to
-the publishers quite a lot of knowledge about the number of queues and
-their bindings.
-
-2. If the number of queues changes, it is not easy to ensure that the
-new topology still distributes messages between the different queues
-evenly.
-
-[Consistent Hashing](http://en.wikipedia.org/wiki/Consistent_hashing)
-is a hashing technique whereby each bucket appears at multiple points
-throughout the hash space, and the bucket selected is the nearest
-higher (or lower, it doesn't matter, provided it's consistent) bucket
-to the computed hash (and the hash space wraps around). The effect of
-this is that when a new bucket is added or an existing bucket removed,
-only a very few hashes change which bucket they are routed to.
-
-In the case of Consistent Hashing as an exchange type, the hash is
-calculated from the hash of the routing key of each message
-received. Thus messages that have the same routing key will have the
-same hash computed, and thus will be routed to the same queue,
-assuming no bindings have changed.
-
-When you bind a queue to a consistent-hash exchange, the binding key
-is a number-as-a-string which indicates the number of points in the
-hash space at which you wish the queue to appear. The actual points
-are generated randomly.
-
-So, if you wish for queue A to receive twice as many messages as queue
-B, then you bind the queue A with a binding key of twice the number
-(as a string -- binding keys are always strings) of the binding key of
-the binding to queue B.
-
-Each message gets delivered to at most one queue. Normally, each
-message gets delivered to exactly one queue, but there is a race
-between the determination of which queue to send a message to, and the
-deletion/death of that queue that does permit the possibility of the
-message being sent to a queue which then disappears before the message
-is processed. Hence in general, at most one queue.
-
-The exchange type is "x-consistent-hash".
-
-Here is an example using the Erlang client:
-
-    -include_lib("amqp_client/include/amqp_client.hrl").
-    
-    test() ->
-        {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-        {ok, Chan} = amqp_connection:open_channel(Conn),
-        Queues = [<<"q0">>, <<"q1">>, <<"q2">>, <<"q3">>],
-        amqp_channel:call(Chan,
-                          #'exchange.declare' {
-                            exchange = <<"e">>, type = <<"x-consistent-hash">>
-                          }),
-        [amqp_channel:call(Chan, #'queue.declare' { queue = Q }) || Q <- Queues],
-        [amqp_channel:call(Chan, #'queue.bind' { queue = Q,
-                                                 exchange = <<"e">>,
-                                                 routing_key = <<"10">> })
-         || Q <- [<<"q0">>, <<"q1">>]],
-        [amqp_channel:call(Chan, #'queue.bind' { queue = Q,
-                                                 exchange = <<"e">>,
-                                                 routing_key = <<"20">> })
-         || Q <- [<<"q2">>, <<"q3">>]],
-        Msg = #amqp_msg { props = #'P_basic'{}, payload = <<>> },
-        [amqp_channel:call(Chan,
-                           #'basic.publish'{
-                             exchange = <<"e">>,
-                             routing_key = list_to_binary(
-                                             integer_to_list(
-                                               random:uniform(1000000)))
-                           }, Msg) || _ <- lists:seq(1,100000)],
-        amqp_connection:close(Conn),
-        ok.
-
-As you can see, the queues `q0` and `q1` get bound each with 10 points
-in the hash space to the exchange `e` which means they'll each get
-roughly the same number of messages. The queues `q2` and `q3` however,
-get 20 points each which means they'll each get roughly the same
-number of messages too, but that will be approximately twice as many
-as `q0` and `q1`. We then publish 100,000 messages to our exchange
-with random routing keys. After this has completed, running
-`rabbitmqctl list_queues` should show that the messages have been
-distributed approximately as desired.
-
-Note the `routing_key`s in the bindings are numbers-as-strings. This
-is because AMQP specifies the routing_key must be a string.
-
-The more points in the hash space each binding has, the closer the
-actual distribution will be to the desired distribution (as indicated
-by the ratio of points by binding). However, large numbers of points
-(many thousands) will substantially decrease performance of the
-exchange type.
-
-Equally, it is important to ensure that the messages being published
-to the exchange have a range of different `routing_key`s: if a very
-small set of routing keys are being used then there's a possibility of
-messages not being evenly distributed between the various queues. If
-the routing key is a pseudo-random session ID or such, then good
-results should follow.
-
-## Routing on a header
-
-Under most circumstances the routing key is a good choice for something to
-hash. However, in some cases you need to use the routing key for some other
-purpose (for example with more complex routing involving exchange to
-exchange bindings). In this case you can configure the consistent hash
-exchange to route based on a named header instead. To do this, declare the
-exchange with a string argument called "hash-header" naming the header to
-be used. For example using the Erlang client as above:
-
-    amqp_channel:call(
-      Chan, #'exchange.declare' {
-              exchange  = <<"e">>,
-              type      = <<"x-consistent-hash">>,
-              arguments = [{<<"hash-header">>, longstr, <<"hash-me">>}]
-            }).
-
-If you specify "hash-header" and then publish messages without the named
-header, they will all get routed to the same (arbitrarily-chosen) queue.
-
-Any comments or feedback welcome, to the
-[rabbitmq-discuss mailing list](https://lists.rabbitmq.com/cgi-bin/mailman/listinfo/rabbitmq-discuss)
-or info@rabbitmq.com.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/package.mk b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/package.mk
deleted file mode 100644 (file)
index 151c43c..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-server rabbitmq-erlang-client
-WITH_BROKER_TEST_COMMANDS:=rabbit_exchange_type_consistent_hash_test:test()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl
deleted file mode 100644 (file)
index 9bb619d..0000000
+++ /dev/null
@@ -1,137 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ Consistent Hash Exchange.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2011-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_exchange_type_consistent_hash_test).
--export([test/0]).
--include_lib("amqp_client/include/amqp_client.hrl").
--include_lib("eunit/include/eunit.hrl").
-
-%% Because the routing is probabilistic, we can't really test a great
-%% deal here.
-
-test() ->
-    %% Run the test twice to test we clean up correctly
-    t([<<"q0">>, <<"q1">>, <<"q2">>, <<"q3">>]),
-    t([<<"q4">>, <<"q5">>, <<"q6">>, <<"q7">>]).
-
-t(Qs) ->
-    ok = test_with_rk(Qs),
-    ok = test_with_header(Qs),
-    ok = test_binding_with_negative_routing_key(),
-    ok = test_binding_with_non_numeric_routing_key(),
-    ok.
-
-test_with_rk(Qs) ->
-    test0(fun () ->
-                  #'basic.publish'{exchange = <<"e">>, routing_key = rnd()}
-          end,
-          fun() ->
-                  #amqp_msg{props = #'P_basic'{}, payload = <<>>}
-          end, [], Qs).
-
-test_with_header(Qs) ->
-    test0(fun () ->
-                  #'basic.publish'{exchange = <<"e">>}
-          end,
-          fun() ->
-                  H = [{<<"hashme">>, longstr, rnd()}],
-                  #amqp_msg{props = #'P_basic'{headers = H}, payload = <<>>}
-          end, [{<<"hash-header">>, longstr, <<"hashme">>}], Qs).
-
-rnd() ->
-    list_to_binary(integer_to_list(random:uniform(1000000))).
-
-test0(MakeMethod, MakeMsg, DeclareArgs, [Q1, Q2, Q3, Q4] = Queues) ->
-    Count = 10000,
-
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Chan} = amqp_connection:open_channel(Conn),
-    #'exchange.declare_ok'{} =
-        amqp_channel:call(Chan,
-                          #'exchange.declare' {
-                            exchange = <<"e">>,
-                            type = <<"x-consistent-hash">>,
-                            auto_delete = true,
-                            arguments = DeclareArgs
-                          }),
-    [#'queue.declare_ok'{} =
-         amqp_channel:call(Chan, #'queue.declare' {
-                             queue = Q, exclusive = true}) || Q <- Queues],
-    [#'queue.bind_ok'{} =
-         amqp_channel:call(Chan, #'queue.bind' {queue = Q,
-                                                 exchange = <<"e">>,
-                                                 routing_key = <<"10">>})
-     || Q <- [Q1, Q2]],
-    [#'queue.bind_ok'{} =
-         amqp_channel:call(Chan, #'queue.bind' {queue = Q,
-                                                 exchange = <<"e">>,
-                                                 routing_key = <<"20">>})
-     || Q <- [Q3, Q4]],
-    #'tx.select_ok'{} = amqp_channel:call(Chan, #'tx.select'{}),
-    [amqp_channel:call(Chan,
-                       MakeMethod(),
-                       MakeMsg()) || _ <- lists:duplicate(Count, const)],
-    amqp_channel:call(Chan, #'tx.commit'{}),
-    Counts =
-        [begin
-            #'queue.declare_ok'{message_count = M} =
-                 amqp_channel:call(Chan, #'queue.declare' {queue     = Q,
-                                                           exclusive = true}),
-             M
-         end || Q <- Queues],
-    Count = lists:sum(Counts), %% All messages got routed
-    [true = C > 0.01 * Count || C <- Counts], %% We are not *grossly* unfair
-    amqp_channel:call(Chan, #'exchange.delete' {exchange = <<"e">>}),
-    [amqp_channel:call(Chan, #'queue.delete' {queue = Q}) || Q <- Queues],
-    amqp_channel:close(Chan),
-    amqp_connection:close(Conn),
-    ok.
-
-test_binding_with_negative_routing_key() ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Chan} = amqp_connection:open_channel(Conn),
-    Declare1 = #'exchange.declare'{exchange = <<"bind-fail">>,
-                                    type = <<"x-consistent-hash">>},
-    #'exchange.declare_ok'{} = amqp_channel:call(Chan, Declare1),
-    Q = <<"test-queue">>,
-    Declare2 = #'queue.declare'{queue = Q},
-    #'queue.declare_ok'{} = amqp_channel:call(Chan, Declare2),
-    process_flag(trap_exit, true),
-    Cmd = #'queue.bind'{exchange = <<"bind-fail">>,
-                         routing_key = <<"-1">>},
-    ?assertExit(_, amqp_channel:call(Chan, Cmd)),
-    {ok, Ch2} = amqp_connection:open_channel(Conn),
-    amqp_channel:call(Ch2, #'queue.delete'{queue = Q}),
-    amqp_connection:close(Conn),
-    ok.
-
-test_binding_with_non_numeric_routing_key() ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Chan} = amqp_connection:open_channel(Conn),
-    Declare1 = #'exchange.declare'{exchange = <<"bind-fail">>,
-                                    type = <<"x-consistent-hash">>},
-    #'exchange.declare_ok'{} = amqp_channel:call(Chan, Declare1),
-    Q = <<"test-queue">>,
-    Declare2 = #'queue.declare'{queue = Q},
-    #'queue.declare_ok'{} = amqp_channel:call(Chan, Declare2),
-    process_flag(trap_exit, true),
-    Cmd = #'queue.bind'{exchange = <<"bind-fail">>,
-                         routing_key = <<"not-a-number">>},
-    ?assertExit(_, amqp_channel:call(Chan, Cmd)),
-    {ok, Ch2} = amqp_connection:open_channel(Conn),
-    amqp_channel:call(Ch2, #'queue.delete'{queue = Q}),
-    amqp_connection:close(Conn),
-    ok.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile
deleted file mode 100644 (file)
index a42c666..0000000
+++ /dev/null
@@ -1,125 +0,0 @@
-# The contents of this file are subject to the Mozilla Public License
-# Version 1.1 (the "License"); you may not use this file except in
-# compliance with the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS"
-# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-# License for the specific language governing rights and limitations
-# under the License.
-#
-# The Original Code is RabbitMQ.
-#
-# The Initial Developer of the Original Code is GoPivotal, Inc.
-# Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-#
-
-VERSION=0.0.0
-
-SOURCE_PACKAGE_DIR=$(PACKAGE)-$(VERSION)-src
-SOURCE_PACKAGE_TAR_GZ=$(SOURCE_PACKAGE_DIR).tar.gz
-
-BROKER_HEADERS=$(wildcard $(BROKER_DIR)/$(INCLUDE_DIR)/*.hrl)
-BROKER_SOURCES=$(wildcard $(BROKER_DIR)/$(SOURCE_DIR)/*.erl)
-BROKER_DEPS=$(BROKER_HEADERS) $(BROKER_SOURCES)
-
-INFILES=$(shell find . -name '*.app.in')
-INTARGETS=$(patsubst %.in, %, $(INFILES))
-
-WEB_URL=http://www.rabbitmq.com/
-
-include common.mk
-
-run_in_broker: compile $(BROKER_DEPS) $(EBIN_DIR)/$(PACKAGE).app
-       $(MAKE) RABBITMQ_SERVER_START_ARGS='$(PA_LOAD_PATH)' -C $(BROKER_DIR) run
-
-clean: common_clean
-       rm -f $(INTARGETS)
-       rm -rf $(DIST_DIR)
-
-distribution: documentation source_tarball package
-
-%.app: %.app.in $(SOURCES) $(BROKER_DIR)/generate_app
-       escript  $(BROKER_DIR)/generate_app $< $@ $(SOURCE_DIR)
-       sed 's/%%VSN%%/$(VERSION)/' $@ > $@.tmp && mv $@.tmp $@
-
-###############################################################################
-##  Dialyzer
-###############################################################################
-
-RABBIT_PLT=$(BROKER_DIR)/rabbit.plt
-
-dialyze: $(RABBIT_PLT) $(TARGETS)
-       dialyzer --plt $(RABBIT_PLT) --no_native -Wrace_conditions $(TARGETS)
-
-.PHONY: $(RABBIT_PLT)
-$(RABBIT_PLT):
-       $(MAKE) -C $(BROKER_DIR) create-plt
-
-###############################################################################
-##  Documentation
-###############################################################################
-
-documentation: $(DOC_DIR)/index.html
-
-$(DOC_DIR)/overview.edoc: $(SOURCE_DIR)/overview.edoc.in
-       mkdir -p $(DOC_DIR)
-       sed -e 's:%%VERSION%%:$(VERSION):g' < $< > $@
-
-$(DOC_DIR)/index.html: $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) $(DOC_DIR)/overview.edoc $(SOURCES)
-       $(LIBS_PATH) erl -noshell -eval 'edoc:application(amqp_client, ".", [{preprocess, true}, {macros, [{edoc, true}]}])' -run init stop
-
-###############################################################################
-##  Testing
-###############################################################################
-
-include test.mk
-
-compile_tests: $(TEST_TARGETS) $(EBIN_DIR)/$(PACKAGE).app
-
-$(TEST_TARGETS): $(TEST_DIR)
-
-.PHONY: $(TEST_DIR)
-$(TEST_DIR): $(DEPS_DIR)/$(COMMON_PACKAGE_DIR)
-       $(MAKE) -C $(TEST_DIR)
-
-###############################################################################
-##  Packaging
-###############################################################################
-
-COPY=cp -pR
-
-$(DIST_DIR)/$(COMMON_PACKAGE_EZ): $(BROKER_DEPS) $(COMMON_PACKAGE).app | $(DIST_DIR)
-       rm -f $@
-       $(MAKE) -C $(BROKER_DIR)
-       rm -rf $(DIST_DIR)/$(COMMON_PACKAGE_DIR)
-       mkdir -p $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(INCLUDE_DIR)
-       mkdir -p $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(EBIN_DIR)
-       cp $(COMMON_PACKAGE).app $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(EBIN_DIR)/
-       $(foreach DEP, $(DEPS), \
-           ( cp $(BROKER_DIR)/ebin/$(DEP).beam $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(EBIN_DIR)/ \
-           );)
-       cp $(BROKER_DIR)/include/*.hrl $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(INCLUDE_DIR)/
-       (cd $(DIST_DIR); zip -q -r $(COMMON_PACKAGE_EZ) $(COMMON_PACKAGE_DIR))
-
-source_tarball: $(DIST_DIR)/$(COMMON_PACKAGE_EZ) $(EBIN_DIR)/$(PACKAGE).app | $(DIST_DIR)
-       mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(DIST_DIR)
-       $(COPY) $(DIST_DIR)/$(COMMON_PACKAGE_EZ) $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(DIST_DIR)/
-       $(COPY) README.in $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/README
-       elinks -dump -no-references -no-numbering $(WEB_URL)build-erlang-client.html >> $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/README
-       $(COPY) common.mk $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/
-       $(COPY) test.mk $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/
-       sed 's/%%VSN%%/$(VERSION)/' Makefile.in > $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/Makefile
-       mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(SOURCE_DIR)
-       $(COPY) $(SOURCE_DIR)/*.erl $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(SOURCE_DIR)/
-       mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(EBIN_DIR)
-       $(COPY) $(EBIN_DIR)/*.app $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(EBIN_DIR)/
-       mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(INCLUDE_DIR)
-       $(COPY) $(INCLUDE_DIR)/*.hrl $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(INCLUDE_DIR)/
-       mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(TEST_DIR)
-       $(COPY) $(TEST_DIR)/*.erl $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(TEST_DIR)/
-       $(COPY) $(TEST_DIR)/Makefile $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(TEST_DIR)/
-       cd $(DIST_DIR) ; tar czf $(SOURCE_PACKAGE_TAR_GZ) $(SOURCE_PACKAGE_DIR)
-
-$(DIST_DIR):
-       mkdir -p $@
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile.in b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile.in
deleted file mode 100644 (file)
index 0b46f9f..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-# The contents of this file are subject to the Mozilla Public License
-# Version 1.1 (the "License"); you may not use this file except in
-# compliance with the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS"
-# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-# License for the specific language governing rights and limitations
-# under the License.
-#
-# The Original Code is RabbitMQ.
-#
-# The Initial Developer of the Original Code is GoPivotal, Inc.
-# Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-#
-
-VERSION=%%VSN%%
-
-include common.mk
-include test.mk
-
-clean: common_clean
-
-compile_tests:
-       $(MAKE) -C test VERSION=$(VERSION)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/common.mk b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/common.mk
deleted file mode 100644 (file)
index 9de9221..0000000
+++ /dev/null
@@ -1,201 +0,0 @@
-# The contents of this file are subject to the Mozilla Public License
-# Version 1.1 (the "License"); you may not use this file except in
-# compliance with the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS"
-# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-# License for the specific language governing rights and limitations
-# under the License.
-#
-# The Original Code is RabbitMQ.
-#
-# The Initial Developer of the Original Code is GoPivotal, Inc.
-# Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-#
-
-# The client library can either be built from source control or by downloading
-# a source tarball from the RabbitMQ site. The intention behind the source tarball is
-# to be able to unpack this anywhere and just run a simple a test, under the
-# assumption that you have a running broker. This provides the simplest
-# possible way of building and running the client.
-#
-# The source control version, on the other hand, contains far more infrastructure
-# to start and stop brokers, package modules from the server, run embedded tests
-# and so forth.
-#
-# This means that the Makefile of the source control version contains a lot of
-# functionality that just wouldn't work with the source tarball version.
-#
-# The purpose of this common Makefile is to define as many commonalities
-# between the build requirements of the source control version and the source
-# tarball version. This avoids duplicating make definitions and rules and
-# helps keep the Makefile maintenence well factored.
-
-ifndef TMPDIR
-TMPDIR := /tmp
-endif
-
-EBIN_DIR=ebin
-BROKER_DIR=../rabbitmq-server
-export INCLUDE_DIR=include
-TEST_DIR=test
-SOURCE_DIR=src
-DIST_DIR=dist
-DEPS_DIR=deps
-DOC_DIR=doc
-DEPS_FILE=deps.mk
-
-ifeq ("$(ERL_LIBS)", "")
-       ERL_LIBS :=
-else
-       ERL_LIBS := :$(ERL_LIBS)
-endif
-
-ERL_PATH ?=
-
-PACKAGE=amqp_client
-PACKAGE_DIR=$(PACKAGE)-$(VERSION)
-PACKAGE_NAME_EZ=$(PACKAGE_DIR).ez
-COMMON_PACKAGE=rabbit_common
-export COMMON_PACKAGE_DIR=$(COMMON_PACKAGE)-$(VERSION)
-COMMON_PACKAGE_EZ=$(COMMON_PACKAGE_DIR).ez
-NODE_NAME=amqp_client
-
-DEPS=$(shell erl -noshell -eval '{ok,[{_,_,[_,_,{modules, Mods},_,_,_]}]} = \
-                                 file:consult("$(COMMON_PACKAGE).app.in"), \
-                                 [io:format("~p ",[M]) || M <- Mods], halt().')
-
-INCLUDES=$(wildcard $(INCLUDE_DIR)/*.hrl)
-SOURCES=$(wildcard $(SOURCE_DIR)/*.erl)
-TARGETS=$(patsubst $(SOURCE_DIR)/%.erl, $(EBIN_DIR)/%.beam, $(SOURCES))
-TEST_SOURCES=$(wildcard $(TEST_DIR)/*.erl)
-TEST_TARGETS=$(patsubst $(TEST_DIR)/%.erl, $(TEST_DIR)/%.beam, $(TEST_SOURCES))
-
-LIBS_PATH_UNIX=$(DEPS_DIR):$(DIST_DIR)$(ERL_LIBS)
-IS_CYGWIN=$(shell if [ $(shell expr "$(shell uname -s)" : 'CYGWIN_NT') -gt 0 ]; then echo "true"; else echo "false"; fi)
-ifeq ($(IS_CYGWIN),true)
-    LIBS_PATH=ERL_LIBS="$(shell cygpath -wp $(LIBS_PATH_UNIX))"
-else
-    LIBS_PATH=ERL_LIBS=$(LIBS_PATH_UNIX)
-endif
-
-LOAD_PATH=$(EBIN_DIR) $(TEST_DIR) $(ERL_PATH)
-
-RUN:=$(LIBS_PATH) erl -pa $(LOAD_PATH) -sname $(NODE_NAME)
-
-MKTEMP=$$(mktemp $(TMPDIR)/tmp.XXXXXXXXXX)
-
-ifndef USE_SPECS
-# our type specs rely on features / bug fixes in dialyzer that are
-# only available in R13B01 upwards (R13B is eshell 5.7.2)
-#
-# NB: do not mark this variable for export, otherwise it will
-# override the test in rabbitmq-server's Makefile when it does the
-# make -C, which causes problems whenever the test here and the test
-# there compare system_info(version) against *different* eshell
-# version numbers.
-USE_SPECS:=$(shell erl -noshell -eval 'io:format([list_to_integer(X) || X <- string:tokens(erlang:system_info(version), ".")] >= [5,7,2]), halt().')
-endif
-
-ERLC_OPTS=-I $(INCLUDE_DIR) -pa $(EBIN_DIR) -o $(EBIN_DIR) -Wall -v +debug_info $(if $(filter true,$(USE_SPECS)),-Duse_specs)
-
-RABBITMQ_NODENAME=rabbit
-PA_LOAD_PATH=-pa $(realpath $(LOAD_PATH))
-RABBITMQCTL=$(BROKER_DIR)/scripts/rabbitmqctl
-
-ifdef SSL_CERTS_DIR
-SSL := true
-ALL_SSL := { $(MAKE) test_ssl || OK=false; }
-ALL_SSL_COVERAGE := { $(MAKE) test_ssl_coverage || OK=false; }
-SSL_BROKER_ARGS := -rabbit ssl_listeners [{\\\"0.0.0.0\\\",5671},{\\\"::1\\\",5671}] \
-       -rabbit ssl_options [{cacertfile,\\\"$(SSL_CERTS_DIR)/testca/cacert.pem\\\"},{certfile,\\\"$(SSL_CERTS_DIR)/server/cert.pem\\\"},{keyfile,\\\"$(SSL_CERTS_DIR)/server/key.pem\\\"},{verify,verify_peer},{fail_if_no_peer_cert,true}]
-SSL_CLIENT_ARGS := -erlang_client_ssl_dir $(SSL_CERTS_DIR)
-else
-SSL := @echo No SSL_CERTS_DIR defined. && false
-ALL_SSL := true
-ALL_SSL_COVERAGE := true
-SSL_BROKER_ARGS :=
-SSL_CLIENT_ARGS :=
-endif
-
-# Versions prior to this are not supported
-NEED_MAKE := 3.80
-ifneq "$(NEED_MAKE)" "$(firstword $(sort $(NEED_MAKE) $(MAKE_VERSION)))"
-$(error Versions of make prior to $(NEED_MAKE) are not supported)
-endif
-
-# .DEFAULT_GOAL introduced in 3.81
-DEFAULT_GOAL_MAKE := 3.81
-ifneq "$(DEFAULT_GOAL_MAKE)" "$(firstword $(sort $(DEFAULT_GOAL_MAKE) $(MAKE_VERSION)))"
-.DEFAULT_GOAL=all
-endif
-
-all: package
-
-common_clean:
-       rm -f $(EBIN_DIR)/*.beam
-       rm -f erl_crash.dump
-       rm -rf $(DEPS_DIR)
-       rm -rf $(DOC_DIR)
-       rm -f $(DEPS_FILE)
-       $(MAKE) -C $(TEST_DIR) clean
-
-compile: $(TARGETS) $(EBIN_DIR)/$(PACKAGE).app
-
-run: compile
-       $(RUN)
-
-###############################################################################
-##  Packaging
-###############################################################################
-
-$(DIST_DIR)/$(PACKAGE_NAME_EZ): $(TARGETS) $(EBIN_DIR)/$(PACKAGE).app | $(DIST_DIR)
-       rm -f $@
-       rm -rf $(DIST_DIR)/$(PACKAGE_DIR)
-       mkdir -p $(DIST_DIR)/$(PACKAGE_DIR)/$(EBIN_DIR)
-       mkdir -p $(DIST_DIR)/$(PACKAGE_DIR)/$(INCLUDE_DIR)
-       cp -r $(EBIN_DIR)/*.beam $(DIST_DIR)/$(PACKAGE_DIR)/$(EBIN_DIR)
-       cp -r $(EBIN_DIR)/*.app $(DIST_DIR)/$(PACKAGE_DIR)/$(EBIN_DIR)
-       mkdir -p $(DIST_DIR)/$(PACKAGE_DIR)/$(INCLUDE_DIR)
-       cp -r $(INCLUDE_DIR)/* $(DIST_DIR)/$(PACKAGE_DIR)/$(INCLUDE_DIR)
-       (cd $(DIST_DIR); zip -q -r $(PACKAGE_NAME_EZ) $(PACKAGE_DIR))
-
-package: $(DIST_DIR)/$(PACKAGE_NAME_EZ)
-
-###############################################################################
-##  Internal targets
-###############################################################################
-
-$(DEPS_DIR)/$(COMMON_PACKAGE_DIR): $(DIST_DIR)/$(COMMON_PACKAGE_EZ) | $(DEPS_DIR)
-       rm -rf $(DEPS_DIR)/$(COMMON_PACKAGE_DIR)
-       mkdir -p $(DEPS_DIR)/$(COMMON_PACKAGE_DIR)
-       unzip -q -o $< -d $(DEPS_DIR)
-
-$(DEPS_FILE): $(SOURCES) $(INCLUDES)
-       rm -f $@
-       echo $(subst : ,:,$(foreach FILE,$^,$(FILE):)) | escript $(BROKER_DIR)/generate_deps $@ $(EBIN_DIR)
-
-$(EBIN_DIR)/%.beam: $(SOURCE_DIR)/%.erl $(INCLUDES) $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) | $(DEPS_FILE)
-       $(LIBS_PATH) erlc $(ERLC_OPTS) $<
-
-$(DEPS_DIR):
-       mkdir -p $@
-
-# Note that all targets which depend on clean must have clean in their
-# name.  Also any target that doesn't depend on clean should not have
-# clean in its name, unless you know that you don't need any of the
-# automatic dependency generation for that target.
-
-# We want to load the dep file if *any* target *doesn't* contain
-# "clean" - i.e. if removing all clean-like targets leaves something
-
-ifeq "$(MAKECMDGOALS)" ""
-TESTABLEGOALS:=$(.DEFAULT_GOAL)
-else
-TESTABLEGOALS:=$(MAKECMDGOALS)
-endif
-
-ifneq "$(strip $(patsubst clean%,,$(patsubst %clean,,$(TESTABLEGOALS))))" ""
--include $(DEPS_FILE)
-endif
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_gen_consumer_spec.hrl b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_gen_consumer_spec.hrl
deleted file mode 100644 (file)
index fbaa28c..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2011-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--include("amqp_client.hrl").
-
--ifndef(edoc).
--type(state() :: any()).
--type(consume() :: #'basic.consume'{}).
--type(consume_ok() :: #'basic.consume_ok'{}).
--type(cancel() :: #'basic.cancel'{}).
--type(cancel_ok() :: #'basic.cancel_ok'{}).
--type(deliver() :: #'basic.deliver'{}).
--type(from() :: any()).
--type(reason() :: any()).
--type(ok_error() :: {ok, state()} | {error, reason(), state()}).
-
--spec(init/1 :: ([any()]) -> {ok, state()}).
--spec(handle_consume/3 :: (consume(), pid(), state()) -> ok_error()).
--spec(handle_consume_ok/3 :: (consume_ok(), consume(), state()) ->
-                                  ok_error()).
--spec(handle_cancel/2 :: (cancel(), state()) -> ok_error()).
--spec(handle_server_cancel/2 :: (cancel(), state()) -> ok_error()).
--spec(handle_cancel_ok/3 :: (cancel_ok(), cancel(), state()) -> ok_error()).
--spec(handle_deliver/3 :: (deliver(), #amqp_msg{}, state()) -> ok_error()).
--spec(handle_info/2 :: (any(), state()) -> ok_error()).
--spec(handle_call/3 :: (any(), from(), state()) ->
-                           {reply, any(), state()} | {noreply, state()} |
-                            {error, reason(), state()}).
--spec(terminate/2 :: (any(), state()) -> state()).
--endif.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/rabbit_common.app.in b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/rabbit_common.app.in
deleted file mode 100644 (file)
index 930e232..0000000
+++ /dev/null
@@ -1,46 +0,0 @@
-{application, rabbit_common,
- [{description, "RabbitMQ Common Libraries"},
-  {vsn, "%%VSN%%"},
-  {modules, [
-             app_utils,
-             credit_flow,
-             pmon,
-             gen_server2,
-             mirrored_supervisor,
-             mochijson2,
-             priority_queue,
-             rabbit_backing_queue,
-             rabbit_basic,
-             rabbit_binary_generator,
-             rabbit_binary_parser,
-             rabbit_channel,
-             rabbit_channel_interceptor,
-             rabbit_runtime_parameter,
-             rabbit_command_assembler,
-             rabbit_exchange_type,
-             rabbit_exchange_decorator,
-             rabbit_authn_backend,
-             rabbit_authz_backend,
-             rabbit_auth_mechanism,
-             rabbit_framing_amqp_0_8,
-             rabbit_framing_amqp_0_9_1,
-             rabbit_heartbeat,
-             rabbit_misc,
-             rabbit_msg_store_index,
-             rabbit_net,
-             rabbit_networking,
-             rabbit_nodes,
-             rabbit_policy_validator,
-             rabbit_reader,
-             rabbit_writer,
-             rabbit_event,
-             rabbit_queue_collector,
-             rabbit_queue_decorator,
-             rabbit_amqqueue,
-             ssl_compat,
-             supervisor2,
-             time_compat
-  ]},
-  {registered, []},
-  {env, []},
-  {applications, [kernel, stdlib]}]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test.mk b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test.mk
deleted file mode 100644 (file)
index ae3057f..0000000
+++ /dev/null
@@ -1,130 +0,0 @@
-# The contents of this file are subject to the Mozilla Public License
-# Version 1.1 (the "License"); you may not use this file except in
-# compliance with the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS"
-# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-# License for the specific language governing rights and limitations
-# under the License.
-#
-# The Original Code is RabbitMQ.
-#
-# The Initial Developer of the Original Code is GoPivotal, Inc.
-# Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-#
-
-IS_SUCCESS:=egrep "(All .+ tests (successful|passed).|Test passed.)"
-TESTING_MESSAGE:=-eval "error_logger:tty(false), error_logger:logfile({open, \"$(TMPDIR)/erlang-client-tests.log\"}), io:format(\"~nTesting in progress. Please wait...~n~n\")."
-
-prepare_tests: compile compile_tests
-
-all_tests: prepare_tests
-       OK=true && \
-       { $(MAKE) test_suites || OK=false; } && \
-       { $(MAKE) test_common_package || OK=false; } && \
-       { $(MAKE) test_direct || OK=false; } && \
-       $$OK
-
-test_suites: prepare_tests
-       OK=true && \
-       { $(MAKE) test_network || OK=false; } && \
-       { $(MAKE) test_remote_direct || OK=false; } && \
-       $(ALL_SSL) && \
-       $$OK
-
-test_suites_coverage: prepare_tests
-       OK=true && \
-       { $(MAKE) test_network_coverage || OK=false; } && \
-       { $(MAKE) test_direct_coverage || OK=false; } && \
-       $(ALL_SSL_COVERAGE) && \
-       $$OK
-
-## Starts a broker, configures users and runs the tests on the same node
-run_test_in_broker:
-       $(MAKE) start_test_broker_node
-       $(MAKE) unboot_broker
-       OK=true && \
-       TMPFILE=$(MKTEMP) && echo "Redirecting output to $$TMPFILE" && \
-       { $(MAKE) -C $(BROKER_DIR) run-node \
-               RABBITMQ_SERVER_START_ARGS="$(PA_LOAD_PATH) $(SSL_BROKER_ARGS) \
-               -noshell -s rabbit $(RUN_TEST_ARGS) -s init stop" 2>&1 | \
-               tee $$TMPFILE || OK=false; } && \
-       { $(IS_SUCCESS) $$TMPFILE || OK=false; } && \
-       rm $$TMPFILE && \
-       $(MAKE) boot_broker && \
-       $(MAKE) stop_test_broker_node && \
-       $$OK
-
-## Starts a broker, configures users and runs the tests from a different node
-run_test_detached: start_test_broker_node
-       OK=true && \
-       TMPFILE=$(MKTEMP) && echo "Redirecting output to $$TMPFILE" && \
-       { $(RUN) -noinput $(TESTING_MESSAGE) \
-          $(SSL_CLIENT_ARGS) $(RUN_TEST_ARGS) \
-           -s init stop 2>&1 | tee $$TMPFILE || OK=false; } && \
-       { $(IS_SUCCESS) $$TMPFILE || OK=false; } && \
-       rm $$TMPFILE && \
-       $(MAKE) stop_test_broker_node && \
-       $$OK
-
-## Starts a broker, configures users and runs the tests from a different node
-run_test_foreground: start_test_broker_node
-       OK=true && \
-       { $(RUN) -noinput $(TESTING_MESSAGE) \
-          $(SSL_CLIENT_ARGS) $(RUN_TEST_ARGS) \
-           -s init stop || OK=false; } && \
-       $(MAKE) stop_test_broker_node && \
-       $$OK
-
-start_test_broker_node: boot_broker
-       sleep 1
-       - $(RABBITMQCTL) delete_user test_user_no_perm
-       $(RABBITMQCTL) add_user test_user_no_perm test_user_no_perm
-       sleep 1
-
-stop_test_broker_node:
-       sleep 1
-       $(RABBITMQCTL) delete_user test_user_no_perm
-       $(MAKE) unboot_broker
-
-boot_broker:
-       $(MAKE) -C $(BROKER_DIR) start-background-node RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS) $(SSL_BROKER_ARGS)"
-       $(MAKE) -C $(BROKER_DIR) start-rabbit-on-node
-
-unboot_broker:
-       $(MAKE) -C $(BROKER_DIR) stop-rabbit-on-node
-       $(MAKE) -C $(BROKER_DIR) stop-node
-
-ssl:
-       $(SSL)
-
-test_ssl: prepare_tests ssl
-       $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network_ssl" RUN_TEST_ARGS="-s amqp_client_SUITE test"
-
-test_network: prepare_tests
-       $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network" RUN_TEST_ARGS="-s amqp_client_SUITE test"
-
-test_direct: prepare_tests
-       $(MAKE) run_test_in_broker AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test"
-
-test_remote_direct: prepare_tests
-       $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test"
-
-test_common_package: $(DIST_DIR)/$(COMMON_PACKAGE_EZ) package prepare_tests
-       $(MAKE) run_test_detached RUN="$(LIBS_PATH) erl -pa $(TEST_DIR)" \
-           AMQP_CLIENT_TEST_CONNECTION_TYPE="network" RUN_TEST_ARGS="-s amqp_client_SUITE test"
-       $(MAKE) run_test_detached RUN="$(LIBS_PATH) erl -pa $(TEST_DIR) -sname amqp_client" \
-           AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test"
-
-test_ssl_coverage: prepare_tests ssl
-       $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network_ssl" RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage"
-
-test_network_coverage: prepare_tests
-       $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network" RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage"
-
-test_remote_direct_coverage: prepare_tests
-       $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage"
-
-test_direct_coverage: prepare_tests
-       $(MAKE) run_test_in_broker AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage"
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/Makefile b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/Makefile
deleted file mode 100644 (file)
index beef64c..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-# The contents of this file are subject to the Mozilla Public License
-# Version 1.1 (the "License"); you may not use this file except in
-# compliance with the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS"
-# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-# License for the specific language governing rights and limitations
-# under the License.
-#
-# The Original Code is RabbitMQ.
-#
-# The Initial Developer of the Original Code is GoPivotal, Inc.
-# Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-#
-
-TEST_SOURCES=$(wildcard *.erl)
-TEST_TARGETS=$(patsubst %.erl, %.beam, $(TEST_SOURCES))
-INCLUDES=$(wildcard ../$(INCLUDE_DIR)/*.hrl)
-DEPS_DIR=../deps
-
-ERLC_OPTS=-I ../$(INCLUDE_DIR) -o ./ -Wall -v +debug_info
-LIBS_PATH=ERL_LIBS=$(DEPS_DIR)
-
-all: compile
-
-compile: $(TEST_TARGETS)
-
-%.beam: %.erl $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) $(INCLUDES)
-       $(LIBS_PATH) erlc $(ERLC_OPTS) $<
-
-clean:
-       rm -f *.beam
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_client_SUITE.erl b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_client_SUITE.erl
deleted file mode 100644 (file)
index e0bef04..0000000
+++ /dev/null
@@ -1,126 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(amqp_client_SUITE).
-
--export([test_coverage/0]).
-
--include_lib("eunit/include/eunit.hrl").
-
--define(FUNCTION,
-        begin
-            catch throw(x),
-            Fun = case erlang:get_stacktrace() of
-                      [{_, F, _}    | _] -> F; %% < R15
-                      [{_, F, _, _} | _] -> F %% >= R15
-                  end,
-            list_to_atom(string:strip(atom_to_list(Fun), right, $_))
-        end).
-
--define(RUN(Props), run(?FUNCTION, Props)).
-
-%%---------------------------------------------------------------------------
-%% Tests
-%%---------------------------------------------------------------------------
-
-amqp_uri_parse_test_()                  -> ?RUN([]).
-route_destination_test_()               -> ?RUN([]).
-basic_get_test_()                       -> ?RUN([]).
-basic_get_ipv6_test_()                  -> ?RUN([]).
-basic_return_test_()                    -> ?RUN([]).
-simultaneous_close_test_()              -> ?RUN([repeat]).
-basic_qos_test_()                       -> ?RUN([]).
-basic_recover_test_()                   -> ?RUN([]).
-basic_consume_test_()                   -> ?RUN([]).
-consume_notification_test_()            -> ?RUN([]).
-basic_nack_test_()                      -> ?RUN([]).
-large_content_test_()                   -> ?RUN([]).
-lifecycle_test_()                       -> ?RUN([]).
-direct_no_user_test_()                  -> ?RUN([]).
-direct_no_password_test_()              -> ?RUN([]).
-nowait_exchange_declare_test_()         -> ?RUN([]).
-channel_repeat_open_close_test_()       -> ?RUN([]).
-channel_multi_open_close_test_()        -> ?RUN([]).
-basic_ack_test_()                       -> ?RUN([]).
-basic_ack_call_test_()                  -> ?RUN([]).
-channel_lifecycle_test_()               -> ?RUN([]).
-queue_unbind_test_()                    -> ?RUN([]).
-sync_method_serialization_test_()       -> ?RUN([]).
-async_sync_method_serialization_test_() -> ?RUN([]).
-sync_async_method_serialization_test_() -> ?RUN([]).
-teardown_test_()                        -> ?RUN([repeat]).
-rpc_test_()                             -> ?RUN([]).
-rpc_client_test_()                      -> ?RUN([]).
-pub_and_close_test_()                   -> ?RUN([]).
-channel_tune_negotiation_test_()        -> ?RUN([]).
-confirm_test_()                         -> ?RUN([]).
-confirm_barrier_test_()                 -> ?RUN([]).
-confirm_select_before_wait_test_()      -> ?RUN([]).
-confirm_barrier_timeout_test_()         -> ?RUN([]).
-confirm_barrier_die_timeout_test_()     -> ?RUN([]).
-default_consumer_test_()                -> ?RUN([]).
-subscribe_nowait_test_()                -> ?RUN([]).
-connection_blocked_network_test_()      -> ?RUN([]).
-
-non_existent_exchange_test_()           -> ?RUN([negative]).
-bogus_rpc_test_()                       -> ?RUN([negative, repeat]).
-hard_error_test_()                      -> ?RUN([negative, repeat]).
-non_existent_user_test_()               -> ?RUN([negative]).
-invalid_password_test_()                -> ?RUN([negative]).
-non_existent_vhost_test_()              -> ?RUN([negative]).
-no_permission_test_()                   -> ?RUN([negative]).
-channel_writer_death_test_()            -> ?RUN([negative]).
-connection_failure_test_()              -> ?RUN([negative]).
-channel_death_test_()                   -> ?RUN([negative]).
-shortstr_overflow_property_test_()      -> ?RUN([negative]).
-shortstr_overflow_field_test_()         -> ?RUN([negative]).
-command_invalid_over_channel_test_()    -> ?RUN([negative]).
-command_invalid_over_channel0_test_()   -> ?RUN([negative]).
-
-%%---------------------------------------------------------------------------
-%% Internal
-%%---------------------------------------------------------------------------
-
-run(TestName, Props) ->
-    RepeatCount = case proplists:get_value(repeat, Props, false) of
-                      true                          -> 100;
-                      Number when is_number(Number) -> Number;
-                      false                         -> 1
-                  end,
-    Module = case proplists:get_bool(negative, Props) of
-                 true  -> negative_test_util;
-                 false -> test_util
-             end,
-    {timeout, proplists:get_value(timeout, Props, 60),
-     fun () ->
-             lists:foreach(
-                 fun (_) ->
-                         try erlang:apply(Module, TestName, []) of
-                             Ret -> Ret
-                         catch
-                             exit:normal -> ok
-                         end
-                 end, lists:seq(1, RepeatCount))
-     end}.
-
-%%---------------------------------------------------------------------------
-%% Coverage
-%%---------------------------------------------------------------------------
-
-test_coverage() ->
-    rabbit_misc:enable_cover(),
-    test(),
-    rabbit_misc:report_cover().
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_dbg.erl b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_dbg.erl
deleted file mode 100644 (file)
index cb20555..0000000
+++ /dev/null
@@ -1,122 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(amqp_dbg).
-
--include_lib("stdlib/include/ms_transform.hrl").
-
--export([tracer/0, all/0, c_all/0]).
--export([supervision/0, c_supervision/0,
-         connection_lifecycle/0, c_connection_lifecycle/0,
-         channels_manager_lifecycle/0, c_channels_manager_lifecycle/0,
-         channel_lifecycle/0, c_channel_lifecycle/0,
-         methods/0, c_methods/0]).
-
-
-tracer() ->
-    Ret = dbg:tracer(),
-    {ok, _} = dbg:p(all, c),
-    Ret.
-
-all() ->
-    tpl_list(all_args()).
-
-c_all() ->
-    ctpl_list(all_args()).
-
-supervision() ->
-    tpl_list(sup_args()).
-
-c_supervision() ->
-    ctpl_list(sup_args()).
-
-connection_lifecycle() ->
-    tpl_list(cl_args()).
-
-c_connection_lifecycle() ->
-    ctpl_list(cl_args()).
-
-channels_manager_lifecycle() ->
-    tpl_list(cml_args()).
-
-c_channels_manager_lifecycle() ->
-    ctpl_list(cml_args()).
-
-channel_lifecycle() ->
-    tpl_list(cl_args()).
-
-c_channel_lifecycle() ->
-    ctpl_list(cl_args()).
-
-methods() ->
-    tpl_list(m_args()).
-
-c_methods() ->
-    ctpl_list(m_args()).
-
-%%---------------------------------------------------------------------------
-%% Internal plumbing
-%%---------------------------------------------------------------------------
-
-all_args() ->
-    sup_args() ++ ncl_args() ++ cml_args() ++ cl_args() ++
-        m_args().
-
-sup_args() ->
-    [{amqp_connection_sup, start_link, return_ms()},
-     {amqp_connection_type_sup, start_link, return_ms()},
-     {amqp_channel_sup_sup, start_link, return_ms()},
-     {amqp_channel_sup_sup, start_channel_sup, return_ms()},
-     {amqp_channel_sup, start_link, return_ms()},
-     {amqp_network_connection, start_infrastructure, return_ms()},
-     {amqp_network_connection, start_heartbeat, return_ms()},
-     {amqp_channel, start_writer, return_ms()}].
-
-ncl_args() ->
-    [{amqp_main_reader, start_link, return_ms()},
-     {amqp_gen_connection, set_closing_state, []},
-     {amqp_gen_connection, handle_channels_terminated, []},
-     {amqp_network_connection, connect, []},
-     {amqp_direct_connection, connect, []},
-     {amqp_gen_connection, terminate, []}].
-
-cml_args() ->
-     [{amqp_channels_manager, handle_open_channel, return_ms()},
-      {amqp_channels_manager, handle_channel_down, []},
-      {amqp_channels_manager, signal_channels_connection_closing, []}].
-
-cl_args() ->
-    [{amqp_channel, init, []},
-     {amqp_channel_util, open_channel, []},
-     {amqp_channel, terminate, []}].
-
-m_args() ->
-    [{amqp_channel, do, return_ms()},
-     {amqp_channel, handle_method, []},
-     {amqp_gen_connection, handle_method, []},
-     {amqp_network_connection, do, return_ms()},
-     {amqp_network_connection, handshake_recv, return_ms()}].
-
-tpl_list(ArgsList) ->
-    [{ok, _} = dbg:tpl(Module, Func, Ms) || {Module, Func, Ms} <- ArgsList],
-    ok.
-
-ctpl_list(ArgsList) ->
-    [{ok, _} = dbg:ctpl(Module, Func) || {Module, Func, _} <- ArgsList],
-    ok.
-
-return_ms() ->
-    dbg:fun2ms(fun(_) -> return_trace() end).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/negative_test_util.erl b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/negative_test_util.erl
deleted file mode 100644 (file)
index a4f962c..0000000
+++ /dev/null
@@ -1,215 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(negative_test_util).
-
--include("amqp_client_internal.hrl").
--include_lib("eunit/include/eunit.hrl").
-
--compile(export_all).
-
-non_existent_exchange_test() ->
-    {ok, Connection} = test_util:new_connection(),
-    X = <<"test">>,
-    RoutingKey = <<"a">>,
-    Payload = <<"foobar">>,
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    {ok, OtherChannel} = amqp_connection:open_channel(Connection),
-    amqp_channel:call(Channel, #'exchange.declare'{exchange = X}),
-
-    %% Deliberately mix up the routingkey and exchange arguments
-    Publish = #'basic.publish'{exchange = RoutingKey, routing_key = X},
-    amqp_channel:call(Channel, Publish, #amqp_msg{payload = Payload}),
-    test_util:wait_for_death(Channel),
-
-    %% Make sure Connection and OtherChannel still serve us and are not dead
-    {ok, _} = amqp_connection:open_channel(Connection),
-    amqp_channel:call(OtherChannel, #'exchange.delete'{exchange = X}),
-    amqp_connection:close(Connection).
-
-bogus_rpc_test() ->
-    {ok, Connection} = test_util:new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    %% Deliberately bind to a non-existent queue
-    Bind = #'queue.bind'{exchange    = <<"amq.topic">>,
-                         queue       = <<"does-not-exist">>,
-                         routing_key = <<>>},
-    try amqp_channel:call(Channel, Bind) of
-        _ -> exit(expected_to_exit)
-    catch
-        exit:{{shutdown, {server_initiated_close, Code, _}},_} ->
-            ?assertMatch(?NOT_FOUND, Code)
-    end,
-    test_util:wait_for_death(Channel),
-    ?assertMatch(true, is_process_alive(Connection)),
-    amqp_connection:close(Connection).
-
-hard_error_test() ->
-    {ok, Connection} = test_util:new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    {ok, OtherChannel} = amqp_connection:open_channel(Connection),
-    OtherChannelMonitor = erlang:monitor(process, OtherChannel),
-    Qos = #'basic.qos'{prefetch_size = 10000000},
-    try amqp_channel:call(Channel, Qos) of
-        _ -> exit(expected_to_exit)
-    catch
-        exit:{{shutdown, {connection_closing,
-                          {server_initiated_close, ?NOT_IMPLEMENTED, _}}}, _} ->
-            ok
-    end,
-    receive
-        {'DOWN', OtherChannelMonitor, process, OtherChannel, OtherExit} ->
-            ?assertMatch({shutdown,
-                          {connection_closing,
-                           {server_initiated_close, ?NOT_IMPLEMENTED, _}}},
-                         OtherExit)
-    end,
-    test_util:wait_for_death(Channel),
-    test_util:wait_for_death(Connection).
-
-%% The connection should die if the underlying connection is prematurely
-%% closed. For a network connection, this means that the TCP socket is
-%% closed. For a direct connection (remotely only, of course), this means that
-%% the RabbitMQ node appears as down.
-connection_failure_test() ->
-    {ok, Connection} = test_util:new_connection(),
-    case amqp_connection:info(Connection, [type, amqp_params]) of
-        [{type, direct}, {amqp_params, Params}]  ->
-            case Params#amqp_params_direct.node of
-                N when N == node() ->
-                    amqp_connection:close(Connection);
-                N ->
-                    true = erlang:disconnect_node(N),
-                    net_adm:ping(N)
-            end;
-        [{type, network}, {amqp_params, _}] ->
-            [{sock, Sock}] = amqp_connection:info(Connection, [sock]),
-            ok = gen_tcp:close(Sock)
-    end,
-    test_util:wait_for_death(Connection),
-    ok.
-
-%% An error in a channel should result in the death of the entire connection.
-%% The death of the channel is caused by an error in generating the frames
-%% (writer dies)
-channel_writer_death_test() ->
-    {ok, Connection} = test_util:new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    Publish = #'basic.publish'{routing_key = <<>>, exchange = <<>>},
-    QoS = #'basic.qos'{prefetch_count = 0},
-    Message = #amqp_msg{props = <<>>, payload = <<>>},
-    amqp_channel:cast(Channel, Publish, Message),
-    ?assertExit(_, amqp_channel:call(Channel, QoS)),
-    test_util:wait_for_death(Channel),
-    test_util:wait_for_death(Connection),
-    ok.
-
-%% An error in the channel process should result in the death of the entire
-%% connection. The death of the channel is caused by making a call with an
-%% invalid message to the channel process
-channel_death_test() ->
-    {ok, Connection} = test_util:new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    ?assertExit(_, amqp_channel:call(Channel, bogus_message)),
-    test_util:wait_for_death(Channel),
-    test_util:wait_for_death(Connection),
-    ok.
-
-%% Attempting to send a shortstr longer than 255 bytes in a property field
-%% should fail - this only applies to the network case
-shortstr_overflow_property_test() ->
-    {ok, Connection} = test_util:new_connection(just_network),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    SentString = << <<"k">> || _ <- lists:seq(1, 340)>>,
-    #'queue.declare_ok'{queue = Q}
-        = amqp_channel:call(Channel, #'queue.declare'{exclusive = true}),
-    Publish = #'basic.publish'{exchange = <<>>, routing_key = Q},
-    PBasic = #'P_basic'{content_type = SentString},
-    AmqpMsg = #amqp_msg{payload = <<"foobar">>, props = PBasic},
-    QoS = #'basic.qos'{prefetch_count = 0},
-    amqp_channel:cast(Channel, Publish, AmqpMsg),
-    ?assertExit(_, amqp_channel:call(Channel, QoS)),
-    test_util:wait_for_death(Channel),
-    test_util:wait_for_death(Connection),
-    ok.
-
-%% Attempting to send a shortstr longer than 255 bytes in a method's field
-%% should fail - this only applies to the network case
-shortstr_overflow_field_test() ->
-    {ok, Connection} = test_util:new_connection(just_network),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    SentString = << <<"k">> || _ <- lists:seq(1, 340)>>,
-    #'queue.declare_ok'{queue = Q}
-        = amqp_channel:call(Channel, #'queue.declare'{exclusive = true}),
-    ?assertExit(_, amqp_channel:call(
-                       Channel, #'basic.consume'{queue = Q,
-                                                 no_ack = true,
-                                                 consumer_tag = SentString})),
-    test_util:wait_for_death(Channel),
-    test_util:wait_for_death(Connection),
-    ok.
-
-%% Simulates a #'connection.open'{} method received on non-zero channel. The
-%% connection is expected to send a '#connection.close{}' to the server with
-%% reply code command_invalid
-command_invalid_over_channel_test() ->
-    {ok, Connection} = test_util:new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    MonitorRef = erlang:monitor(process, Connection),
-    case amqp_connection:info(Connection, [type]) of
-        [{type, direct}]  -> Channel ! {send_command, #'connection.open'{}};
-        [{type, network}] -> gen_server:cast(Channel,
-                                 {method, #'connection.open'{}, none, noflow})
-    end,
-    assert_down_with_error(MonitorRef, command_invalid),
-    ?assertNot(is_process_alive(Channel)),
-    ok.
-
-%% Simulates a #'basic.ack'{} method received on channel zero. The connection
-%% is expected to send a '#connection.close{}' to the server with reply code
-%% command_invalid - this only applies to the network case
-command_invalid_over_channel0_test() ->
-    {ok, Connection} = test_util:new_connection(just_network),
-    gen_server:cast(Connection, {method, #'basic.ack'{}, none, noflow}),
-    MonitorRef = erlang:monitor(process, Connection),
-    assert_down_with_error(MonitorRef, command_invalid),
-    ok.
-
-assert_down_with_error(MonitorRef, CodeAtom) ->
-    receive
-        {'DOWN', MonitorRef, process, _, Reason} ->
-            {shutdown, {server_misbehaved, Code, _}} = Reason,
-            ?assertMatch(CodeAtom, ?PROTOCOL:amqp_exception(Code))
-    after 2000 ->
-        exit(did_not_die)
-    end.
-
-non_existent_user_test() ->
-    Params = [{username, <<"no-user">>}, {password, <<"no-user">>}],
-    ?assertMatch({error, {auth_failure, _}}, test_util:new_connection(Params)).
-
-invalid_password_test() ->
-    Params = [{username, <<"guest">>}, {password, <<"bad">>}],
-    ?assertMatch({error, {auth_failure, _}}, test_util:new_connection(Params)).
-
-non_existent_vhost_test() ->
-    Params = [{virtual_host, <<"oops">>}],
-    ?assertMatch({error, access_refused}, test_util:new_connection(Params)).
-
-no_permission_test() ->
-    Params = [{username, <<"test_user_no_perm">>},
-              {password, <<"test_user_no_perm">>}],
-    ?assertMatch({error, access_refused}, test_util:new_connection(Params)).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/test_util.erl b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/test_util.erl
deleted file mode 100644 (file)
index b674423..0000000
+++ /dev/null
@@ -1,1168 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2011-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(test_util).
-
--include_lib("eunit/include/eunit.hrl").
--include("amqp_client_internal.hrl").
-
--compile([export_all]).
-
--define(TEST_REPEATS, 100).
-
-%% The latch constant defines how many processes are spawned in order
-%% to run certain functionality in parallel. It follows the standard
-%% countdown latch pattern.
--define(Latch, 100).
-
-%% The wait constant defines how long a consumer waits before it
-%% unsubscribes
--define(Wait, 200).
-
-%% How to long wait for a process to die after an expected failure
--define(DeathWait, 5000).
-
-%% AMQP URI parsing test
-amqp_uri_parse_test() ->
-    %% From the spec (adapted)
-    ?assertMatch({ok, #amqp_params_network{username     = <<"user">>,
-                                           password     = <<"pass">>,
-                                           host         = "host",
-                                           port         = 10000,
-                                           virtual_host = <<"vhost">>,
-                                           heartbeat    = 5}},
-                 amqp_uri:parse(
-                   "amqp://user:pass@host:10000/vhost?heartbeat=5")),
-    ?assertMatch({ok, #amqp_params_network{username     = <<"usera">>,
-                                           password     = <<"apass">>,
-                                           host         = "hoast",
-                                           port         = 10000,
-                                           virtual_host = <<"v/host">>}},
-                 amqp_uri:parse(
-                   "aMQp://user%61:%61pass@ho%61st:10000/v%2fhost")),
-    ?assertMatch({ok, #amqp_params_direct{}}, amqp_uri:parse("amqp://")),
-    ?assertMatch({ok, #amqp_params_direct{username     = <<"">>,
-                                          virtual_host = <<"">>}},
-                 amqp_uri:parse("amqp://:@/")),
-    ?assertMatch({ok, #amqp_params_network{username     = <<"">>,
-                                           password     = <<"">>,
-                                           virtual_host = <<"">>,
-                                           host         = "host"}},
-                 amqp_uri:parse("amqp://:@host/")),
-    ?assertMatch({ok, #amqp_params_direct{username = <<"user">>}},
-                 amqp_uri:parse("amqp://user@")),
-    ?assertMatch({ok, #amqp_params_network{username = <<"user">>,
-                                           password = <<"pass">>,
-                                           host     = "localhost"}},
-                 amqp_uri:parse("amqp://user:pass@localhost")),
-    ?assertMatch({ok, #amqp_params_network{host         = "host",
-                                           virtual_host = <<"/">>}},
-                 amqp_uri:parse("amqp://host")),
-    ?assertMatch({ok, #amqp_params_network{port = 10000,
-                                           host = "localhost"}},
-                 amqp_uri:parse("amqp://localhost:10000")),
-    ?assertMatch({ok, #amqp_params_direct{virtual_host = <<"vhost">>}},
-                 amqp_uri:parse("amqp:///vhost")),
-    ?assertMatch({ok, #amqp_params_network{host         = "host",
-                                           virtual_host = <<"">>}},
-                 amqp_uri:parse("amqp://host/")),
-    ?assertMatch({ok, #amqp_params_network{host         = "host",
-                                           virtual_host = <<"/">>}},
-                 amqp_uri:parse("amqp://host/%2f")),
-    ?assertMatch({ok, #amqp_params_network{host = "::1"}},
-                 amqp_uri:parse("amqp://[::1]")),
-
-    %% Varous other cases
-    ?assertMatch({ok, #amqp_params_network{host = "host", port = 100}},
-                 amqp_uri:parse("amqp://host:100")),
-    ?assertMatch({ok, #amqp_params_network{host = "::1", port = 100}},
-                 amqp_uri:parse("amqp://[::1]:100")),
-
-    ?assertMatch({ok, #amqp_params_network{host         = "host",
-                                           virtual_host = <<"blah">>}},
-                 amqp_uri:parse("amqp://host/blah")),
-    ?assertMatch({ok, #amqp_params_network{host         = "host",
-                                           port         = 100,
-                                           virtual_host = <<"blah">>}},
-                 amqp_uri:parse("amqp://host:100/blah")),
-    ?assertMatch({ok, #amqp_params_network{host         = "::1",
-                                           virtual_host = <<"blah">>}},
-                 amqp_uri:parse("amqp://[::1]/blah")),
-    ?assertMatch({ok, #amqp_params_network{host         = "::1",
-                                           port         = 100,
-                                           virtual_host = <<"blah">>}},
-                 amqp_uri:parse("amqp://[::1]:100/blah")),
-
-    ?assertMatch({ok, #amqp_params_network{username = <<"user">>,
-                                           password = <<"pass">>,
-                                           host     = "host"}},
-                 amqp_uri:parse("amqp://user:pass@host")),
-    ?assertMatch({ok, #amqp_params_network{username = <<"user">>,
-                                           password = <<"pass">>,
-                                           port     = 100}},
-                 amqp_uri:parse("amqp://user:pass@host:100")),
-    ?assertMatch({ok, #amqp_params_network{username = <<"user">>,
-                                           password = <<"pass">>,
-                                           host     = "::1"}},
-                 amqp_uri:parse("amqp://user:pass@[::1]")),
-    ?assertMatch({ok, #amqp_params_network{username = <<"user">>,
-                                           password = <<"pass">>,
-                                           host     = "::1",
-                                           port     = 100}},
-                 amqp_uri:parse("amqp://user:pass@[::1]:100")),
-
-    %% Various failure cases
-    ?assertMatch({error, _}, amqp_uri:parse("http://www.rabbitmq.com")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo:bar:baz")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo[::1]")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo:[::1]")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://[::1]foo")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo:1000xyz")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo:1000000")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo/bar/baz")),
-
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo%1")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo%1x")),
-    ?assertMatch({error, _}, amqp_uri:parse("amqp://foo%xy")),
-
-    ok.
-
-%%--------------------------------------------------------------------
-%% Destination Parsing Tests
-%%--------------------------------------------------------------------
-
-route_destination_test() ->
-    %% valid queue
-    ?assertMatch({ok, {queue, "test"}}, parse_dest("/queue/test")),
-
-    %% valid topic
-    ?assertMatch({ok, {topic, "test"}}, parse_dest("/topic/test")),
-
-    %% valid exchange
-    ?assertMatch({ok, {exchange, {"test", undefined}}}, parse_dest("/exchange/test")),
-
-    %% valid temp queue
-    ?assertMatch({ok, {temp_queue, "test"}}, parse_dest("/temp-queue/test")),
-
-    %% valid reply queue
-    ?assertMatch({ok, {reply_queue, "test"}}, parse_dest("/reply-queue/test")),
-    ?assertMatch({ok, {reply_queue, "test/2"}}, parse_dest("/reply-queue/test/2")),
-
-    %% valid exchange with pattern
-    ?assertMatch({ok, {exchange, {"test", "pattern"}}},
-        parse_dest("/exchange/test/pattern")),
-
-    %% valid pre-declared queue
-    ?assertMatch({ok, {amqqueue, "test"}}, parse_dest("/amq/queue/test")),
-
-    %% queue without name
-    ?assertMatch({error, {invalid_destination, queue, ""}}, parse_dest("/queue")),
-    ?assertMatch({ok, {queue, undefined}}, parse_dest("/queue", true)),
-
-    %% topic without name
-    ?assertMatch({error, {invalid_destination, topic, ""}}, parse_dest("/topic")),
-
-    %% exchange without name
-    ?assertMatch({error, {invalid_destination, exchange, ""}},
-        parse_dest("/exchange")),
-
-    %% exchange default name
-    ?assertMatch({error, {invalid_destination, exchange, "//foo"}},
-        parse_dest("/exchange//foo")),
-
-    %% amqqueue without name
-    ?assertMatch({error, {invalid_destination, amqqueue, ""}},
-        parse_dest("/amq/queue")),
-
-    %% queue without name with trailing slash
-    ?assertMatch({error, {invalid_destination, queue, "/"}}, parse_dest("/queue/")),
-
-    %% topic without name with trailing slash
-    ?assertMatch({error, {invalid_destination, topic, "/"}}, parse_dest("/topic/")),
-
-    %% exchange without name with trailing slash
-    ?assertMatch({error, {invalid_destination, exchange, "/"}},
-        parse_dest("/exchange/")),
-
-    %% queue with invalid name
-    ?assertMatch({error, {invalid_destination, queue, "/foo/bar"}},
-        parse_dest("/queue/foo/bar")),
-
-    %% topic with invalid name
-    ?assertMatch({error, {invalid_destination, topic, "/foo/bar"}},
-        parse_dest("/topic/foo/bar")),
-
-    %% exchange with invalid name
-    ?assertMatch({error, {invalid_destination, exchange, "/foo/bar/baz"}},
-        parse_dest("/exchange/foo/bar/baz")),
-
-    %% unknown destination
-    ?assertMatch({error, {unknown_destination, "/blah/boo"}},
-        parse_dest("/blah/boo")),
-
-    %% queue with escaped name
-    ?assertMatch({ok, {queue, "te/st"}}, parse_dest("/queue/te%2Fst")),
-
-    %% valid exchange with escaped name and pattern
-    ?assertMatch({ok, {exchange, {"te/st", "pa/tt/ern"}}},
-        parse_dest("/exchange/te%2Fst/pa%2Ftt%2Fern")),
-
-    ok.
-
-parse_dest(Destination, Params) ->
-    rabbit_routing_util:parse_endpoint(Destination, Params).
-parse_dest(Destination) ->
-    rabbit_routing_util:parse_endpoint(Destination).
-
-%%%%
-%%
-%% This is an example of how the client interaction should work
-%%
-%%   {ok, Connection} = amqp_connection:start(network),
-%%   {ok, Channel} = amqp_connection:open_channel(Connection),
-%%   %%...do something useful
-%%   amqp_channel:close(Channel),
-%%   amqp_connection:close(Connection).
-%%
-
-lifecycle_test() ->
-    {ok, Connection} = new_connection(),
-    X = <<"x">>,
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    amqp_channel:call(Channel,
-                      #'exchange.declare'{exchange = X,
-                                          type = <<"topic">>}),
-    Parent = self(),
-    [spawn(fun () -> queue_exchange_binding(Channel, X, Parent, Tag) end)
-     || Tag <- lists:seq(1, ?Latch)],
-    latch_loop(),
-    amqp_channel:call(Channel, #'exchange.delete'{exchange = X}),
-    teardown(Connection, Channel),
-    ok.
-
-direct_no_user_test() ->
-    {ok, Connection} = new_connection(just_direct, [{username, none},
-                                                    {password, none}]),
-    amqp_connection:close(Connection),
-    wait_for_death(Connection).
-
-direct_no_password_test() ->
-    {ok, Connection} = new_connection(just_direct, [{username, <<"guest">>},
-                                                    {password, none}]),
-    amqp_connection:close(Connection),
-    wait_for_death(Connection).
-
-queue_exchange_binding(Channel, X, Parent, Tag) ->
-    receive
-        nothing -> ok
-    after (?Latch - Tag rem 7) * 10 ->
-        ok
-    end,
-    Q = <<"a.b.c", Tag:32>>,
-    Binding = <<"a.b.c.*">>,
-    #'queue.declare_ok'{queue = Q1}
-        = amqp_channel:call(Channel, #'queue.declare'{queue = Q}),
-    ?assertMatch(Q, Q1),
-    Route = #'queue.bind'{queue = Q,
-                          exchange = X,
-                          routing_key = Binding},
-    amqp_channel:call(Channel, Route),
-    amqp_channel:call(Channel, #'queue.delete'{queue = Q}),
-    Parent ! finished.
-
-nowait_exchange_declare_test() ->
-    {ok, Connection} = new_connection(),
-    X = <<"x">>,
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    ?assertEqual(
-      ok,
-      amqp_channel:call(Channel, #'exchange.declare'{exchange = X,
-                                                     type = <<"topic">>,
-                                                     nowait = true})),
-    teardown(Connection, Channel).
-
-channel_lifecycle_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    amqp_channel:close(Channel),
-    {ok, Channel2} = amqp_connection:open_channel(Connection),
-    teardown(Connection, Channel2),
-    ok.
-
-abstract_method_serialization_test(BeforeFun, MultiOpFun, AfterFun) ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    X = <<"test">>,
-    Payload = list_to_binary(["x" || _ <- lists:seq(1, 1000)]),
-    OpsPerProcess = 20,
-    #'exchange.declare_ok'{} =
-        amqp_channel:call(Channel, #'exchange.declare'{exchange = X,
-                                                       type = <<"topic">>}),
-    BeforeRet = BeforeFun(Channel, X),
-    Parent = self(),
-    [spawn(fun () -> Ret = [MultiOpFun(Channel, X, Payload, BeforeRet, I)
-                            || _ <- lists:seq(1, OpsPerProcess)],
-                   Parent ! {finished, Ret}
-           end) || I <- lists:seq(1, ?Latch)],
-    MultiOpRet = latch_loop(),
-    AfterFun(Channel, X, Payload, BeforeRet, MultiOpRet),
-    amqp_channel:call(Channel, #'exchange.delete'{exchange = X}),
-    teardown(Connection, Channel).
-
-%% This is designed to exercize the internal queuing mechanism
-%% to ensure that sync methods are properly serialized
-sync_method_serialization_test() ->
-    abstract_method_serialization_test(
-        fun (_, _) -> ok end,
-        fun (Channel, _, _, _, Count) ->
-                Q = fmt("test-~p", [Count]),
-                #'queue.declare_ok'{queue = Q1} =
-                    amqp_channel:call(Channel,
-                                      #'queue.declare'{queue     = Q,
-                                                       exclusive = true}),
-                ?assertMatch(Q, Q1)
-        end,
-        fun (_, _, _, _, _) -> ok end).
-
-%% This is designed to exercize the internal queuing mechanism
-%% to ensure that sending async methods and then a sync method is serialized
-%% properly
-async_sync_method_serialization_test() ->
-    abstract_method_serialization_test(
-        fun (Channel, _X) ->
-                #'queue.declare_ok'{queue = Q} =
-                    amqp_channel:call(Channel, #'queue.declare'{}),
-                Q
-        end,
-        fun (Channel, X, Payload, _, _) ->
-                %% The async methods
-                ok = amqp_channel:call(Channel,
-                                       #'basic.publish'{exchange = X,
-                                                        routing_key = <<"a">>},
-                                       #amqp_msg{payload = Payload})
-        end,
-        fun (Channel, X, _, Q, _) ->
-                %% The sync method
-                #'queue.bind_ok'{} =
-                    amqp_channel:call(Channel,
-                                      #'queue.bind'{exchange = X,
-                                                    queue = Q,
-                                                    routing_key = <<"a">>}),
-                %% No message should have been routed
-                #'queue.declare_ok'{message_count = 0} =
-                    amqp_channel:call(Channel,
-                                      #'queue.declare'{queue = Q,
-                                                       passive = true})
-        end).
-
-%% This is designed to exercize the internal queuing mechanism
-%% to ensure that sending sync methods and then an async method is serialized
-%% properly
-sync_async_method_serialization_test() ->
-    abstract_method_serialization_test(
-        fun (_, _) -> ok end,
-        fun (Channel, X, _Payload, _, _) ->
-                %% The sync methods (called with cast to resume immediately;
-                %% the order should still be preserved)
-                #'queue.declare_ok'{queue = Q} =
-                    amqp_channel:call(Channel,
-                                      #'queue.declare'{exclusive = true}),
-                amqp_channel:cast(Channel, #'queue.bind'{exchange = X,
-                                                         queue = Q,
-                                                         routing_key= <<"a">>}),
-                Q
-        end,
-        fun (Channel, X, Payload, _, MultiOpRet) ->
-                #'confirm.select_ok'{} = amqp_channel:call(
-                                           Channel, #'confirm.select'{}),
-                ok = amqp_channel:call(Channel,
-                                       #'basic.publish'{exchange = X,
-                                                        routing_key = <<"a">>},
-                                       #amqp_msg{payload = Payload}),
-                %% All queues must have gotten this message
-                true = amqp_channel:wait_for_confirms(Channel),
-                lists:foreach(
-                    fun (Q) ->
-                            #'queue.declare_ok'{message_count = 1} =
-                                amqp_channel:call(
-                                  Channel, #'queue.declare'{queue   = Q,
-                                                            passive = true})
-                    end, lists:flatten(MultiOpRet))
-        end).
-
-queue_unbind_test() ->
-    {ok, Connection} = new_connection(),
-    X = <<"eggs">>, Q = <<"foobar">>, Key = <<"quay">>,
-    Payload = <<"foobar">>,
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    amqp_channel:call(Channel, #'exchange.declare'{exchange = X}),
-    amqp_channel:call(Channel, #'queue.declare'{queue = Q}),
-    Bind = #'queue.bind'{queue = Q,
-                         exchange = X,
-                         routing_key = Key},
-    amqp_channel:call(Channel, Bind),
-    Publish = #'basic.publish'{exchange = X, routing_key = Key},
-    amqp_channel:call(Channel, Publish, Msg = #amqp_msg{payload = Payload}),
-    get_and_assert_equals(Channel, Q, Payload),
-    Unbind = #'queue.unbind'{queue = Q,
-                             exchange = X,
-                             routing_key = Key},
-    amqp_channel:call(Channel, Unbind),
-    amqp_channel:call(Channel, Publish, Msg),
-    get_and_assert_empty(Channel, Q),
-    teardown(Connection, Channel).
-
-get_and_assert_empty(Channel, Q) ->
-    #'basic.get_empty'{}
-        = amqp_channel:call(Channel, #'basic.get'{queue = Q, no_ack = true}).
-
-get_and_assert_equals(Channel, Q, Payload) ->
-    get_and_assert_equals(Channel, Q, Payload, true).
-
-get_and_assert_equals(Channel, Q, Payload, NoAck) ->
-    {GetOk = #'basic.get_ok'{}, Content}
-        = amqp_channel:call(Channel, #'basic.get'{queue = Q, no_ack = NoAck}),
-    #amqp_msg{payload = Payload2} = Content,
-    ?assertMatch(Payload, Payload2),
-    GetOk.
-
-basic_get_test() ->
-    basic_get_test1(new_connection()).
-
-basic_get_ipv6_test() ->
-    basic_get_test1(new_connection(just_network, [{host, "::1"}])).
-
-basic_get_test1({ok, Connection}) ->
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    {ok, Q} = setup_publish(Channel),
-    get_and_assert_equals(Channel, Q, <<"foobar">>),
-    get_and_assert_empty(Channel, Q),
-    teardown(Connection, Channel).
-
-basic_return_test() ->
-    {ok, Connection} = new_connection(),
-    X = <<"test">>,
-    Q = <<"test">>,
-    Key = <<"test">>,
-    Payload = <<"qwerty">>,
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    amqp_channel:register_return_handler(Channel, self()),
-    amqp_channel:call(Channel, #'exchange.declare'{exchange = X}),
-    amqp_channel:call(Channel, #'queue.declare'{queue = Q,
-                                                exclusive = true}),
-    Publish = #'basic.publish'{exchange = X, routing_key = Key,
-                               mandatory = true},
-    amqp_channel:call(Channel, Publish, #amqp_msg{payload = Payload}),
-    receive
-        {BasicReturn = #'basic.return'{}, Content} ->
-            #'basic.return'{reply_code = ReplyCode,
-                            exchange = X} = BasicReturn,
-            ?assertMatch(?NO_ROUTE, ReplyCode),
-            #amqp_msg{payload = Payload2} = Content,
-            ?assertMatch(Payload, Payload2);
-        WhatsThis1 ->
-            exit({bad_message, WhatsThis1})
-    after 2000 ->
-        exit(no_return_received)
-    end,
-    amqp_channel:unregister_return_handler(Channel),
-    Publish = #'basic.publish'{exchange = X, routing_key = Key,
-                               mandatory = true},
-    amqp_channel:call(Channel, Publish, #amqp_msg{payload = Payload}),
-    ok = receive
-             {_BasicReturn = #'basic.return'{}, _Content} ->
-                 unexpected_return;
-             WhatsThis2 ->
-                 exit({bad_message, WhatsThis2})
-         after 2000 ->
-                 ok
-         end,
-    amqp_channel:call(Channel, #'exchange.delete'{exchange = X}),
-    teardown(Connection, Channel).
-
-channel_repeat_open_close_test() ->
-    {ok, Connection} = new_connection(),
-    lists:foreach(
-        fun(_) ->
-            {ok, Ch} = amqp_connection:open_channel(Connection),
-            ok = amqp_channel:close(Ch)
-        end, lists:seq(1, 50)),
-    amqp_connection:close(Connection),
-    wait_for_death(Connection).
-
-channel_multi_open_close_test() ->
-    {ok, Connection} = new_connection(),
-    [spawn_link(
-        fun() ->
-            try amqp_connection:open_channel(Connection) of
-                {ok, Ch}           -> try amqp_channel:close(Ch) of
-                                          ok                 -> ok;
-                                          closing            -> ok
-                                      catch
-                                          exit:{noproc, _} -> ok;
-                                          exit:{normal, _} -> ok
-                                      end;
-                closing            -> ok
-            catch
-                exit:{noproc, _} -> ok;
-                exit:{normal, _} -> ok
-            end
-        end) || _ <- lists:seq(1, 50)],
-    erlang:yield(),
-    amqp_connection:close(Connection),
-    wait_for_death(Connection).
-
-basic_ack_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    {ok, Q} = setup_publish(Channel),
-    {#'basic.get_ok'{delivery_tag = Tag}, _}
-        = amqp_channel:call(Channel, #'basic.get'{queue = Q, no_ack = false}),
-    amqp_channel:cast(Channel, #'basic.ack'{delivery_tag = Tag}),
-    teardown(Connection, Channel).
-
-basic_ack_call_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    {ok, Q} = setup_publish(Channel),
-    {#'basic.get_ok'{delivery_tag = Tag}, _}
-        = amqp_channel:call(Channel, #'basic.get'{queue = Q, no_ack = false}),
-    amqp_channel:call(Channel, #'basic.ack'{delivery_tag = Tag}),
-    teardown(Connection, Channel).
-
-basic_consume_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    X = <<"test">>,
-    amqp_channel:call(Channel, #'exchange.declare'{exchange = X}),
-    RoutingKey = <<"key">>,
-    Parent = self(),
-    [spawn_link(fun () ->
-                        consume_loop(Channel, X, RoutingKey, Parent, <<Tag:32>>)
-                end) || Tag <- lists:seq(1, ?Latch)],
-    timer:sleep(?Latch * 20),
-    Publish = #'basic.publish'{exchange = X, routing_key = RoutingKey},
-    amqp_channel:call(Channel, Publish, #amqp_msg{payload = <<"foobar">>}),
-    latch_loop(),
-    amqp_channel:call(Channel, #'exchange.delete'{exchange = X}),
-    teardown(Connection, Channel).
-
-consume_loop(Channel, X, RoutingKey, Parent, Tag) ->
-    #'queue.declare_ok'{queue = Q} =
-        amqp_channel:call(Channel, #'queue.declare'{}),
-    #'queue.bind_ok'{} =
-        amqp_channel:call(Channel, #'queue.bind'{queue = Q,
-                                                 exchange = X,
-                                                 routing_key = RoutingKey}),
-    #'basic.consume_ok'{} =
-        amqp_channel:call(Channel,
-                          #'basic.consume'{queue = Q, consumer_tag = Tag}),
-    receive #'basic.consume_ok'{consumer_tag = Tag} -> ok end,
-    receive {#'basic.deliver'{}, _} -> ok end,
-    #'basic.cancel_ok'{} =
-        amqp_channel:call(Channel, #'basic.cancel'{consumer_tag = Tag}),
-    receive #'basic.cancel_ok'{consumer_tag = Tag} -> ok end,
-    Parent ! finished.
-
-consume_notification_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    #'queue.declare_ok'{queue = Q} =
-        amqp_channel:call(Channel, #'queue.declare'{}),
-    #'basic.consume_ok'{consumer_tag = CTag} = ConsumeOk =
-        amqp_channel:call(Channel, #'basic.consume'{queue = Q}),
-    receive ConsumeOk -> ok end,
-    #'queue.delete_ok'{} =
-        amqp_channel:call(Channel, #'queue.delete'{queue = Q}),
-    receive #'basic.cancel'{consumer_tag = CTag} -> ok end,
-    amqp_channel:close(Channel),
-    ok.
-
-basic_recover_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(
-                        Connection, {amqp_direct_consumer, [self()]}),
-    #'queue.declare_ok'{queue = Q} =
-        amqp_channel:call(Channel, #'queue.declare'{}),
-    #'basic.consume_ok'{consumer_tag = Tag} =
-        amqp_channel:call(Channel, #'basic.consume'{queue = Q}),
-    receive #'basic.consume_ok'{consumer_tag = Tag} -> ok end,
-    Publish = #'basic.publish'{exchange = <<>>, routing_key = Q},
-    amqp_channel:call(Channel, Publish, #amqp_msg{payload = <<"foobar">>}),
-    receive
-        {#'basic.deliver'{consumer_tag = Tag}, _} ->
-            %% no_ack set to false, but don't send ack
-            ok
-    end,
-    BasicRecover = #'basic.recover'{requeue = true},
-    amqp_channel:cast(Channel, BasicRecover),
-    receive
-        {#'basic.deliver'{consumer_tag = Tag,
-                          delivery_tag = DeliveryTag2}, _} ->
-            amqp_channel:cast(Channel,
-                              #'basic.ack'{delivery_tag = DeliveryTag2})
-    end,
-    teardown(Connection, Channel).
-
-simultaneous_close_test() ->
-    {ok, Connection} = new_connection(),
-    ChannelNumber = 5,
-    {ok, Channel1} = amqp_connection:open_channel(Connection, ChannelNumber),
-
-    %% Publish to non-existent exchange and immediately close channel
-    amqp_channel:cast(Channel1, #'basic.publish'{exchange = <<"does-not-exist">>,
-                                                 routing_key = <<"a">>},
-                               #amqp_msg{payload = <<"foobar">>}),
-    try amqp_channel:close(Channel1) of
-        ok      -> wait_for_death(Channel1);
-        closing -> wait_for_death(Channel1)
-    catch
-        exit:{noproc, _}                                              -> ok;
-        exit:{{shutdown, {server_initiated_close, ?NOT_FOUND, _}}, _} -> ok
-    end,
-
-    %% Channel2 (opened with the exact same number as Channel1)
-    %% should not receive a close_ok (which is intended for Channel1)
-    {ok, Channel2} = amqp_connection:open_channel(Connection, ChannelNumber),
-
-    %% Make sure Channel2 functions normally
-    #'exchange.declare_ok'{} =
-        amqp_channel:call(Channel2, #'exchange.declare'{exchange = <<"test">>}),
-    #'exchange.delete_ok'{} =
-        amqp_channel:call(Channel2, #'exchange.delete'{exchange = <<"test">>}),
-
-    teardown(Connection, Channel2).
-
-channel_tune_negotiation_test() ->
-    {ok, Connection} = new_connection([{channel_max, 10}]),
-    amqp_connection:close(Connection).
-
-basic_qos_test() ->
-    [NoQos, Qos] = [basic_qos_test(Prefetch) || Prefetch <- [0,1]],
-    ExpectedRatio = (1+1) / (1+50/5),
-    FudgeFactor = 2, %% account for timing variations
-    ?assertMatch(true, Qos / NoQos < ExpectedRatio * FudgeFactor).
-
-basic_qos_test(Prefetch) ->
-    {ok, Connection} = new_connection(),
-    Messages = 100,
-    Workers = [5, 50],
-    Parent = self(),
-    {ok, Chan} = amqp_connection:open_channel(Connection),
-    #'queue.declare_ok'{queue = Q} =
-        amqp_channel:call(Chan, #'queue.declare'{}),
-    Kids = [spawn(
-            fun() ->
-                {ok, Channel} = amqp_connection:open_channel(Connection),
-                amqp_channel:call(Channel,
-                                  #'basic.qos'{prefetch_count = Prefetch}),
-                amqp_channel:call(Channel,
-                                  #'basic.consume'{queue = Q}),
-                Parent ! finished,
-                sleeping_consumer(Channel, Sleep, Parent)
-            end) || Sleep <- Workers],
-    latch_loop(length(Kids)),
-    spawn(fun() -> {ok, Channel} = amqp_connection:open_channel(Connection),
-                   producer_loop(Channel, Q, Messages)
-          end),
-    {Res, _} = timer:tc(erlang, apply, [fun latch_loop/1, [Messages]]),
-    [Kid ! stop || Kid <- Kids],
-    latch_loop(length(Kids)),
-    teardown(Connection, Chan),
-    Res.
-
-sleeping_consumer(Channel, Sleep, Parent) ->
-    receive
-        stop ->
-            do_stop(Channel, Parent);
-        #'basic.consume_ok'{} ->
-            sleeping_consumer(Channel, Sleep, Parent);
-        #'basic.cancel_ok'{}  ->
-            exit(unexpected_cancel_ok);
-        {#'basic.deliver'{delivery_tag = DeliveryTag}, _Content} ->
-            Parent ! finished,
-            receive stop -> do_stop(Channel, Parent)
-            after Sleep -> ok
-            end,
-            amqp_channel:cast(Channel,
-                              #'basic.ack'{delivery_tag = DeliveryTag}),
-            sleeping_consumer(Channel, Sleep, Parent)
-    end.
-
-do_stop(Channel, Parent) ->
-    Parent ! finished,
-    amqp_channel:close(Channel),
-    wait_for_death(Channel),
-    exit(normal).
-
-producer_loop(Channel, _RoutingKey, 0) ->
-    amqp_channel:close(Channel),
-    wait_for_death(Channel),
-    ok;
-
-producer_loop(Channel, RoutingKey, N) ->
-    Publish = #'basic.publish'{exchange = <<>>, routing_key = RoutingKey},
-    amqp_channel:call(Channel, Publish, #amqp_msg{payload = <<>>}),
-    producer_loop(Channel, RoutingKey, N - 1).
-
-confirm_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    #'confirm.select_ok'{} = amqp_channel:call(Channel, #'confirm.select'{}),
-    amqp_channel:register_confirm_handler(Channel, self()),
-    {ok, Q} = setup_publish(Channel),
-    {#'basic.get_ok'{}, _}
-        = amqp_channel:call(Channel, #'basic.get'{queue = Q, no_ack = false}),
-    ok = receive
-             #'basic.ack'{}  -> ok;
-             #'basic.nack'{} -> fail
-         after 2000 ->
-                 exit(did_not_receive_pub_ack)
-         end,
-    teardown(Connection, Channel).
-
-confirm_barrier_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    #'confirm.select_ok'{} = amqp_channel:call(Channel, #'confirm.select'{}),
-    [amqp_channel:call(Channel, #'basic.publish'{routing_key = <<"whoosh">>},
-                       #amqp_msg{payload = <<"foo">>})
-     || _ <- lists:seq(1, 1000)], %% Hopefully enough to get a multi-ack
-    true = amqp_channel:wait_for_confirms(Channel),
-    teardown(Connection, Channel).
-
-confirm_select_before_wait_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    try amqp_channel:wait_for_confirms(Channel) of
-        _ -> exit(success_despite_lack_of_confirm_mode)
-    catch
-        not_in_confirm_mode -> ok
-    end,
-    teardown(Connection, Channel).
-
-confirm_barrier_timeout_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    #'confirm.select_ok'{} = amqp_channel:call(Channel, #'confirm.select'{}),
-    [amqp_channel:call(Channel, #'basic.publish'{routing_key = <<"whoosh">>},
-                       #amqp_msg{payload = <<"foo">>})
-     || _ <- lists:seq(1, 1000)],
-    case amqp_channel:wait_for_confirms(Channel, 0) of
-        true    -> ok;
-        timeout -> ok
-    end,
-    teardown(Connection, Channel).
-
-confirm_barrier_die_timeout_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    #'confirm.select_ok'{} = amqp_channel:call(Channel, #'confirm.select'{}),
-    [amqp_channel:call(Channel, #'basic.publish'{routing_key = <<"whoosh">>},
-                       #amqp_msg{payload = <<"foo">>})
-     || _ <- lists:seq(1, 1000)],
-    try amqp_channel:wait_for_confirms_or_die(Channel, 0) of
-        true    -> ok
-    catch
-        exit:timeout -> ok
-    end,
-    amqp_connection:close(Connection),
-    wait_for_death(Connection).
-
-default_consumer_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    amqp_selective_consumer:register_default_consumer(Channel, self()),
-
-    #'queue.declare_ok'{queue = Q}
-        = amqp_channel:call(Channel, #'queue.declare'{}),
-    Pid = spawn(fun () -> receive
-                          after 10000 -> ok
-                          end
-                end),
-    #'basic.consume_ok'{} =
-        amqp_channel:subscribe(Channel, #'basic.consume'{queue = Q}, Pid),
-    erlang:monitor(process, Pid),
-    exit(Pid, shutdown),
-    receive
-        {'DOWN', _, process, _, _} ->
-            io:format("little consumer died out~n")
-    end,
-    Payload = <<"for the default consumer">>,
-    amqp_channel:call(Channel,
-                      #'basic.publish'{exchange = <<>>, routing_key = Q},
-                      #amqp_msg{payload = Payload}),
-
-    receive
-        {#'basic.deliver'{}, #'amqp_msg'{payload = Payload}} ->
-            ok
-    after 1000 ->
-            exit('default_consumer_didnt_work')
-    end,
-    teardown(Connection, Channel).
-
-subscribe_nowait_test() ->
-    {ok, Conn} = new_connection(),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    {ok, Q} = setup_publish(Ch),
-    CTag = <<"ctag">>,
-    amqp_selective_consumer:register_default_consumer(Ch, self()),
-    ok = amqp_channel:call(Ch, #'basic.consume'{queue        = Q,
-                                                consumer_tag = CTag,
-                                                nowait       = true}),
-    ok = amqp_channel:call(Ch, #'basic.cancel' {consumer_tag = CTag,
-                                                nowait       = true}),
-    ok = amqp_channel:call(Ch, #'basic.consume'{queue        = Q,
-                                                consumer_tag = CTag,
-                                                nowait       = true}),
-    receive
-        #'basic.consume_ok'{} ->
-            exit(unexpected_consume_ok);
-        {#'basic.deliver'{delivery_tag = DTag}, _Content} ->
-            amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DTag})
-    end,
-    teardown(Conn, Ch).
-
-basic_nack_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    #'queue.declare_ok'{queue = Q}
-        = amqp_channel:call(Channel, #'queue.declare'{}),
-
-    Payload = <<"m1">>,
-
-    amqp_channel:call(Channel,
-                      #'basic.publish'{exchange = <<>>, routing_key = Q},
-                      #amqp_msg{payload = Payload}),
-
-    #'basic.get_ok'{delivery_tag = Tag} =
-        get_and_assert_equals(Channel, Q, Payload, false),
-
-    amqp_channel:call(Channel, #'basic.nack'{delivery_tag = Tag,
-                                             multiple     = false,
-                                             requeue      = false}),
-
-    get_and_assert_empty(Channel, Q),
-    teardown(Connection, Channel).
-
-large_content_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    #'queue.declare_ok'{queue = Q}
-        = amqp_channel:call(Channel, #'queue.declare'{}),
-    {A1,A2,A3} = now(), random:seed(A1, A2, A3),
-    F = list_to_binary([random:uniform(256)-1 || _ <- lists:seq(1, 1000)]),
-    Payload = list_to_binary([[F || _ <- lists:seq(1, 1000)]]),
-    Publish = #'basic.publish'{exchange = <<>>, routing_key = Q},
-    amqp_channel:call(Channel, Publish, #amqp_msg{payload = Payload}),
-    get_and_assert_equals(Channel, Q, Payload),
-    teardown(Connection, Channel).
-
-%% ----------------------------------------------------------------------------
-%% Test for the network client
-%% Sends a bunch of messages and immediatly closes the connection without
-%% closing the channel. Then gets the messages back from the queue and expects
-%% all of them to have been sent.
-pub_and_close_test() ->
-    {ok, Connection1} = new_connection(just_network),
-    Payload = <<"eggs">>,
-    NMessages = 50000,
-    {ok, Channel1} = amqp_connection:open_channel(Connection1),
-    #'queue.declare_ok'{queue = Q} =
-        amqp_channel:call(Channel1, #'queue.declare'{}),
-    %% Send messages
-    pc_producer_loop(Channel1, <<>>, Q, Payload, NMessages),
-    %% Close connection without closing channels
-    amqp_connection:close(Connection1),
-    %% Get sent messages back and count them
-    {ok, Connection2} = new_connection(just_network),
-    {ok, Channel2} = amqp_connection:open_channel(
-                         Connection2, {amqp_direct_consumer, [self()]}),
-    amqp_channel:call(Channel2, #'basic.consume'{queue = Q, no_ack = true}),
-    receive #'basic.consume_ok'{} -> ok end,
-    ?assert(pc_consumer_loop(Channel2, Payload, 0) == NMessages),
-    %% Make sure queue is empty
-    #'queue.declare_ok'{queue = Q, message_count = NRemaining} =
-        amqp_channel:call(Channel2, #'queue.declare'{queue   = Q,
-                                                     passive = true}),
-    ?assert(NRemaining == 0),
-    amqp_channel:call(Channel2, #'queue.delete'{queue = Q}),
-    teardown(Connection2, Channel2),
-    ok.
-
-pc_producer_loop(_, _, _, _, 0) -> ok;
-pc_producer_loop(Channel, X, Key, Payload, NRemaining) ->
-    Publish = #'basic.publish'{exchange = X, routing_key = Key},
-    ok = amqp_channel:call(Channel, Publish, #amqp_msg{payload = Payload}),
-    pc_producer_loop(Channel, X, Key, Payload, NRemaining - 1).
-
-pc_consumer_loop(Channel, Payload, NReceived) ->
-    receive
-        {#'basic.deliver'{},
-         #amqp_msg{payload = DeliveredPayload}} ->
-            case DeliveredPayload of
-                Payload ->
-                    pc_consumer_loop(Channel, Payload, NReceived + 1);
-                _ ->
-                    exit(received_unexpected_content)
-            end
-    after 1000 ->
-        NReceived
-    end.
-
-%%---------------------------------------------------------------------------
-%% This tests whether RPC over AMQP produces the same result as invoking the
-%% same argument against the same underlying gen_server instance.
-rpc_test() ->
-    {ok, Connection} = new_connection(),
-    Fun = fun(X) -> X + 1 end,
-    RPCHandler = fun(X) -> term_to_binary(Fun(binary_to_term(X))) end,
-    Q = <<"rpc-test">>,
-    Server = amqp_rpc_server:start(Connection, Q, RPCHandler),
-    Client = amqp_rpc_client:start(Connection, Q),
-    Input = 1,
-    Reply = amqp_rpc_client:call(Client, term_to_binary(Input)),
-    Expected = Fun(Input),
-    DecodedReply = binary_to_term(Reply),
-    ?assertMatch(Expected, DecodedReply),
-    amqp_rpc_client:stop(Client),
-    amqp_rpc_server:stop(Server),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    amqp_channel:call(Channel, #'queue.delete'{queue = Q}),
-    amqp_connection:close(Connection),
-    wait_for_death(Connection),
-    ok.
-
-%% This tests if the RPC continues to generate valid correlation ids
-%% over a series of requests.
-rpc_client_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    Q = <<"rpc-test">>,
-    Latch = 255, % enough requests to tickle bad correlation ids
-    %% Start a server to return correlation ids to the client.
-    Server = spawn_link(fun() ->
-                                rpc_correlation_server(Channel, Q)
-                        end),
-    %% Generate a series of RPC requests on the same client.
-    Client = amqp_rpc_client:start(Connection, Q),
-    Parent = self(),
-    [spawn(fun() ->
-                   Reply = amqp_rpc_client:call(Client, <<>>),
-                   Parent ! {finished, Reply}
-           end) || _ <- lists:seq(1, Latch)],
-    %% Verify that the correlation ids are valid UTF-8 strings.
-    CorrelationIds = latch_loop(Latch),
-    [?assertMatch(<<_/binary>>, DecodedId)
-     || DecodedId <- [unicode:characters_to_binary(Id, utf8)
-                      || Id <- CorrelationIds]],
-    %% Cleanup.
-    Server ! stop,
-    amqp_rpc_client:stop(Client),
-    amqp_channel:call(Channel, #'queue.delete'{queue = Q}),
-    teardown(Connection, Channel),
-    ok.
-
-%% Consumer of RPC requests that replies with the CorrelationId.
-rpc_correlation_server(Channel, Q) ->
-    amqp_channel:register_return_handler(Channel, self()),
-    amqp_channel:call(Channel, #'queue.declare'{queue = Q}),
-    amqp_channel:call(Channel, #'basic.consume'{queue = Q,
-                                                consumer_tag = <<"server">>}),
-    rpc_client_consume_loop(Channel),
-    amqp_channel:call(Channel, #'basic.cancel'{consumer_tag = <<"server">>}),
-    amqp_channel:unregister_return_handler(Channel).
-
-rpc_client_consume_loop(Channel) ->
-    receive
-        stop ->
-            ok;
-        {#'basic.deliver'{delivery_tag = DeliveryTag},
-         #amqp_msg{props = Props}} ->
-            #'P_basic'{correlation_id = CorrelationId,
-                       reply_to = Q} = Props,
-            Properties = #'P_basic'{correlation_id = CorrelationId},
-            Publish = #'basic.publish'{exchange = <<>>,
-                                       routing_key = Q,
-                                       mandatory = true},
-            amqp_channel:call(
-              Channel, Publish, #amqp_msg{props = Properties,
-                                          payload = CorrelationId}),
-            amqp_channel:call(
-              Channel, #'basic.ack'{delivery_tag = DeliveryTag}),
-            rpc_client_consume_loop(Channel);
-        _ ->
-            rpc_client_consume_loop(Channel)
-    after 3000 ->
-            exit(no_request_received)
-    end.
-
-%%---------------------------------------------------------------------------
-
-%% connection.blocked, connection.unblocked
-
-connection_blocked_network_test() ->
-    {ok, Connection} = new_connection(just_network),
-    X = <<"amq.direct">>,
-    K = Payload = <<"x">>,
-    clear_resource_alarm(memory),
-    timer:sleep(1000),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    Parent = self(),
-    Child = spawn_link(
-              fun() ->
-                      receive
-                          #'connection.blocked'{} -> ok
-                      end,
-                      clear_resource_alarm(memory),
-                      receive
-                          #'connection.unblocked'{} -> ok
-                      end,
-                      Parent ! ok
-              end),
-    amqp_connection:register_blocked_handler(Connection, Child),
-    set_resource_alarm(memory),
-    Publish = #'basic.publish'{exchange = X,
-                               routing_key = K},
-    amqp_channel:call(Channel, Publish,
-                      #amqp_msg{payload = Payload}),
-    timer:sleep(1000),
-    receive
-        ok ->
-            clear_resource_alarm(memory),
-            clear_resource_alarm(disk),
-            ok
-    after 10000 ->
-        clear_resource_alarm(memory),
-        clear_resource_alarm(disk),
-        exit(did_not_receive_connection_blocked)
-    end.
-
-%%---------------------------------------------------------------------------
-
-setup_publish(Channel) ->
-    #'queue.declare_ok'{queue = Q} =
-        amqp_channel:call(Channel, #'queue.declare'{exclusive = true}),
-    ok = amqp_channel:call(Channel, #'basic.publish'{exchange    = <<>>,
-                                                     routing_key = Q},
-                           #amqp_msg{payload = <<"foobar">>}),
-    {ok, Q}.
-
-teardown(Connection, Channel) ->
-    amqp_channel:close(Channel),
-    wait_for_death(Channel),
-    amqp_connection:close(Connection),
-    wait_for_death(Connection).
-
-teardown_test() ->
-    {ok, Connection} = new_connection(),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    ?assertMatch(true, is_process_alive(Channel)),
-    ?assertMatch(true, is_process_alive(Connection)),
-    teardown(Connection, Channel),
-    ?assertMatch(false, is_process_alive(Channel)),
-    ?assertMatch(false, is_process_alive(Connection)).
-
-wait_for_death(Pid) ->
-    Ref = erlang:monitor(process, Pid),
-    receive {'DOWN', Ref, process, Pid, _Reason} -> ok
-    after ?DeathWait -> exit({timed_out_waiting_for_process_death, Pid})
-    end.
-
-latch_loop() ->
-    latch_loop(?Latch, []).
-
-latch_loop(Latch) ->
-    latch_loop(Latch, []).
-
-latch_loop(0, Acc) ->
-    Acc;
-latch_loop(Latch, Acc) ->
-    receive
-        finished        -> latch_loop(Latch - 1, Acc);
-        {finished, Ret} -> latch_loop(Latch - 1, [Ret | Acc])
-    after ?Latch * ?Wait -> exit(waited_too_long)
-    end.
-
-new_connection() ->
-    new_connection(both, []).
-
-new_connection(AllowedConnectionTypes) when is_atom(AllowedConnectionTypes) ->
-    new_connection(AllowedConnectionTypes, []);
-new_connection(Params) when is_list(Params) ->
-    new_connection(both, Params).
-
-new_connection(AllowedConnectionTypes, Params) ->
-    Params1 =
-        case {AllowedConnectionTypes,
-              os:getenv("AMQP_CLIENT_TEST_CONNECTION_TYPE")} of
-            {just_direct, "network"} ->
-                exit(normal);
-            {just_direct, "network_ssl"} ->
-                exit(normal);
-            {just_network, "direct"} ->
-                exit(normal);
-            {_, "network"} ->
-                make_network_params(Params);
-            {_, "network_ssl"} ->
-                {ok, [[CertsDir]]} = init:get_argument(erlang_client_ssl_dir),
-                make_network_params(
-                  [{ssl_options, [{cacertfile,
-                                   CertsDir ++ "/testca/cacert.pem"},
-                                  {certfile, CertsDir ++ "/client/cert.pem"},
-                                  {keyfile, CertsDir ++ "/client/key.pem"},
-                                  {verify, verify_peer},
-                                  {fail_if_no_peer_cert, true}]}] ++ Params);
-            {_, "direct"} ->
-                make_direct_params([{node, rabbit_nodes:make(rabbit)}] ++
-                                       Params)
-        end,
-    amqp_connection:start(Params1).
-
-%% Note: not all amqp_params_network fields supported.
-make_network_params(Props) ->
-    Pgv = fun (Key, Default) ->
-                  proplists:get_value(Key, Props, Default)
-          end,
-    #amqp_params_network{username     = Pgv(username, <<"guest">>),
-                         password     = Pgv(password, <<"guest">>),
-                         virtual_host = Pgv(virtual_host, <<"/">>),
-                         channel_max  = Pgv(channel_max, 0),
-                         ssl_options  = Pgv(ssl_options, none),
-                         host         = Pgv(host, "localhost")}.
-
-%% Note: not all amqp_params_direct fields supported.
-make_direct_params(Props) ->
-    Pgv = fun (Key, Default) ->
-                  proplists:get_value(Key, Props, Default)
-          end,
-    #amqp_params_direct{username     = Pgv(username, <<"guest">>),
-                        password     = Pgv(password, <<"guest">>),
-                        virtual_host = Pgv(virtual_host, <<"/">>),
-                        node         = Pgv(node, node())}.
-
-set_resource_alarm(memory) ->
-    os:cmd("cd ../rabbitmq-test; make set-resource-alarm SOURCE=memory");
-set_resource_alarm(disk) ->
-    os:cmd("cd ../rabbitmq-test; make set-resource-alarm SOURCE=disk").
-
-
-clear_resource_alarm(memory) ->
-    os:cmd("cd ../rabbitmq-test; make clear-resource-alarm SOURCE=memory");
-clear_resource_alarm(disk) ->
-    os:cmd("cd ../rabbitmq-test; make clear-resource-alarm SOURCE=disk").
-
-fmt(Fmt, Args) -> list_to_binary(rabbit_misc:format(Fmt, Args)).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-federation-management/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-federation-management/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/Makefile b/rabbitmq-server/plugins-src/rabbitmq-federation-management/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/README b/rabbitmq-server/plugins-src/rabbitmq-federation-management/README
deleted file mode 100644 (file)
index a80613d..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-Adds information on federation link status to the management
-plugin. Build it like any other plugin.
-
-If you have a heterogenous cluster (where the nodes have different
-plugins installed), this should be installed on the same nodes as the
-management plugin.
-
-The HTTP API is very simple: GET /api/federation-links.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/package.mk b/rabbitmq-server/plugins-src/rabbitmq-federation-management/package.mk
deleted file mode 100644 (file)
index 2a0757c..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-management
-
-CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f)
-define construct_app_commands
-       cp -r $(PACKAGE_DIR)/priv $(APP_DIR)
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-federation/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-federation/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/Makefile b/rabbitmq-server/plugins-src/rabbitmq-federation/Makefile
deleted file mode 100644 (file)
index b8305a2..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-CHAIN_TESTS=true
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/etc/setup-rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-federation/etc/setup-rabbit-test.sh
deleted file mode 100755 (executable)
index 9b2708a..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh -e
-sh -e `dirname $0`/rabbit-test.sh "`dirname $0`/../../rabbitmq-server/scripts/rabbitmqctl -n rabbit-test"
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/package.mk b/rabbitmq-server/plugins-src/rabbitmq-federation/package.mk
deleted file mode 100644 (file)
index a4c040f..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-erlang-client rabbitmq-test
-FILTER:=all
-COVER:=false
-WITH_BROKER_TEST_COMMANDS:=rabbit_test_runner:run_in_broker(\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\")
-WITH_BROKER_SETUP_SCRIPTS:=$(PACKAGE_DIR)/etc/setup-rabbit-test.sh
-STANDALONE_TEST_COMMANDS:=rabbit_test_runner:run_multi(\"$(UMBRELLA_BASE_DIR)/rabbitmq-server\",\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\",$(COVER),\"/tmp/rabbitmq-multi-node/plugins\")
-
-# NB: we cannot use PACKAGE_DIR in the body of this rule as it gets
-# expanded at the wrong time and set to the value of a completely
-# arbitrary package!
-$(PACKAGE_DIR)+pre-test:: $(PACKAGE_DIR)+dist
-       rm -rf /tmp/rabbitmq-multi-node/plugins
-       mkdir -p /tmp/rabbitmq-multi-node/plugins/plugins
-       cp -p $(UMBRELLA_BASE_DIR)/rabbitmq-federation/dist/*.ez /tmp/rabbitmq-multi-node/plugins/plugins
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_exchange_test.erl b/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_exchange_test.erl
deleted file mode 100644 (file)
index cce16f8..0000000
+++ /dev/null
@@ -1,744 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ Federation.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_federation_exchange_test).
-
--compile(export_all).
--include("rabbit_federation.hrl").
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_misc, [pget/2]).
--import(rabbit_federation_util, [name/1]).
--import(rabbit_test_util, [enable_plugin/2, disable_plugin/2]).
-
--import(rabbit_federation_test_util,
-        [expect/3, expect_empty/2,
-         set_upstream/3, clear_upstream/2, set_upstream_set/3,
-         set_policy/4, clear_policy/2,
-         set_policy_upstream/4, set_policy_upstreams/3,
-         disambiguate/1, no_plugins/1, single_cfg/0]).
-
--define(UPSTREAM_DOWNSTREAM, [x(<<"upstream">>),
-                              x(<<"fed.downstream">>)]).
-
-simple_test() ->
-    with_ch(
-      fun (Ch) ->
-              Q = bind_queue(Ch, <<"fed.downstream">>, <<"key">>),
-              await_binding(<<"upstream">>, <<"key">>),
-              publish_expect(Ch, <<"upstream">>, <<"key">>, Q, <<"HELLO">>)
-      end, ?UPSTREAM_DOWNSTREAM).
-
-multiple_upstreams_test() ->
-    with_ch(
-      fun (Ch) ->
-              Q = bind_queue(Ch, <<"fed12.downstream">>, <<"key">>),
-              await_binding(<<"upstream">>, <<"key">>),
-              await_binding(<<"upstream2">>, <<"key">>),
-              publish_expect(Ch, <<"upstream">>, <<"key">>, Q, <<"HELLO1">>),
-              publish_expect(Ch, <<"upstream2">>, <<"key">>, Q, <<"HELLO2">>)
-      end, [x(<<"upstream">>),
-            x(<<"upstream2">>),
-            x(<<"fed12.downstream">>)]).
-
-multiple_uris_test() ->
-    %% We can't use a direct connection for Kill() to work.
-    set_upstream(single_cfg(), <<"localhost">>,
-                 [<<"amqp://localhost">>, <<"amqp://localhost:5672">>]),
-    WithCh = fun(F) ->
-                     {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-                     {ok, Ch} = amqp_connection:open_channel(Conn),
-                     F(Ch),
-                     amqp_connection:close(Conn)
-             end,
-    WithCh(fun (Ch) -> declare_all(Ch, ?UPSTREAM_DOWNSTREAM) end),
-    expect_uris([<<"amqp://localhost">>, <<"amqp://localhost:5672">>]),
-    WithCh(fun (Ch) -> delete_all(Ch, ?UPSTREAM_DOWNSTREAM) end),
-    %% Put back how it was
-    set_upstream(single_cfg(), <<"localhost">>, <<"amqp://">>).
-
-expect_uris([])   -> ok;
-expect_uris(URIs) -> [Link] = rabbit_federation_status:status(),
-                     URI = pget(uri, Link),
-                     kill_only_connection(n("rabbit-test")),
-                     expect_uris(URIs -- [URI]).
-
-kill_only_connection(Node) ->
-    case connection_pids(Node) of
-        [Pid] -> catch rabbit_networking:close_connection(Pid, "boom"), %% [1]
-                 wait_for_pid_to_die(Node, Pid);
-        _     -> timer:sleep(100),
-                 kill_only_connection(Node)
-    end.
-
-%% [1] the catch is because we could still see a connection from a
-%% previous time round. If so that's fine (we'll just loop around
-%% again) but we don't want the test to fail because a connection
-%% closed as we were trying to close it.
-
-wait_for_pid_to_die(Node, Pid) ->
-    case connection_pids(Node) of
-        [Pid] -> timer:sleep(100),
-                 wait_for_pid_to_die(Node, Pid);
-        _     -> ok
-    end.
-
-
-multiple_downstreams_test() ->
-    with_ch(
-      fun (Ch) ->
-              Q1 = bind_queue(Ch, <<"fed.downstream">>, <<"key">>),
-              Q12 = bind_queue(Ch, <<"fed12.downstream2">>, <<"key">>),
-              await_binding(<<"upstream">>, <<"key">>, 2),
-              await_binding(<<"upstream2">>, <<"key">>),
-              publish(Ch, <<"upstream">>, <<"key">>, <<"HELLO1">>),
-              publish(Ch, <<"upstream2">>, <<"key">>, <<"HELLO2">>),
-              expect(Ch, Q1, [<<"HELLO1">>]),
-              expect(Ch, Q12, [<<"HELLO1">>, <<"HELLO2">>])
-      end, ?UPSTREAM_DOWNSTREAM ++
-          [x(<<"upstream2">>),
-           x(<<"fed12.downstream2">>)]).
-
-e2e_test() ->
-    with_ch(
-      fun (Ch) ->
-              bind_exchange(Ch, <<"downstream2">>, <<"fed.downstream">>,
-                            <<"key">>),
-              await_binding(<<"upstream">>, <<"key">>),
-              Q = bind_queue(Ch, <<"downstream2">>, <<"key">>),
-              publish_expect(Ch, <<"upstream">>, <<"key">>, Q, <<"HELLO1">>)
-      end, ?UPSTREAM_DOWNSTREAM ++ [x(<<"downstream2">>)]).
-
-unbind_on_delete_test() ->
-    with_ch(
-      fun (Ch) ->
-              Q1 = bind_queue(Ch, <<"fed.downstream">>, <<"key">>),
-              Q2 = bind_queue(Ch, <<"fed.downstream">>, <<"key">>),
-              await_binding(<<"upstream">>, <<"key">>),
-              delete_queue(Ch, Q2),
-              publish_expect(Ch, <<"upstream">>, <<"key">>, Q1, <<"HELLO">>)
-      end, ?UPSTREAM_DOWNSTREAM).
-
-unbind_on_unbind_test() ->
-    with_ch(
-      fun (Ch) ->
-              Q1 = bind_queue(Ch, <<"fed.downstream">>, <<"key">>),
-              Q2 = bind_queue(Ch, <<"fed.downstream">>, <<"key">>),
-              await_binding(<<"upstream">>, <<"key">>),
-              unbind_queue(Ch, Q2, <<"fed.downstream">>, <<"key">>),
-              publish_expect(Ch, <<"upstream">>, <<"key">>, Q1, <<"HELLO">>),
-              delete_queue(Ch, Q2)
-      end, ?UPSTREAM_DOWNSTREAM).
-
-user_id_with() -> disambiguate(start_ab).
-user_id([Rabbit, Hare]) ->
-    set_policy_upstream(Rabbit, <<"^test$">>, <<"amqp://localhost:5673">>, []),
-    Perm = fun (F, A) ->
-                  ok = rpc:call(pget(node, Hare),
-                                rabbit_auth_backend_internal, F, A)
-           end,
-    Perm(add_user, [<<"hare-user">>, <<"hare-user">>]),
-    Perm(set_permissions, [<<"hare-user">>,
-                           <<"/">>, <<".*">>, <<".*">>, <<".*">>]),
-
-    {_, Ch} = rabbit_test_util:connect(Rabbit),
-    {ok, Conn2} = amqp_connection:start(
-                    #amqp_params_network{username = <<"hare-user">>,
-                                         password = <<"hare-user">>,
-                                         port     = pget(port, Hare)}),
-    {ok, Ch2} = amqp_connection:open_channel(Conn2),
-
-    declare_exchange(Ch2, x(<<"test">>)),
-    declare_exchange(Ch, x(<<"test">>)),
-    Q = bind_queue(Ch, <<"test">>, <<"key">>),
-    await_binding(Hare, <<"test">>, <<"key">>),
-
-    Msg = #amqp_msg{props   = #'P_basic'{user_id = <<"hare-user">>},
-                    payload = <<"HELLO">>},
-
-    SafeUri = fun (H) ->
-                      {array, [{table, Recv}]} =
-                          rabbit_misc:table_lookup(
-                            H, <<"x-received-from">>),
-                      ?assertEqual(
-                         {longstr, <<"amqp://localhost:5673">>},
-                         rabbit_misc:table_lookup(Recv, <<"uri">>))
-              end,
-    ExpectUser =
-        fun (ExpUser) ->
-                fun () ->
-                        receive
-                            {#'basic.deliver'{},
-                             #amqp_msg{props   = Props,
-                                       payload = Payload}} ->
-                                #'P_basic'{user_id = ActUser,
-                                           headers = Headers} = Props,
-                                SafeUri(Headers),
-                                ?assertEqual(<<"HELLO">>, Payload),
-                                ?assertEqual(ExpUser, ActUser)
-                        end
-                end
-        end,
-
-    publish(Ch2, <<"test">>, <<"key">>, Msg),
-    expect(Ch, Q, ExpectUser(undefined)),
-
-    set_policy_upstream(Rabbit, <<"^test$">>, <<"amqp://localhost:5673">>,
-                        [{<<"trust-user-id">>, true}]),
-
-    publish(Ch2, <<"test">>, <<"key">>, Msg),
-    expect(Ch, Q, ExpectUser(<<"hare-user">>)),
-
-    ok.
-
-%% In order to test that unbinds get sent we deliberately set up a
-%% broken config - with topic upstream and fanout downstream. You
-%% shouldn't really do this, but it lets us see "extra" messages that
-%% get sent.
-unbind_gets_transmitted_test() ->
-    with_ch(
-      fun (Ch) ->
-              Q11 = bind_queue(Ch, <<"fed.downstream">>, <<"key1">>),
-              Q12 = bind_queue(Ch, <<"fed.downstream">>, <<"key1">>),
-              Q21 = bind_queue(Ch, <<"fed.downstream">>, <<"key2">>),
-              Q22 = bind_queue(Ch, <<"fed.downstream">>, <<"key2">>),
-              await_binding(<<"upstream">>, <<"key1">>),
-              await_binding(<<"upstream">>, <<"key2">>),
-              [delete_queue(Ch, Q) || Q <- [Q12, Q21, Q22]],
-              publish(Ch, <<"upstream">>, <<"key1">>, <<"YES">>),
-              publish(Ch, <<"upstream">>, <<"key2">>, <<"NO">>),
-              expect(Ch, Q11, [<<"YES">>]),
-              expect_empty(Ch, Q11)
-      end, [x(<<"upstream">>),
-            x(<<"fed.downstream">>)]).
-
-no_loop_test() ->
-    with_ch(
-      fun (Ch) ->
-              Q1 = bind_queue(Ch, <<"one">>, <<"key">>),
-              Q2 = bind_queue(Ch, <<"two">>, <<"key">>),
-              await_binding(<<"one">>, <<"key">>, 2),
-              await_binding(<<"two">>, <<"key">>, 2),
-              publish(Ch, <<"one">>, <<"key">>, <<"Hello from one">>),
-              publish(Ch, <<"two">>, <<"key">>, <<"Hello from two">>),
-              expect(Ch, Q1, [<<"Hello from one">>, <<"Hello from two">>]),
-              expect(Ch, Q2, [<<"Hello from one">>, <<"Hello from two">>]),
-              expect_empty(Ch, Q1),
-              expect_empty(Ch, Q2)
-      end, [x(<<"one">>),
-            x(<<"two">>)]).
-
-binding_recovery_with() -> disambiguate(
-                             fun (Init) ->
-                                     rabbit_test_configs:start_nodes(Init, [a])
-                             end).
-binding_recovery([Rabbit]) ->
-    Q = <<"durable-Q">>,
-    {_, Ch} = rabbit_test_util:connect(Rabbit),
-
-    rabbit_federation_test_util:set_upstream(
-      Rabbit, <<"rabbit">>, <<"amqp://localhost:5672">>),
-    rabbit_federation_test_util:set_upstream_set(
-      Rabbit, <<"upstream">>,
-      [{<<"rabbit">>, [{<<"exchange">>, <<"upstream">>}]},
-       {<<"rabbit">>, [{<<"exchange">>, <<"upstream2">>}]}]),
-    rabbit_federation_test_util:set_policy(
-      Rabbit, <<"fed">>, <<"^fed\\.">>, <<"upstream">>),
-
-    declare_all(Ch, [x(<<"upstream2">>) | ?UPSTREAM_DOWNSTREAM]),
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Ch, #'queue.declare'{queue   = Q,
-                                               durable = true}),
-    bind_queue(Ch, Q, <<"fed.downstream">>, <<"key">>),
-    timer:sleep(100), %% To get the suffix written
-
-    %% i.e. don't clean up
-    Rabbit2 = rabbit_test_configs:restart_node(Rabbit),
-
-    ?assert(none =/= suffix(Rabbit2, <<"rabbit">>, "upstream")),
-    ?assert(none =/= suffix(Rabbit2, <<"rabbit">>, "upstream2")),
-
-    %% again don't clean up
-    Rabbit3 = rabbit_test_configs:restart_node(Rabbit2),
-    {_, Ch3} = rabbit_test_util:connect(Rabbit3),
-
-    rabbit_test_util:set_param(
-      Rabbit, <<"federation-upstream-set">>, <<"upstream">>,
-      [[{<<"upstream">>, <<"rabbit">>}, {<<"exchange">>, <<"upstream">>}]]),
-
-    publish_expect(Ch3, <<"upstream">>, <<"key">>, Q, <<"HELLO">>),
-    ?assert(none =/= suffix(Rabbit3, <<"rabbit">>, "upstream")),
-    ?assertEqual(none, suffix(Rabbit3, <<"rabbit">>, "upstream2")),
-    delete_all(Ch3, [x(<<"upstream2">>) | ?UPSTREAM_DOWNSTREAM]),
-    delete_queue(Ch3, Q),
-    ok.
-
-suffix(Cfg, Name, XName) ->
-    rpc:call(pget(node, Cfg), rabbit_federation_db, get_active_suffix,
-             [r(<<"fed.downstream">>),
-              #upstream{name          = Name,
-                        exchange_name = list_to_binary(XName)}, none]).
-
-%% TODO remove
-n(Nodename) ->
-    {_, NodeHost} = rabbit_nodes:parts(node()),
-    rabbit_nodes:make({Nodename, NodeHost}).
-
-restart_upstream_with() -> disambiguate(start_ab).
-restart_upstream([Rabbit, Hare]) ->
-    {_, Downstream} = rabbit_test_util:connect(Rabbit),
-    {_, Upstream}   = rabbit_test_util:connect(Hare),
-
-    rabbit_federation_test_util:set_upstream(
-      Rabbit, <<"hare">>, <<"amqp://localhost:5673">>),
-    rabbit_federation_test_util:set_upstream_set(
-      Rabbit, <<"upstream">>,
-      [{<<"hare">>, [{<<"exchange">>, <<"upstream">>}]}]),
-    rabbit_federation_test_util:set_policy(
-      Rabbit, <<"hare">>, <<"^hare\\.">>, <<"upstream">>),
-
-    declare_exchange(Upstream, x(<<"upstream">>)),
-    declare_exchange(Downstream, x(<<"hare.downstream">>)),
-
-    Qstays = bind_queue(Downstream, <<"hare.downstream">>, <<"stays">>),
-    Qgoes = bind_queue(Downstream, <<"hare.downstream">>, <<"goes">>),
-
-    Hare2 = rabbit_test_configs:stop_node(Hare),
-
-    Qcomes = bind_queue(Downstream, <<"hare.downstream">>, <<"comes">>),
-    unbind_queue(Downstream, Qgoes, <<"hare.downstream">>, <<"goes">>),
-
-    Hare3 = rabbit_test_configs:start_node(Hare2),
-    {_, Upstream1} = rabbit_test_util:connect(Hare3),
-
-    %% Wait for the link to come up and for these bindings
-    %% to be transferred
-    await_binding(Hare, <<"upstream">>, <<"comes">>, 1),
-    await_binding_absent(Hare, <<"upstream">>, <<"goes">>),
-    await_binding(Hare, <<"upstream">>, <<"stays">>, 1),
-
-    publish(Upstream1, <<"upstream">>, <<"goes">>, <<"GOES">>),
-    publish(Upstream1, <<"upstream">>, <<"stays">>, <<"STAYS">>),
-    publish(Upstream1, <<"upstream">>, <<"comes">>, <<"COMES">>),
-
-    expect(Downstream, Qstays, [<<"STAYS">>]),
-    expect(Downstream, Qcomes, [<<"COMES">>]),
-    expect_empty(Downstream, Qgoes),
-
-    delete_exchange(Downstream, <<"hare.downstream">>),
-    delete_exchange(Upstream1, <<"upstream">>),
-    ok.
-
-%% flopsy, mopsy and cottontail, connected in a ring with max_hops = 2
-%% for each connection. We should not see any duplicates.
-
-max_hops_with() -> disambiguate(start_abc).
-max_hops([Flopsy, Mopsy, Cottontail]) ->
-    [set_policy_upstream(
-       Cfg, <<"^ring$">>,
-       list_to_binary("amqp://localhost:" ++ integer_to_list(Port)),
-       [{<<"max-hops">>, 2}])
-     || {Cfg, Port} <- [{Flopsy,     pget(port, Cottontail)},
-                        {Mopsy,      pget(port, Flopsy)},
-                        {Cottontail, pget(port, Mopsy)}]],
-
-    {_, FlopsyCh}     = rabbit_test_util:connect(Flopsy),
-    {_, MopsyCh}      = rabbit_test_util:connect(Mopsy),
-    {_, CottontailCh} = rabbit_test_util:connect(Cottontail),
-
-    declare_exchange(FlopsyCh,     x(<<"ring">>)),
-    declare_exchange(MopsyCh,      x(<<"ring">>)),
-    declare_exchange(CottontailCh, x(<<"ring">>)),
-
-    Q1 = bind_queue(FlopsyCh,     <<"ring">>, <<"key">>),
-    Q2 = bind_queue(MopsyCh,      <<"ring">>, <<"key">>),
-    Q3 = bind_queue(CottontailCh, <<"ring">>, <<"key">>),
-
-    await_binding(Flopsy,     <<"ring">>, <<"key">>, 3),
-    await_binding(Mopsy,      <<"ring">>, <<"key">>, 3),
-    await_binding(Cottontail, <<"ring">>, <<"key">>, 3),
-
-    publish(FlopsyCh,     <<"ring">>, <<"key">>, <<"HELLO flopsy">>),
-    publish(MopsyCh,      <<"ring">>, <<"key">>, <<"HELLO mopsy">>),
-    publish(CottontailCh, <<"ring">>, <<"key">>, <<"HELLO cottontail">>),
-
-    Msgs = [<<"HELLO flopsy">>, <<"HELLO mopsy">>, <<"HELLO cottontail">>],
-    expect(FlopsyCh,     Q1, Msgs),
-    expect(MopsyCh,      Q2, Msgs),
-    expect(CottontailCh, Q3, Msgs),
-    expect_empty(FlopsyCh,     Q1),
-    expect_empty(MopsyCh,      Q2),
-    expect_empty(CottontailCh, Q3),
-    ok.
-
-%% Two nodes, both federated with each other, and max_hops set to a
-%% high value. Things should not get out of hand.
-cycle_detection_with() -> disambiguate(start_ab).
-cycle_detection([Cycle1, Cycle2]) ->
-    [set_policy_upstream(
-       Cfg, <<"^cycle$">>,
-       list_to_binary("amqp://localhost:" ++ integer_to_list(Port)),
-       [{<<"max-hops">>, 10}])
-     || {Cfg, Port} <- [{Cycle1, pget(port, Cycle2)},
-                        {Cycle2, pget(port, Cycle1)}]],
-
-    {_, Cycle1Ch} = rabbit_test_util:connect(Cycle1),
-    {_, Cycle2Ch} = rabbit_test_util:connect(Cycle2),
-
-    declare_exchange(Cycle1Ch, x(<<"cycle">>)),
-    declare_exchange(Cycle2Ch, x(<<"cycle">>)),
-
-    Q1 = bind_queue(Cycle1Ch, <<"cycle">>, <<"key">>),
-    Q2 = bind_queue(Cycle2Ch, <<"cycle">>, <<"key">>),
-
-    %% "key" present twice because once for the local queue and once
-    %% for federation in each case
-    await_binding(Cycle1, <<"cycle">>, <<"key">>, 2),
-    await_binding(Cycle2, <<"cycle">>, <<"key">>, 2),
-
-    publish(Cycle1Ch, <<"cycle">>, <<"key">>, <<"HELLO1">>),
-    publish(Cycle2Ch, <<"cycle">>, <<"key">>, <<"HELLO2">>),
-
-    Msgs = [<<"HELLO1">>, <<"HELLO2">>],
-    expect(Cycle1Ch, Q1, Msgs),
-    expect(Cycle2Ch, Q2, Msgs),
-    expect_empty(Cycle1Ch, Q1),
-    expect_empty(Cycle2Ch, Q2),
-
-    ok.
-
-%% Arrows indicate message flow. Numbers indicate max_hops.
-%%
-%% Dylan ---1--> Bugs ---2--> Jessica
-%% |^                              |^
-%% |\--------------1---------------/|
-%% \---------------1----------------/
-%%
-%%
-%% We want to demonstrate that if we bind a queue locally at each
-%% broker, (exactly) the following bindings propagate:
-%%
-%% Bugs binds to Dylan
-%% Jessica binds to Bugs, which then propagates on to Dylan
-%% Jessica binds to Dylan directly
-%% Dylan binds to Jessica.
-%%
-%% i.e. Dylan has two bindings from Jessica and one from Bugs
-%%      Bugs has one binding from Jessica
-%%      Jessica has one binding from Dylan
-%%
-%% So we tag each binding with its original broker and see how far it gets
-%%
-%% Also we check that when we tear down the original bindings
-%% that we get rid of everything again.
-
-binding_propagation_with() -> disambiguate(start_abc).
-binding_propagation([Dylan, Bugs, Jessica]) ->
-    set_policy_upstream( Dylan,   <<"^x$">>, <<"amqp://localhost:5674">>, []),
-    set_policy_upstream( Bugs,    <<"^x$">>, <<"amqp://localhost:5672">>, []),
-    set_policy_upstreams(Jessica, <<"^x$">>, [{<<"amqp://localhost:5672">>, []},
-                                              {<<"amqp://localhost:5673">>,
-                                               [{<<"max-hops">>, 2}]}]),
-    {_, DylanCh}   = rabbit_test_util:connect(Dylan),
-    {_, BugsCh}    = rabbit_test_util:connect(Bugs),
-    {_, JessicaCh} = rabbit_test_util:connect(Jessica),
-
-    declare_exchange(DylanCh,   x(<<"x">>)),
-    declare_exchange(BugsCh,    x(<<"x">>)),
-    declare_exchange(JessicaCh, x(<<"x">>)),
-
-    Q1 = bind_queue(DylanCh,   <<"x">>, <<"dylan">>),
-    Q2 = bind_queue(BugsCh,    <<"x">>, <<"bugs">>),
-    Q3 = bind_queue(JessicaCh, <<"x">>, <<"jessica">>),
-
-    await_binding( Dylan,   <<"x">>, <<"jessica">>, 2),
-    await_bindings(Dylan,   <<"x">>, [<<"bugs">>, <<"dylan">>]),
-    await_bindings(Bugs,    <<"x">>, [<<"jessica">>, <<"bugs">>]),
-    await_bindings(Jessica, <<"x">>, [<<"dylan">>, <<"jessica">>]),
-
-    delete_queue(DylanCh,   Q1),
-    delete_queue(BugsCh,    Q2),
-    delete_queue(JessicaCh, Q3),
-
-    await_bindings(Dylan,   <<"x">>, []),
-    await_bindings(Bugs,    <<"x">>, []),
-    await_bindings(Jessica, <<"x">>, []),
-
-    ok.
-
-upstream_has_no_federation_with() ->
-    disambiguate(fun (Init) ->
-                         Inits = [Init, no_plugins(Init)],
-                         rabbit_test_configs:start_nodes(Inits, [a, b])
-                 end).
-upstream_has_no_federation([Rabbit, Hare]) ->
-    set_policy_upstream(Rabbit, <<"^test$">>, <<"amqp://localhost:5673">>, []),
-    {_, Downstream} = rabbit_test_util:connect(Rabbit),
-    {_, Upstream}   = rabbit_test_util:connect(Hare),
-    declare_exchange(Upstream, x(<<"test">>)),
-    declare_exchange(Downstream, x(<<"test">>)),
-    Q = bind_queue(Downstream, <<"test">>, <<"routing">>),
-    await_binding(Hare, <<"test">>, <<"routing">>),
-    publish(Upstream, <<"test">>, <<"routing">>, <<"HELLO">>),
-    expect(Downstream, Q, [<<"HELLO">>]),
-    ok.
-
-dynamic_reconfiguration_test() ->
-    Cfg = single_cfg(),
-    with_ch(
-      fun (_Ch) ->
-              Xs = [<<"all.fed1">>, <<"all.fed2">>],
-              %% Left from the conf we set up for previous tests
-              assert_connections(Xs, [<<"localhost">>, <<"local5673">>]),
-
-              %% Test that clearing connections works
-              clear_upstream(Cfg, <<"localhost">>),
-              clear_upstream(Cfg, <<"local5673">>),
-              assert_connections(Xs, []),
-
-              %% Test that readding them and changing them works
-              set_upstream(Cfg, <<"localhost">>, <<"amqp://localhost">>),
-              %% Do it twice so we at least hit the no-restart optimisation
-              set_upstream(Cfg, <<"localhost">>, <<"amqp://">>),
-              set_upstream(Cfg, <<"localhost">>, <<"amqp://">>),
-              assert_connections(Xs, [<<"localhost">>]),
-
-              %% And re-add the last - for next test
-              set_upstream(Cfg, <<"local5673">>, <<"amqp://localhost:5673">>)
-      end, [x(<<"all.fed1">>), x(<<"all.fed2">>)]).
-
-dynamic_reconfiguration_integrity_test() ->
-    Cfg = single_cfg(),
-    with_ch(
-      fun (_Ch) ->
-              Xs = [<<"new.fed1">>, <<"new.fed2">>],
-
-              %% Declared exchanges with nonexistent set - no links
-              assert_connections(Xs, []),
-
-              %% Create the set - links appear
-              set_upstream_set(Cfg, <<"new-set">>, [{<<"localhost">>, []}]),
-              assert_connections(Xs, [<<"localhost">>]),
-
-              %% Add nonexistent connections to set - nothing breaks
-              set_upstream_set(
-                Cfg, <<"new-set">>, [{<<"localhost">>, []},
-                                     {<<"does-not-exist">>, []}]),
-              assert_connections(Xs, [<<"localhost">>]),
-
-              %% Change connection in set - links change
-              set_upstream_set(Cfg, <<"new-set">>, [{<<"local5673">>, []}]),
-              assert_connections(Xs, [<<"local5673">>])
-      end, [x(<<"new.fed1">>), x(<<"new.fed2">>)]).
-
-federate_unfederate_test() ->
-    Cfg = single_cfg(),
-    with_ch(
-      fun (_Ch) ->
-              Xs = [<<"dyn.exch1">>, <<"dyn.exch2">>],
-
-              %% Declared non-federated exchanges - no links
-              assert_connections(Xs, []),
-
-              %% Federate them - links appear
-              set_policy(Cfg, <<"dyn">>, <<"^dyn\\.">>, <<"all">>),
-              assert_connections(Xs, [<<"localhost">>, <<"local5673">>]),
-
-              %% Change policy - links change
-              set_policy(Cfg, <<"dyn">>, <<"^dyn\\.">>, <<"localhost">>),
-              assert_connections(Xs, [<<"localhost">>]),
-
-              %% Unfederate them - links disappear
-              clear_policy(Cfg, <<"dyn">>),
-              assert_connections(Xs, [])
-      end, [x(<<"dyn.exch1">>), x(<<"dyn.exch2">>)]).
-
-dynamic_plugin_stop_start_test() ->
-    Cfg = single_cfg(),
-    X1 = <<"dyn.exch1">>,
-    X2 = <<"dyn.exch2">>,
-    with_ch(
-      fun (Ch) ->
-              set_policy(Cfg, <<"dyn">>, <<"^dyn\\.">>, <<"localhost">>),
-
-              %% Declare federated exchange - get link
-              assert_connections([X1], [<<"localhost">>]),
-
-              %% Disable plugin, link goes
-              ok = disable_plugin(Cfg, "rabbitmq_federation"),
-              %% We can't check with status for obvious reasons...
-              undefined = whereis(rabbit_federation_sup),
-              {error, not_found} = rabbit_registry:lookup_module(
-                                     exchange, 'x-federation-upstream'),
-
-              %% Create exchange then re-enable plugin, links appear
-              declare_exchange(Ch, x(X2)),
-              ok = enable_plugin(Cfg, "rabbitmq_federation"),
-              assert_connections([X1, X2], [<<"localhost">>]),
-              {ok, _} = rabbit_registry:lookup_module(
-                          exchange, 'x-federation-upstream'),
-
-              %% Test both exchanges work. They are just federated to
-              %% themselves so should duplicate messages.
-              [begin
-                   Q = bind_queue(Ch, X, <<"key">>),
-                   await_binding(Cfg, X, <<"key">>, 2),
-                   publish(Ch, X, <<"key">>, <<"HELLO">>),
-                   expect(Ch, Q, [<<"HELLO">>, <<"HELLO">>]),
-                   delete_queue(Ch, Q)
-               end || X <- [X1, X2]],
-
-              clear_policy(Cfg, <<"dyn">>),
-              assert_connections([X1, X2], [])
-      end, [x(X1)]).
-
-%%----------------------------------------------------------------------------
-
-with_ch(Fun, Xs) ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    declare_all(Ch, Xs),
-    rabbit_federation_test_util:assert_status(
-      Xs, {exchange, upstream_exchange}),
-    Fun(Ch),
-    delete_all(Ch, Xs),
-    amqp_connection:close(Conn),
-    cleanup(single_cfg()),
-    ok.
-
-cleanup(Cfg) ->
-    [rpc:call(pget(node, Cfg), rabbit_amqqueue, delete, [Q, false, false]) ||
-        Q <- queues(pget(node, Cfg))].
-
-queues(Node) ->
-    case rpc:call(Node, rabbit_amqqueue, list, [<<"/">>]) of
-        {badrpc, _} -> [];
-        Qs          -> Qs
-    end.
-
-stop_other_node(Node) ->
-    cleanup(Node),
-    rabbit_federation_test_util:stop_other_node(Node).
-
-declare_all(Ch, Xs) -> [declare_exchange(Ch, X) || X <- Xs].
-delete_all(Ch, Xs) ->
-    [delete_exchange(Ch, X) || #'exchange.declare'{exchange = X} <- Xs].
-
-declare_exchange(Ch, X) ->
-    amqp_channel:call(Ch, X).
-
-x(Name) -> x(Name, <<"topic">>).
-
-x(Name, Type) ->
-    #'exchange.declare'{exchange = Name,
-                        type     = Type,
-                        durable  = true}.
-
-r(Name) -> rabbit_misc:r(<<"/">>, exchange, Name).
-
-declare_queue(Ch) ->
-    #'queue.declare_ok'{queue = Q} =
-        amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
-    Q.
-
-bind_queue(Ch, Q, X, Key) ->
-    amqp_channel:call(Ch, #'queue.bind'{queue       = Q,
-                                        exchange    = X,
-                                        routing_key = Key}).
-
-unbind_queue(Ch, Q, X, Key) ->
-    amqp_channel:call(Ch, #'queue.unbind'{queue       = Q,
-                                          exchange    = X,
-                                          routing_key = Key}).
-
-bind_exchange(Ch, D, S, Key) ->
-    amqp_channel:call(Ch, #'exchange.bind'{destination = D,
-                                           source      = S,
-                                           routing_key = Key}).
-
-bind_queue(Ch, X, Key) ->
-    Q = declare_queue(Ch),
-    bind_queue(Ch, Q, X, Key),
-    Q.
-
-delete_exchange(Ch, X) ->
-    amqp_channel:call(Ch, #'exchange.delete'{exchange = X}).
-
-delete_queue(Ch, Q) ->
-    amqp_channel:call(Ch, #'queue.delete'{queue = Q}).
-
-await_binding(X, Key)         -> await_binding(single_cfg(), X, Key, 1).
-await_binding(X, Key, Count)
-  when is_binary(X)           -> await_binding(single_cfg(), X, Key, Count);
-await_binding(Broker, X, Key) -> await_binding(Broker,       X, Key, 1).
-
-await_binding(Node, X, Key, Count) when is_atom(Node) ->
-    case bound_keys_from(Node, X, Key) of
-        L when length(L) <   Count -> timer:sleep(100),
-                                      await_binding(Node, X, Key, Count);
-        L when length(L) =:= Count -> ok;
-        L                          -> exit({too_many_bindings,
-                                            X, Key, Count, L})
-    end;
-await_binding(Cfg, X, Key, Count) ->
-     await_binding(pget(node, Cfg), X, Key, Count).
-
-await_bindings(Broker, X, Keys) ->
-    [await_binding(Broker, X, Key) || Key <- Keys].
-
-await_binding_absent(Node, X, Key) when is_atom(Node) ->
-    case bound_keys_from(Node, X, Key) of
-        [] -> ok;
-        _  -> timer:sleep(100),
-              await_binding_absent(Node, X, Key)
-    end;
-await_binding_absent(Cfg, X, Key) ->
-     await_binding_absent(pget(node, Cfg), X, Key).
-
-bound_keys_from(Node, X, Key) ->
-    [K || #binding{key = K} <-
-              rpc:call(Node, rabbit_binding, list_for_source, [r(X)]),
-          K =:= Key].
-
-publish(Ch, X, Key, Payload) when is_binary(Payload) ->
-    publish(Ch, X, Key, #amqp_msg{payload = Payload});
-
-publish(Ch, X, Key, Msg = #amqp_msg{}) ->
-    amqp_channel:call(Ch, #'basic.publish'{exchange    = X,
-                                           routing_key = Key}, Msg).
-
-publish_expect(Ch, X, Key, Q, Payload) ->
-    publish(Ch, X, Key, Payload),
-    expect(Ch, Q, [Payload]).
-
-%%----------------------------------------------------------------------------
-
-assert_connections(Xs, Conns) ->
-    Links = [{X, C, X} ||
-                X <- Xs,
-                C <- Conns],
-    Remaining = lists:foldl(
-                  fun (Link, Status) ->
-                          rabbit_federation_test_util:assert_link_status(
-                            Link, Status, {exchange, upstream_exchange})
-                  end, rabbit_federation_status:status(), Links),
-    ?assertEqual([], Remaining),
-    ok.
-
-connection_pids(Node) ->
-    [P || [{pid, P}] <-
-              rpc:call(Node, rabbit_networking, connection_info_all, [[pid]])].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_queue_test.erl b/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_queue_test.erl
deleted file mode 100644 (file)
index d58c0d6..0000000
+++ /dev/null
@@ -1,235 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ Federation.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_federation_queue_test).
-
--compile(export_all).
--include("rabbit_federation.hrl").
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_misc, [pget/2]).
--import(rabbit_federation_util, [name/1]).
--import(rabbit_test_util, [enable_plugin/2, disable_plugin/2]).
-
--import(rabbit_federation_test_util,
-        [expect/3,
-         set_upstream/3, clear_upstream/2, set_policy/4, clear_policy/2,
-         set_policy_upstream/4, set_policy_upstreams/3,
-         disambiguate/1, single_cfg/0]).
-
--define(UPSTREAM_DOWNSTREAM, [q(<<"upstream">>),
-                              q(<<"fed.downstream">>)]).
-
-%% Used in restart_upstream_test
--define(HARE, {"hare", 5673}).
-
-simple_test() ->
-    with_ch(
-      fun (Ch) ->
-              expect_federation(Ch, <<"upstream">>, <<"fed.downstream">>)
-      end, [q(<<"upstream">>),
-            q(<<"fed.downstream">>)]).
-
-multiple_upstreams_test() ->
-    with_ch(
-      fun (Ch) ->
-              expect_federation(Ch, <<"upstream">>, <<"fed12.downstream">>),
-              expect_federation(Ch, <<"upstream2">>, <<"fed12.downstream">>)
-      end, [q(<<"upstream">>),
-            q(<<"upstream2">>),
-            q(<<"fed12.downstream">>)]).
-
-multiple_downstreams_test() ->
-    with_ch(
-      fun (Ch) ->
-              expect_federation(Ch, <<"upstream">>, <<"fed.downstream">>),
-              expect_federation(Ch, <<"upstream">>, <<"fed.downstream2">>)
-      end, [q(<<"upstream">>),
-            q(<<"fed.downstream">>),
-            q(<<"fed.downstream2">>)]).
-
-bidirectional_test() ->
-    with_ch(
-      fun (Ch) ->
-              publish_expect(Ch, <<>>, <<"one">>, <<"one">>, <<"first one">>),
-              publish_expect(Ch, <<>>, <<"two">>, <<"two">>, <<"first two">>),
-              Seq = lists:seq(1, 100),
-              [publish(Ch, <<>>, <<"one">>, <<"bulk">>) || _ <- Seq],
-              [publish(Ch, <<>>, <<"two">>, <<"bulk">>) || _ <- Seq],
-              expect(Ch, <<"one">>, repeat(150, <<"bulk">>)),
-              expect(Ch, <<"two">>, repeat(50, <<"bulk">>)),
-              expect_empty(Ch, <<"one">>),
-              expect_empty(Ch, <<"two">>)
-      end, [q(<<"one">>),
-            q(<<"two">>)]).
-
-dynamic_reconfiguration_test() ->
-    Cfg = single_cfg(),
-    with_ch(
-      fun (Ch) ->
-              expect_federation(Ch, <<"upstream">>, <<"fed.downstream">>),
-
-              %% Test that clearing connections works
-              clear_upstream(Cfg, <<"localhost">>),
-              expect_no_federation(Ch, <<"upstream">>, <<"fed.downstream">>),
-
-              %% Test that readding them and changing them works
-              set_upstream(Cfg, <<"localhost">>, <<"amqp://localhost">>),
-              %% Do it twice so we at least hit the no-restart optimisation
-              set_upstream(Cfg, <<"localhost">>, <<"amqp://">>),
-              set_upstream(Cfg, <<"localhost">>, <<"amqp://">>),
-              expect_federation(Ch, <<"upstream">>, <<"fed.downstream">>)
-      end, [q(<<"upstream">>),
-            q(<<"fed.downstream">>)]).
-
-federate_unfederate_test() ->
-    Cfg = single_cfg(),
-    with_ch(
-      fun (Ch) ->
-              expect_no_federation(Ch, <<"upstream">>, <<"downstream">>),
-              expect_no_federation(Ch, <<"upstream2">>, <<"downstream">>),
-
-              %% Federate it
-              set_policy(Cfg, <<"dyn">>, <<"^downstream\$">>, <<"upstream">>),
-              expect_federation(Ch, <<"upstream">>, <<"downstream">>),
-              expect_no_federation(Ch, <<"upstream2">>, <<"downstream">>),
-
-              %% Change policy - upstream changes
-              set_policy(Cfg, <<"dyn">>, <<"^downstream\$">>, <<"upstream2">>),
-              expect_no_federation(Ch, <<"upstream">>, <<"downstream">>),
-              expect_federation(Ch, <<"upstream2">>, <<"downstream">>),
-
-              %% Unfederate it - no federation
-              clear_policy(Cfg, <<"dyn">>),
-              expect_no_federation(Ch, <<"upstream2">>, <<"downstream">>)
-      end, [q(<<"upstream">>),
-            q(<<"upstream2">>),
-            q(<<"downstream">>)]).
-
-dynamic_plugin_stop_start_test() ->
-    Cfg = single_cfg(),
-    Q1 = <<"dyn.q1">>,
-    Q2 = <<"dyn.q2">>,
-    U = <<"upstream">>,
-    with_ch(
-      fun (Ch) ->
-              set_policy(Cfg, <<"dyn">>, <<"^dyn\\.">>, U),
-              %% Declare federated queue - get link
-              expect_federation(Ch, U, Q1),
-
-              %% Disable plugin, link goes
-              ok = disable_plugin(Cfg, "rabbitmq_federation"),
-              expect_no_federation(Ch, U, Q1),
-
-              %% Create exchange then re-enable plugin, links appear
-              declare_queue(Ch, q(Q2)),
-              ok = enable_plugin(Cfg, "rabbitmq_federation"),
-              expect_federation(Ch, U, Q1),
-              expect_federation(Ch, U, Q2),
-
-              clear_policy(Cfg, <<"dyn">>),
-              expect_no_federation(Ch, U, Q1),
-              expect_no_federation(Ch, U, Q2),
-              delete_queue(Ch, Q2)
-      end, [q(Q1), q(U)]).
-
-%% Downstream: rabbit-test, port 5672
-%% Upstream:   hare,        port 5673
-
-restart_upstream_with() -> disambiguate(start_ab).
-restart_upstream([Rabbit, Hare]) ->
-    set_policy_upstream(Rabbit, <<"^test$">>, <<"amqp://localhost:5673">>, []),
-
-    {_, Downstream} = rabbit_test_util:connect(Rabbit),
-    {_, Upstream}   = rabbit_test_util:connect(Hare),
-
-    declare_queue(Upstream, q(<<"test">>)),
-    declare_queue(Downstream, q(<<"test">>)),
-    Seq = lists:seq(1, 100),
-    [publish(Upstream, <<>>, <<"test">>, <<"bulk">>) || _ <- Seq],
-    expect(Upstream, <<"test">>, repeat(25, <<"bulk">>)),
-    expect(Downstream, <<"test">>, repeat(25, <<"bulk">>)),
-
-    Hare2 = rabbit_test_configs:restart_node(Hare),
-    {_, Upstream2} = rabbit_test_util:connect(Hare2),
-
-    expect(Upstream2, <<"test">>, repeat(25, <<"bulk">>)),
-    expect(Downstream, <<"test">>, repeat(25, <<"bulk">>)),
-    expect_empty(Upstream2, <<"test">>),
-    expect_empty(Downstream, <<"test">>),
-
-    ok.
-
-upstream_has_no_federation_test() ->
-    %% TODO
-    ok.
-
-%%----------------------------------------------------------------------------
-
-with_ch(Fun, Qs) ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    declare_all(Ch, Qs),
-    timer:sleep(1000), %% Time for statuses to get updated
-    rabbit_federation_test_util:assert_status(
-      Qs, {queue, upstream_queue}),
-    Fun(Ch),
-    delete_all(Ch, Qs),
-    amqp_connection:close(Conn),
-    ok.
-
-declare_all(Ch, Qs) -> [declare_queue(Ch, Q) || Q <- Qs].
-delete_all(Ch, Qs) ->
-    [delete_queue(Ch, Q) || #'queue.declare'{queue = Q} <- Qs].
-
-declare_queue(Ch, Q) ->
-    amqp_channel:call(Ch, Q).
-
-delete_queue(Ch, Q) ->
-    amqp_channel:call(Ch, #'queue.delete'{queue = Q}).
-
-q(Name) ->
-    #'queue.declare'{queue   = Name,
-                     durable = true}.
-
-repeat(Count, Item) -> [Item || _ <- lists:seq(1, Count)].
-
-%%----------------------------------------------------------------------------
-
-publish(Ch, X, Key, Payload) when is_binary(Payload) ->
-    publish(Ch, X, Key, #amqp_msg{payload = Payload});
-
-publish(Ch, X, Key, Msg = #amqp_msg{}) ->
-    amqp_channel:call(Ch, #'basic.publish'{exchange    = X,
-                                           routing_key = Key}, Msg).
-
-publish_expect(Ch, X, Key, Q, Payload) ->
-    publish(Ch, X, Key, Payload),
-    expect(Ch, Q, [Payload]).
-
-%% Doubled due to our strange basic.get behaviour.
-expect_empty(Ch, Q) ->
-    rabbit_federation_test_util:expect_empty(Ch, Q),
-    rabbit_federation_test_util:expect_empty(Ch, Q).
-
-expect_federation(Ch, UpstreamQ, DownstreamQ) ->
-    publish_expect(Ch, <<>>, UpstreamQ, DownstreamQ, <<"HELLO">>).
-
-expect_no_federation(Ch, UpstreamQ, DownstreamQ) ->
-    publish(Ch, <<>>, UpstreamQ, <<"HELLO">>),
-    expect_empty(Ch, DownstreamQ),
-    expect(Ch, UpstreamQ, [<<"HELLO">>]).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_test_util.erl b/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_test_util.erl
deleted file mode 100644 (file)
index d70042e..0000000
+++ /dev/null
@@ -1,158 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ Federation.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_federation_test_util).
-
--include("rabbit_federation.hrl").
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--compile(export_all).
-
--import(rabbit_misc, [pget/2]).
-
-expect(Ch, Q, Fun) when is_function(Fun) ->
-    amqp_channel:subscribe(Ch, #'basic.consume'{queue  = Q,
-                                                no_ack = true}, self()),
-    receive
-        #'basic.consume_ok'{consumer_tag = CTag} -> ok
-    end,
-    Fun(),
-    amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = CTag});
-
-expect(Ch, Q, Payloads) ->
-    expect(Ch, Q, fun() -> expect(Payloads) end).
-
-expect([]) ->
-    ok;
-expect(Payloads) ->
-    receive
-        {#'basic.deliver'{}, #amqp_msg{payload = Payload}} ->
-            case lists:member(Payload, Payloads) of
-                true  -> expect(Payloads -- [Payload]);
-                false -> throw({expected, Payloads, actual, Payload})
-            end
-    end.
-
-expect_empty(Ch, Q) ->
-    ?assertMatch(#'basic.get_empty'{},
-                 amqp_channel:call(Ch, #'basic.get'{ queue = Q })).
-
-set_upstream(Cfg, Name, URI) ->
-    set_upstream(Cfg, Name, URI, []).
-
-set_upstream(Cfg, Name, URI, Extra) ->
-    rabbit_test_util:set_param(Cfg, <<"federation-upstream">>, Name,
-                               [{<<"uri">>, URI} | Extra]).
-
-clear_upstream(Cfg, Name) ->
-    rabbit_test_util:clear_param(Cfg, <<"federation-upstream">>, Name).
-
-set_upstream_set(Cfg, Name, Set) ->
-    rabbit_test_util:set_param(
-      Cfg, <<"federation-upstream-set">>, Name,
-      [[{<<"upstream">>, UStream} | Extra] || {UStream, Extra} <- Set]).
-
-set_policy(Cfg, Name, Pattern, UpstreamSet) ->
-    rabbit_test_util:set_policy(Cfg, Name, Pattern, <<"all">>,
-                                [{<<"federation-upstream-set">>, UpstreamSet}]).
-
-set_policy1(Cfg, Name, Pattern, Upstream) ->
-    rabbit_test_util:set_policy(Cfg, Name, Pattern, <<"all">>,
-                                [{<<"federation-upstream">>, Upstream}]).
-
-clear_policy(Cfg, Name) ->
-    rabbit_test_util:clear_policy(Cfg, Name).
-
-set_policy_upstream(Cfg, Pattern, URI, Extra) ->
-    set_policy_upstreams(Cfg, Pattern, [{URI, Extra}]).
-
-set_policy_upstreams(Cfg, Pattern, URIExtras) ->
-    put(upstream_num, 1),
-    [set_upstream(Cfg, gen_upstream_name(), URI, Extra)
-     || {URI, Extra} <- URIExtras],
-    set_policy(Cfg, Pattern, Pattern, <<"all">>).
-
-gen_upstream_name() ->
-    list_to_binary("upstream-" ++ integer_to_list(next_upstream_num())).
-
-next_upstream_num() ->
-    R = get(upstream_num) + 1,
-    put (upstream_num, R),
-    R.
-
-%% Make sure that even though multiple nodes are in a single
-%% distributed system, we still keep all our process groups separate.
-disambiguate(Rest) ->
-    [Rest,
-     fun (Cfgs) ->
-             [rpc:call(pget(node, Cfg), application, set_env,
-                       [rabbitmq_federation, pgroup_name_cluster_id, true])
-              || Cfg <- Cfgs],
-             Cfgs
-     end].
-
-no_plugins(Cfg) ->
-    [{K, case K of
-             plugins -> none;
-             _       -> V
-         end} || {K, V} <- Cfg].
-
-%% "fake" cfg to let us use various utility functions when running
-%% in-broker tests
-single_cfg() ->
-    [{nodename, 'rabbit-test'},
-     {node,     rabbit_nodes:make('rabbit-test')},
-     {port,     5672}].
-
-%%----------------------------------------------------------------------------
-
-assert_status(XorQs, Names) ->
-    Links = lists:append([links(XorQ) || XorQ <- XorQs]),
-    Remaining = lists:foldl(fun (Link, Status) ->
-                                    assert_link_status(Link, Status, Names)
-                            end, rabbit_federation_status:status(), Links),
-    ?assertEqual([], Remaining),
-    ok.
-
-assert_link_status({DXorQNameBin, UpstreamName, UXorQNameBin}, Status,
-                   {TypeName, UpstreamTypeName}) ->
-    {This, Rest} = lists:partition(
-                     fun(St) ->
-                             pget(upstream, St) =:= UpstreamName andalso
-                                 pget(TypeName, St) =:= DXorQNameBin andalso
-                                 pget(UpstreamTypeName, St) =:= UXorQNameBin
-                     end, Status),
-    ?assertMatch([_], This),
-    Rest.
-
-links(#'exchange.declare'{exchange = Name}) ->
-    case rabbit_policy:get(<<"federation-upstream-set">>, xr(Name)) of
-        undefined -> [];
-        Set       -> X = #exchange{name = xr(Name)},
-                     [{Name, U#upstream.name, U#upstream.exchange_name} ||
-                         U <- rabbit_federation_upstream:from_set(Set, X)]
-    end;
-links(#'queue.declare'{queue = Name}) ->
-    case rabbit_policy:get(<<"federation-upstream-set">>, qr(Name)) of
-        undefined -> [];
-        Set       -> Q = #amqqueue{name = qr(Name)},
-                     [{Name, U#upstream.name, U#upstream.queue_name} ||
-                         U <- rabbit_federation_upstream:from_set(Set, Q)]
-    end.
-
-xr(Name) -> rabbit_misc:r(<<"/">>, exchange, Name).
-qr(Name) -> rabbit_misc:r(<<"/">>, queue, Name).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_unit_test.erl b/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_unit_test.erl
deleted file mode 100644 (file)
index 76d23b8..0000000
+++ /dev/null
@@ -1,107 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ Federation.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_federation_unit_test).
-
--define(US_NAME, <<"upstream">>).
--define(DS_NAME, <<"fed.downstream">>).
-
--include("rabbit_federation.hrl").
--include_lib("eunit/include/eunit.hrl").
--include_lib("rabbit_common/include/rabbit.hrl").
-
-%% Test that we apply binding changes in the correct order even when
-%% they arrive out of order.
-serialisation_test() ->
-    with_exchanges(
-      fun(X) ->
-              [B1, B2, B3] = [b(K) || K <- [<<"1">>, <<"2">>, <<"3">>]],
-              remove_bindings(4, X, [B1, B3]),
-              add_binding(5, X, B1),
-              add_binding(1, X, B1),
-              add_binding(2, X, B2),
-              add_binding(3, X, B3),
-              %% List of lists because one for each link
-              Keys = rabbit_federation_exchange_link:list_routing_keys(
-                       X#exchange.name),
-              ?assertEqual([[<<"1">>, <<"2">>]], Keys)
-      end).
-
-with_exchanges(Fun) ->
-    rabbit_exchange:declare(r(?US_NAME), fanout, false, false, false, []),
-    X = rabbit_exchange:declare(r(?DS_NAME), fanout, false, false, false, []),
-    Fun(X),
-    %% Delete downstream first or it will recreate the upstream
-    rabbit_exchange:delete(r(?DS_NAME), false),
-    rabbit_exchange:delete(r(?US_NAME), false),
-    ok.
-
-add_binding(Ser, X, B) ->
-    rabbit_federation_exchange:add_binding(transaction, X, B),
-    rabbit_federation_exchange:add_binding(Ser, X, B).
-
-remove_bindings(Ser, X, Bs) ->
-    rabbit_federation_exchange:remove_bindings(transaction, X, Bs),
-    rabbit_federation_exchange:remove_bindings(Ser, X, Bs).
-
-r(Name) -> rabbit_misc:r(<<"/">>, exchange, Name).
-
-b(Key) ->
-    #binding{source = ?DS_NAME, destination = <<"whatever">>,
-             key = Key, args = []}.
-
-scratch_space_test() ->
-    A = <<"A">>,
-    B = <<"B">>,
-    DB = rabbit_federation_db,
-    with_exchanges(
-      fun(#exchange{name = N}) ->
-              DB:set_active_suffix(N, upstream(x), A),
-              DB:set_active_suffix(N, upstream(y), A),
-              DB:prune_scratch(N, [upstream(y), upstream(z)]),
-              DB:set_active_suffix(N, upstream(y), B),
-              DB:set_active_suffix(N, upstream(z), A),
-              ?assertEqual(none, DB:get_active_suffix(N, upstream(x), none)),
-              ?assertEqual(B,    DB:get_active_suffix(N, upstream(y), none)),
-              ?assertEqual(A,    DB:get_active_suffix(N, upstream(z), none))
-      end).
-
-upstream(UpstreamName) ->
-    #upstream{name          = atom_to_list(UpstreamName),
-              exchange_name = <<"upstream">>}.
-
-remove_credentials_test() ->
-    Test0 = fun (In, Exp) ->
-                    Act = rabbit_federation_upstream:remove_credentials(In),
-                    ?assertEqual(Exp, Act)
-            end,
-    Cat = fun (Bs) ->
-                  list_to_binary(lists:append([binary_to_list(B) || B <- Bs]))
-          end,
-    Test = fun (Scheme, Rest) ->
-                   Exp = Cat([Scheme, Rest]),
-                   Test0(Exp,                                   Exp),
-                   Test0(Cat([Scheme, <<"user@">>, Rest]),      Exp),
-                   Test0(Cat([Scheme, <<"user:pass@">>, Rest]), Exp)
-           end,
-    Test(<<"amqp://">>,  <<"">>),
-    Test(<<"amqp://">>,  <<"localhost">>),
-    Test(<<"amqp://">>,  <<"localhost/">>),
-    Test(<<"amqp://">>,  <<"localhost/foo">>),
-    Test(<<"amqp://">>,  <<"localhost:5672">>),
-    Test(<<"amqp://">>,  <<"localhost:5672/foo">>),
-    Test(<<"amqps://">>, <<"localhost:5672/%2f">>),
-    ok.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-agent/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-management-agent/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-agent/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-management-agent/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-agent/Makefile b/rabbitmq-server/plugins-src/rabbitmq-management-agent/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-agent/package.mk b/rabbitmq-server/plugins-src/rabbitmq-management-agent/package.mk
deleted file mode 100644 (file)
index 702019b..0000000
+++ /dev/null
@@ -1 +0,0 @@
-DEPS:=rabbitmq-erlang-client
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/Makefile b/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/package.mk b/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/package.mk
deleted file mode 100644 (file)
index 2a0757c..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-management
-
-CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f)
-define construct_app_commands
-       cp -r $(PACKAGE_DIR)/priv $(APP_DIR)
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-management/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-management/.travis.yml
deleted file mode 100644 (file)
index 2d93510..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-sudo: false
-language: erlang
-notifications:
-  email:
-    - alerts@rabbitmq.com
-addons:
-  apt:
-    packages:
-      - xsltproc
-      - python3
-otp_release:
-  - "R16B03-1"
-  - "17.5"
-  - "18.0"
-install:
-  - if [ ! -d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi
-  - cd $HOME/rabbitmq-public-umbrella
-  - make co
-  - make up
-before_script:
-  - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG"
-  - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]}
-  - rm -rf ${TEST_DIR}
-  - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR}
-  - cd ${TEST_DIR}
-script: make test
-before_cache:
-  - rm -rf ${TEST_DIR}
-  - cd $HOME
-cache:
-  apt: true
-  directories:
-    - $HOME/rabbitmq-public-umbrella
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-management/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/Makefile b/rabbitmq-server/plugins-src/rabbitmq-management/Makefile
deleted file mode 100644 (file)
index 559ffc8..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-include ../umbrella.mk
-
-RABBITMQCTL=../rabbitmq-server/scripts/rabbitmqctl
-TEST_TMPDIR=$(TMPDIR)/rabbitmq-test
-OTHER_NODE=undefined
-OTHER_PORT=undefined
-
-start-other-node:
-       rm -f $(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-pid
-       RABBITMQ_MNESIA_BASE=$(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-mnesia \
-       RABBITMQ_PID_FILE=$(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-pid \
-       RABBITMQ_LOG_BASE=$(TEST_TMPDIR)/log \
-       RABBITMQ_NODENAME=$(OTHER_NODE) \
-       RABBITMQ_NODE_PORT=$(OTHER_PORT) \
-       RABBITMQ_CONFIG_FILE=etc/$(OTHER_NODE) \
-       RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \
-       RABBITMQ_PLUGINS_EXPAND_DIR=$(TEST_TMPDIR)/$(OTHER_NODE)-plugins-expand \
-       ../rabbitmq-server/scripts/rabbitmq-server >/tmp/$(OTHER_NODE).out 2>/tmp/$(OTHER_NODE).err &
-       $(RABBITMQCTL) -n $(OTHER_NODE) wait $(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-pid
-
-cluster-other-node:
-       $(RABBITMQCTL) -n $(OTHER_NODE) stop_app
-       $(RABBITMQCTL) -n $(OTHER_NODE) reset
-       $(RABBITMQCTL) -n $(OTHER_NODE) join_cluster rabbit-test@`hostname -s`
-       $(RABBITMQCTL) -n $(OTHER_NODE) start_app
-
-stop-other-node:
-       $(RABBITMQCTL) -n $(OTHER_NODE) stop
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/README b/rabbitmq-server/plugins-src/rabbitmq-management/README
deleted file mode 100644 (file)
index 458a63c..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-Generic build instructions are at:
-        http://www.rabbitmq.com/plugin-development.html
-
-When installed, point your broswer at:
-
-http://<server>:15672/
-
-and log in with AMQP credentials (guest/guest by default).
-
-Documentation for the HTTP API can be found at
-
-http://<server>:15672/api/
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/etc/bunny.config b/rabbitmq-server/plugins-src/rabbitmq-management/etc/bunny.config
deleted file mode 100644 (file)
index 4afc9d5..0000000
+++ /dev/null
@@ -1 +0,0 @@
-[{rabbitmq_management, [{listener,[{port, 15674}]}]}].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/etc/hare.config b/rabbitmq-server/plugins-src/rabbitmq-management/etc/hare.config
deleted file mode 100644 (file)
index bd8b3d7..0000000
+++ /dev/null
@@ -1 +0,0 @@
-[{rabbitmq_management, [{listener,[{port, 15673}]}]}].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/etc/rabbit-test.config b/rabbitmq-server/plugins-src/rabbitmq-management/etc/rabbit-test.config
deleted file mode 100644 (file)
index 6b9bbe2..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-%% We test sample retention separately in rabbit_mgmt_test_db_unit,
-%% but for rabbit_mgmt_test_db we want to make sure samples never
-%% expire.
-[{rabbitmq_management, [{sample_retention_policies,
-                         %% List of {MaxAgeSecs, IfTimestampDivisibleBySecs}
-                         [{global,   [{10000000000000, 1}]},
-                          {basic,    [{10000000000000, 1}]},
-                          {detailed, [{10000000000000, 1}]}]},
-                        %% We're going to test this, so enable it!
-                        {rates_mode, detailed}
-                       ]}
-].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/package.mk b/rabbitmq-server/plugins-src/rabbitmq-management/package.mk
deleted file mode 100644 (file)
index 3d0817a..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-web-dispatch webmachine-wrapper rabbitmq-server rabbitmq-erlang-client rabbitmq-management-agent rabbitmq-test
-FILTER:=all
-COVER:=false
-WITH_BROKER_TEST_COMMANDS:=rabbit_test_runner:run_in_broker(\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\")
-WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/etc/rabbit-test
-STANDALONE_TEST_COMMANDS:=rabbit_test_runner:run_multi(\"$(UMBRELLA_BASE_DIR)/rabbitmq-server\",\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\",$(COVER),\"/tmp/rabbitmq-multi-node/plugins\")
-WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/src/rabbitmqadmin-test-wrapper.sh
-
-CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f) $(PACKAGE_DIR)/bin/rabbitmqadmin
-define construct_app_commands
-       cp -r $(PACKAGE_DIR)/priv $(APP_DIR)
-       sed 's/%%VSN%%/$(VERSION)/' $(PACKAGE_DIR)/bin/rabbitmqadmin > $(APP_DIR)/priv/www/cli/rabbitmqadmin
-endef
-
-# The tests require erlang/OTP R14 (httpc issue)
-$(PACKAGE_DIR)+pre-test::
-       if [ "`erl -noshell -eval 'io:format([list_to_integer(X) || X <- string:tokens(erlang:system_info(version), ".")] >= [5,8]), halt().'`" != true ] ; then \
-         echo "Need Erlang/OTP R14A or higher" ; \
-         exit 1 ; \
-       fi
-       rm -rf /tmp/rabbitmq-multi-node/plugins
-       mkdir -p /tmp/rabbitmq-multi-node/plugins/plugins
-       cp -p $(UMBRELLA_BASE_DIR)/rabbitmq-management/dist/*.ez /tmp/rabbitmq-multi-node/plugins/plugins
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.min.js b/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.min.js
deleted file mode 100644 (file)
index 9733f01..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-// -- Sammy -- /sammy.js
-// http://code.quirkey.com/sammy
-// Version: 0.6.0
-// Built: Wed Sep 01 23:12:46 -0700 2010
-(function(g){var m,f="([^/]+)",i=/:([\w\d]+)/g,j=/\?([^#]*)$/,b=function(n){return Array.prototype.slice.call(n)},c=function(n){return Object.prototype.toString.call(n)==="[object Function]"},k=function(n){return Object.prototype.toString.call(n)==="[object Array]"},h=decodeURIComponent,e=function(n){return n.replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;")},l=function(n){return function(o,p){return this.route.apply(this,[n,o,p])}},a={},d=[];m=function(){var o=b(arguments),p,n;m.apps=m.apps||{};if(o.length===0||o[0]&&c(o[0])){return m.apply(m,["body"].concat(o))}else{if(typeof(n=o.shift())=="string"){p=m.apps[n]||new m.Application();p.element_selector=n;if(o.length>0){g.each(o,function(q,r){p.use(r)})}if(p.element_selector!=n){delete m.apps[n]}m.apps[p.element_selector]=p;return p}}};m.VERSION="0.6.0";m.addLogger=function(n){d.push(n)};m.log=function(){var n=b(arguments);n.unshift("["+Date()+"]");g.each(d,function(p,o){o.apply(m,n)})};if(typeof window.console!="undefined"){if(c(console.log.apply)){m.addLogger(function(){window.console.log.apply(console,arguments)})}else{m.addLogger(function(){window.console.log(arguments)})}}else{if(typeof console!="undefined"){m.addLogger(function(){console.log.apply(console,arguments)})}}g.extend(m,{makeArray:b,isFunction:c,isArray:k});m.Object=function(n){return g.extend(this,n||{})};g.extend(m.Object.prototype,{escapeHTML:e,h:e,toHash:function(){var n={};g.each(this,function(p,o){if(!c(o)){n[p]=o}});return n},toHTML:function(){var n="";g.each(this,function(p,o){if(!c(o)){n+="<strong>"+p+"</strong> "+o+"<br />"}});return n},keys:function(n){var o=[];for(var p in this){if(!c(this[p])||!n){o.push(p)}}return o},has:function(n){return this[n]&&g.trim(this[n].toString())!=""},join:function(){var o=b(arguments);var n=o.shift();return o.join(n)},log:function(){m.log.apply(m,arguments)},toString:function(n){var o=[];g.each(this,function(q,p){if(!c(p)||n){o.push('"'+q+'": '+p.toString())}});return"Sammy.Object: 
{"+o.join(",")+"}"}});m.HashLocationProxy=function(o,n){this.app=o;this.is_native=false;this._startPolling(n)};m.HashLocationProxy.prototype={bind:function(){var n=this,o=this.app;g(window).bind("hashchange."+this.app.eventNamespace(),function(q,p){if(n.is_native===false&&!p){m.log("native hash change exists, using");n.is_native=true;clearInterval(m.HashLocationProxy._interval)}o.trigger("location-changed")});if(!m.HashLocationProxy._bindings){m.HashLocationProxy._bindings=0}m.HashLocationProxy._bindings++},unbind:function(){g(window).unbind("hashchange."+this.app.eventNamespace());m.HashLocationProxy._bindings--;if(m.HashLocationProxy._bindings<=0){clearInterval(m.HashLocationProxy._interval)}},getLocation:function(){var n=window.location.toString().match(/^[^#]*(#.+)$/);return n?n[1]:""},setLocation:function(n){return(window.location=n)},_startPolling:function(p){var o=this;if(!m.HashLocationProxy._interval){if(!p){p=10}var n=function(){current_location=o.getLocation();if(!m.HashLocationProxy._last_location||current_location!=m.HashLocationProxy._last_location){setTimeout(function(){g(window).trigger("hashchange",[true])},13)}m.HashLocationProxy._last_location=current_location};n();m.HashLocationProxy._interval=setInterval(n,p)}}};m.Application=function(n){var o=this;this.routes={};this.listeners=new m.Object({});this.arounds=[];this.befores=[];this.namespace=(new Date()).getTime()+"-"+parseInt(Math.random()*1000,10);this.context_prototype=function(){m.EventContext.apply(this,arguments)};this.context_prototype.prototype=new m.EventContext();if(c(n)){n.apply(this,[this])}if(!this._location_proxy){this.setLocationProxy(new 
m.HashLocationProxy(this,this.run_interval_every))}if(this.debug){this.bindToAllEvents(function(q,p){o.log(o.toString(),q.cleaned_type,p||{})})}};m.Application.prototype=g.extend({},m.Object.prototype,{ROUTE_VERBS:["get","post","put","delete"],APP_EVENTS:["run","unload","lookup-route","run-route","route-found","event-context-before","event-context-after","changed","error","check-form-submission","redirect"],_last_route:null,_location_proxy:null,_running:false,element_selector:"body",debug:false,raise_errors:false,run_interval_every:50,template_engine:null,toString:function(){return"Sammy.Application:"+this.element_selector},$element:function(){return g(this.element_selector)},use:function(){var n=b(arguments),p=n.shift(),o=p||"";try{n.unshift(this);if(typeof p=="string"){o="Sammy."+p;p=m[p]}p.apply(this,n)}catch(q){if(typeof p==="undefined"){this.error("Plugin Error: called use() but plugin ("+o.toString()+") is not defined",q)}else{if(!c(p)){this.error("Plugin Error: called use() but '"+o.toString()+"' is not a function",q)}else{this.error("Plugin Error",q)}}}return this},setLocationProxy:function(n){var o=this._location_proxy;this._location_proxy=n;if(this.isRunning()){if(o){o.unbind()}this._location_proxy.bind()}},route:function(q,o,s){var p=this,r=[],n;if(!s&&c(o)){o=q;s=o;q="any"}q=q.toLowerCase();if(o.constructor==String){i.lastIndex=0;while((path_match=i.exec(o))!==null){r.push(path_match[1])}o=new RegExp("^"+o.replace(i,f)+"$")}if(typeof s=="string"){s=p[s]}n=function(t){var u={verb:t,path:o,callback:s,param_names:r};p.routes[t]=p.routes[t]||[];p.routes[t].push(u)};if(q==="any"){g.each(this.ROUTE_VERBS,function(u,t){n(t)})}else{n(q)}return this},get:l("get"),post:l("post"),put:l("put"),del:l("delete"),any:l("any"),mapRoutes:function(o){var n=this;g.each(o,function(p,q){n.route.apply(n,q)});return this},eventNamespace:function(){return["sammy-app",this.namespace].join("-")},bind:function(n,p,r){var q=this;if(typeof r=="undefined"){r=p}var o=function(){var 
u,s,t;u=arguments[0];t=arguments[1];if(t&&t.context){s=t.context;delete t.context}else{s=new q.context_prototype(q,"bind",u.type,t,u.target)}u.cleaned_type=u.type.replace(q.eventNamespace(),"");r.apply(s,[u,t])};if(!this.listeners[n]){this.listeners[n]=[]}this.listeners[n].push(o);if(this.isRunning()){this._listen(n,o)}return this},trigger:function(n,o){this.$element().trigger([n,this.eventNamespace()].join("."),[o]);return this},refresh:function(){this.last_location=null;this.trigger("location-changed");return this},before:function(n,o){if(c(n)){o=n;n={}}this.befores.push([n,o]);return this},after:function(n){return this.bind("event-context-after",n)},around:function(n){this.arounds.push(n);return this},isRunning:function(){return this._running},helpers:function(n){g.extend(this.context_prototype.prototype,n);return this},helper:function(n,o){this.context_prototype.prototype[n]=o;return this},run:function(n){if(this.isRunning()){return false}var o=this;g.each(this.listeners.toHash(),function(p,q){g.each(q,function(s,r){o._listen(p,r)})});this.trigger("run",{start_url:n});this._running=true;this.last_location=null;if(this.getLocation()==""&&typeof n!="undefined"){this.setLocation(n)}this._checkLocation();this._location_proxy.bind();this.bind("location-changed",function(){o._checkLocation()});this.bind("submit",function(q){var p=o._checkFormSubmission(g(q.target).closest("form"));return(p===false)?q.preventDefault():false});g(window).bind("beforeunload",function(){o.unload()});return this.trigger("changed")},unload:function(){if(!this.isRunning()){return false}var n=this;this.trigger("unload");this._location_proxy.unbind();this.$element().unbind("submit").removeClass(n.eventNamespace());g.each(this.listeners.toHash(),function(o,p){g.each(p,function(r,q){n._unlisten(o,q)})});this._running=false;return this},bindToAllEvents:function(o){var 
n=this;g.each(this.APP_EVENTS,function(p,q){n.bind(q,o)});g.each(this.listeners.keys(true),function(q,p){if(n.APP_EVENTS.indexOf(p)==-1){n.bind(p,o)}});return this},routablePath:function(n){return n.replace(j,"")},lookupRoute:function(q,o){var p=this,n=false;this.trigger("lookup-route",{verb:q,path:o});if(typeof this.routes[q]!="undefined"){g.each(this.routes[q],function(s,r){if(p.routablePath(o).match(r.path)){n=r;return false}})}return n},runRoute:function(p,B,r,u){var q=this,z=this.lookupRoute(p,B),o,x,s,w,A,y,v,n;this.log("runRoute",[p,B].join(" "));this.trigger("run-route",{verb:p,path:B,params:r});if(typeof r=="undefined"){r={}}g.extend(r,this._parseQueryString(B));if(z){this.trigger("route-found",{route:z});if((path_params=z.path.exec(this.routablePath(B)))!==null){path_params.shift();g.each(path_params,function(C,D){if(z.param_names[C]){r[z.param_names[C]]=h(D)}else{if(!r.splat){r.splat=[]}r.splat.push(h(D))}})}o=new this.context_prototype(this,p,B,r,u);s=this.arounds.slice(0);A=this.befores.slice(0);v=[o].concat(r.splat);x=function(){var C;while(A.length>0){y=A.shift();if(q.contextMatchesOptions(o,y[0])){C=y[1].apply(o,[o]);if(C===false){return false}}}q.last_route=z;o.trigger("event-context-before",{context:o});C=z.callback.apply(o,v);o.trigger("event-context-after",{context:o});return C};g.each(s.reverse(),function(C,D){var E=x;x=function(){return D.apply(o,[E])}});try{n=x()}catch(t){this.error(["500 Error",p,B].join(" "),t)}return n}else{return this.notFound(p,B)}},contextMatchesOptions:function(q,s,o){var p=s;if(typeof p==="undefined"||p=={}){return true}if(typeof o==="undefined"){o=true}if(typeof p==="string"||c(p.test)){p={path:p}}if(p.only){return this.contextMatchesOptions(q,p.only,true)}else{if(p.except){return this.contextMatchesOptions(q,p.except,false)}}var n=true,r=true;if(p.path){if(c(p.path.test)){n=p.path.test(q.path)}else{n=(p.path.toString()===q.path)}}if(p.verb){r=p.verb===q.verb}return o?(r&&n):!(r&&n)},getLocation:function(){return 
this._location_proxy.getLocation()},setLocation:function(n){return this._location_proxy.setLocation(n)},swap:function(n){return this.$element().html(n)},templateCache:function(n,o){if(typeof o!="undefined"){return a[n]=o}else{return a[n]}},notFound:function(p,o){var n=this.error(["404 Not Found",p,o].join(" "));return(p==="get")?n:true},error:function(o,n){if(!n){n=new Error()}n.message=[o,n.message].join(" ");this.trigger("error",{message:n.message,error:n});if(this.raise_errors){throw (n)}else{this.log(n.message,n)}},_checkLocation:function(){var n,o;n=this.getLocation();if(n!=this.last_location){this.last_location=n;o=this.runRoute("get",n)}return o},_getFormVerb:function(o){var n=g(o),p;$_method=n.find('input[name="_method"]');if($_method.length>0){p=$_method.val()}if(!p){p=n[0].getAttribute("method")}return g.trim(p.toString().toLowerCase())},_checkFormSubmission:function(p){var n,q,s,r,o;this.trigger("check-form-submission",{form:p});n=g(p);q=n.attr("action");s=this._getFormVerb(n);if(!s||s==""){s="get"}this.log("_checkFormSubmission",n,q,s);if(s==="get"){this.setLocation(q+"?"+n.serialize());o=false}else{r=g.extend({},this._parseFormParams(n));o=this.runRoute(s,q,r,p.get(0))}return(typeof o=="undefined")?false:o},_parseFormParams:function(n){var q={},p=n.serializeArray(),o;for(o=0;o<p.length;o++){q=this._parseParamPair(q,p[o].name,p[o].value)}return q},_parseQueryString:function(q){var s={},p,o,r,n;p=q.match(j);if(p){o=p[1].split("&");for(n=0;n<o.length;n++){r=o[n].split("=");s=this._parseParamPair(s,h(r[0]),h(r[1]))}}return s},_parseParamPair:function(p,n,o){if(p[n]){if(k(p[n])){p[n].push(o)}else{p[n]=[p[n],o]}}else{p[n]=o}return p},_listen:function(n,o){return this.$element().bind([n,this.eventNamespace()].join("."),o)},_unlisten:function(n,o){return 
this.$element().unbind([n,this.eventNamespace()].join("."),o)}});m.RenderContext=function(n){this.event_context=n;this.callbacks=[];this.previous_content=null;this.content=null;this.next_engine=false;this.waiting=false};g.extend(m.RenderContext.prototype,{then:function(o){if(c(o)){var n=this;if(this.waiting){this.callbacks.push(o)}else{this.wait();setTimeout(function(){var p=o.apply(n,[n.content,n.previous_content]);if(p!==false){n.next(p)}},13)}}return this},wait:function(){this.waiting=true},next:function(n){this.waiting=false;if(typeof n!=="undefined"){this.previous_content=this.content;this.content=n}if(this.callbacks.length>0){this.then(this.callbacks.shift())}},load:function(n,o,q){var p=this;return this.then(function(){var r,s;if(c(o)){q=o;o={}}else{o=g.extend({},o)}if(q){this.then(q)}if(typeof n==="string"){r=!(o.cache===false);delete o.cache;if(o.engine){p.next_engine=o.engine;delete o.engine}if(r&&(s=this.event_context.app.templateCache(n))){return s}this.wait();g.ajax(g.extend({url:n,data:{},type:"get",success:function(t){if(r){p.event_context.app.templateCache(n,t)}p.next(t)}},o));return false}else{if(n.nodeType){return n.innerHTML}if(n.selector){p.next_engine=n.attr("data-engine");if(o.clone===false){return n.remove()[0].innerHTML.toString()}else{return n[0].innerHTML.toString()}}}})},render:function(n,o,p){if(c(n)&&!o){return this.then(n)}else{return this.load(n).interpolate(o,n).then(p)}},collect:function(p,o){var n=this;return this.then(function(){var q="";g.each(p,function(r,t){var s=o.apply(n,[r,t]);q+=s;return s});return q})},renderEach:function(n,o,p,q){if(k(o)){q=p;p=o;o=null}if(!p&&k(this.content)){p=this.content}return this.load(n).collect(p,function(r,s){var t={};o?(t[o]=s):(t=s);return this.event_context.interpolate(this.content,t,n)})},interpolate:function(q,p,n){var o=this;return this.then(function(s,r){if(this.next_engine){p=this.next_engine;this.next_engine=false}var t=o.event_context.interpolate(s,q,p);return 
n?r+t:t})},swap:function(){return this.then(function(n){this.event_context.swap(n)}).trigger("changed",{})},appendTo:function(n){return this.then(function(o){g(n).append(o)}).trigger("changed",{})},prependTo:function(n){return this.then(function(o){g(n).prepend(o)}).trigger("changed",{})},replace:function(n){return this.then(function(o){g(n).html(o)}).trigger("changed",{})},trigger:function(n,o){return this.then(function(p){if(typeof o=="undefined"){o={content:p}}this.event_context.trigger(n,o)})}});m.EventContext=function(r,q,o,p,n){this.app=r;this.verb=q;this.path=o;this.params=new m.Object(p);this.target=n};m.EventContext.prototype=g.extend({},m.Object.prototype,{$element:function(){return this.app.$element()},engineFor:function(p){var o=this,n;if(c(p)){return p}p=p.toString();if((n=p.match(/\.([^\.]+)$/))){p=n[1]}if(p&&c(o[p])){return o[p]}if(o.app.template_engine){return this.engineFor(o.app.template_engine)}return function(q,r){return q}},interpolate:function(o,p,n){return this.engineFor(n).apply(this,[o,p])},render:function(n,o,p){return new m.RenderContext(this).render(n,o,p)},load:function(n,o,p){return new m.RenderContext(this).load(n,o,p)},partial:function(n,o){return this.render(n,o).swap()},redirect:function(){var p,o=b(arguments),n=this.app.getLocation();if(o.length>1){o.unshift("/");p=this.join.apply(this,o)}else{p=o[0]}this.trigger("redirect",{to:p});this.app.last_location=this.path;this.app.setLocation(p);if(n==p){this.app.trigger("location-changed")}},trigger:function(n,o){if(typeof o=="undefined"){o={}}if(!o.context){o.context=this}return this.app.trigger(n,o)},eventNamespace:function(){return this.app.eventNamespace()},swap:function(n){return this.app.swap(n)},notFound:function(){return this.app.notFound(this.verb,this.path)},toString:function(){return"Sammy.EventContext: "+[this.verb,this.path,this.params].join(" ")}});g.sammy=window.Sammy=m})(jQuery);
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels.ejs b/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels.ejs
deleted file mode 100644 (file)
index 3829e39..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-<h1>Channels</h1>
-<%= filter_ui(channels) %>
-<div class="updatable">
-  <%= format('channels-list', {'channels': channels, 'mode': 'standalone'}) %>
-</div>
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_db.erl b/rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_db.erl
deleted file mode 100644 (file)
index e7cb753..0000000
+++ /dev/null
@@ -1,1213 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Plugin.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_mgmt_db).
-
--include("rabbit_mgmt.hrl").
--include_lib("rabbit_common/include/rabbit.hrl").
-
--behaviour(gen_server2).
-
--export([start_link/0]).
-
--export([augment_exchanges/3, augment_queues/3,
-         augment_nodes/2, augment_vhosts/2,
-         get_channel/2, get_connection/2,
-         get_all_channels/1, get_all_connections/1,
-         get_all_consumers/0, get_all_consumers/1,
-         get_overview/2, get_overview/1]).
-
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
-         code_change/3, handle_pre_hibernate/1,
-         prioritise_cast/3, prioritise_call/4, format_message_queue/2]).
-
-%% For testing
--export([override_lookups/1, reset_lookups/0]).
-
--import(rabbit_misc, [pget/3, pset/3]).
-
-%% The management database listens to events broadcast via the
-%% rabbit_event mechanism, and responds to queries from the various
-%% rabbit_mgmt_wm_* modules. It handles several kinds of events, and
-%% slices and dices them in various ways.
-%%
-%% There are three types of events coming in: created (when an object
-%% is created, containing immutable facts about it), stats (emitted on
-%% a timer, with mutable facts about the object), and deleted (just
-%% containing the object's ID). In this context "objects" means
-%% connections, channels, exchanges, queues, consumers, vhosts and
-%% nodes. Note that we do not care about users, permissions, bindings,
-%% parameters or policies.
-%%
-%% Connections and channels are identified by pids. Queues and
-%% exchanges are identified by names (which are #resource{}s). VHosts
-%% and nodes are identified by names which are binaries. And consumers
-%% are identified by {ChPid, QName, CTag}.
-%%
-%% The management database records the "created" events for
-%% connections, channels and consumers, and can thus be authoritative
-%% about those objects. For queues, exchanges and nodes we go to
-%% Mnesia to find out the immutable details of the objects.
-%%
-%% For everything other than consumers, the database can then augment
-%% these immutable details with stats, as the object changes. (We
-%% never emit anything very interesting about consumers).
-%%
-%% Stats on the inbound side are refered to as coarse- and
-%% fine-grained. Fine grained statistics are the message rates
-%% maintained by channels and associated with tuples: {publishing
-%% channel, exchange}, {publishing channel, exchange, queue} and
-%% {queue, consuming channel}. Coarse grained stats are everything
-%% else and are associated with only one object, not a tuple.
-%%
-%% Within the management database though we rearrange things a bit: we
-%% refer to basic stats, simple stats and detail stats.
-%%
-%% Basic stats are those coarse grained stats for which we do not
-%% retain a history and do not perform any calculations -
-%% e.g. connection.state or channel.prefetch_count.
-%%
-%% Simple stats are those for which we do history / calculations which
-%% are associated with one object *after aggregation* - so these might
-%% originate with coarse grained stats - e.g. connection.send_oct or
-%% queue.messages_ready. But they might also originate from fine
-%% grained stats which have been aggregated - e.g. the message rates
-%% for a vhost or queue.
-%%
-%% Finally, detailed stats are those for which we do history /
-%% calculations which are associated with two objects. These
-%% have to have originated as fine grained stats, but can still have
-%% been aggregated.
-%%
-%% Created events and basic stats are stored in ETS tables by object,
-%% looked up in an orddict in #state.tables. Simple and detailed stats
-%% (which only differ depending on how they're keyed) are stored in
-%% #state.aggregated_stats.
-%%
-%% For detailed stats we also store an index for each object referencing
-%% all the other objects that form a detailed stats key with it. This is
-%% so that we can always avoid table scanning while deleting stats and
-%% thus make sure that handling deleted events is O(n)-ish.
-%%
-%% For each key for simple and detailed stats we maintain a #stats{}
-%% record, essentially a base counter for everything that happened
-%% before the samples we have kept, and a gb_tree of {timestamp,
-%% sample} values.
-%%
-%% We also have #state.old_stats to let us calculate instantaneous
-%% rates, in order to apportion simple / detailed stats into time
-%% slices as they come in. These instantaneous rates are not returned
-%% in response to any query, the rates shown in the API are calculated
-%% at query time. old_stats contains both coarse and fine
-%% entries. Coarse entries are pruned when the corresponding object is
-%% deleted, and fine entries are pruned when the emitting channel is
-%% closed, and whenever we receive new fine stats from a channel. So
-%% it's quite close to being a cache of "the previous stats we
-%% received".
-%%
-%% We also keep a timer going, in order to prune old samples from
-%% #state.aggregated_stats.
-%%
-%% Overall the object is to do all the aggregation when events come
-%% in, and make queries be simple lookups as much as possible. One
-%% area where this does not happen is the global overview - which is
-%% aggregated from vhost stats at query time since we do not want to
-%% reveal anything about other vhosts to unprivileged users.
-
--record(state, {
-          %% "stats" for which no calculations are required
-          tables,
-          %% database of aggregated samples
-          aggregated_stats,
-          %% index for detailed aggregated_stats that have 2-tuple keys
-          aggregated_stats_index,
-          %% What the previous info item was for any given
-          %% {queue/channel/connection}
-          old_stats,
-          gc_timer,
-          gc_next_key,
-          lookups,
-          interval,
-          event_refresh_ref,
-          rates_mode}).
-
--define(FINE_STATS_TYPES, [channel_queue_stats, channel_exchange_stats,
-                           channel_queue_exchange_stats]).
--define(TABLES, [queue_stats, connection_stats, channel_stats,
-                 consumers_by_queue, consumers_by_channel,
-                 node_stats, node_node_stats]).
-
--define(DELIVER_GET, [deliver, deliver_no_ack, get, get_no_ack]).
--define(FINE_STATS, [publish, publish_in, publish_out,
-                     ack, deliver_get, confirm, return_unroutable, redeliver] ++
-            ?DELIVER_GET).
-
-%% Most come from channels as fine stats, but queues emit these directly.
--define(QUEUE_MSG_RATES, [disk_reads, disk_writes]).
-
--define(MSG_RATES, ?FINE_STATS ++ ?QUEUE_MSG_RATES).
-
--define(QUEUE_MSG_COUNTS, [messages, messages_ready, messages_unacknowledged]).
-
--define(COARSE_NODE_STATS,
-        [mem_used, fd_used, sockets_used, proc_used, disk_free,
-         io_read_count,  io_read_bytes,  io_read_avg_time,
-         io_write_count, io_write_bytes, io_write_avg_time,
-         io_sync_count,  io_sync_avg_time,
-         io_seek_count,  io_seek_avg_time,
-         io_reopen_count, mnesia_ram_tx_count,  mnesia_disk_tx_count,
-         msg_store_read_count, msg_store_write_count,
-         queue_index_journal_write_count,
-         queue_index_write_count, queue_index_read_count]).
-
--define(COARSE_NODE_NODE_STATS, [send_bytes, recv_bytes]).
-
-%% Normally 0 and no history means "has never happened, don't
-%% report". But for these things we do want to report even at 0 with
-%% no history.
--define(ALWAYS_REPORT_STATS,
-        [io_read_avg_time, io_write_avg_time,
-         io_sync_avg_time | ?QUEUE_MSG_COUNTS]).
-
--define(COARSE_CONN_STATS, [recv_oct, send_oct]).
-
--define(GC_INTERVAL, 5000).
--define(GC_MIN_ROWS, 100).
--define(GC_MIN_RATIO, 0.01).
-
--define(DROP_LENGTH, 1000).
-
-prioritise_cast({event, #event{type  = Type,
-                               props = Props}}, Len, _State)
-  when (Type =:= channel_stats orelse
-        Type =:= queue_stats) andalso Len > ?DROP_LENGTH ->
-    case pget(idle_since, Props) of
-        unknown -> drop;
-        _       -> 0
-    end;
-prioritise_cast(_Msg, _Len, _State) ->
-    0.
-
-%% We want timely replies to queries even when overloaded, so return 5
-%% as priority. Also we only have access to the queue length here, not
-%% in handle_call/3, so stash it in the dictionary. This is a bit ugly
-%% but better than fiddling with gen_server2 even more.
-prioritise_call(_Msg, _From, Len, _State) ->
-    put(last_queue_length, Len),
-    5.
-
-%%----------------------------------------------------------------------------
-%% API
-%%----------------------------------------------------------------------------
-
-start_link() ->
-    Ref = make_ref(),
-    case gen_server2:start_link({global, ?MODULE}, ?MODULE, [Ref], []) of
-        {ok, Pid} -> register(?MODULE, Pid), %% [1]
-                     rabbit:force_event_refresh(Ref),
-                     {ok, Pid};
-        Else      -> Else
-    end.
-%% [1] For debugging it's helpful to locally register the name too
-%% since that shows up in places global names don't.
-
-%% R = Ranges, M = Mode
-augment_exchanges(Xs, R, M) -> safe_call({augment_exchanges, Xs, R, M}, Xs).
-augment_queues(Qs, R, M)    -> safe_call({augment_queues, Qs, R, M}, Qs).
-augment_vhosts(VHosts, R)   -> safe_call({augment_vhosts, VHosts, R}, VHosts).
-augment_nodes(Nodes, R)     -> safe_call({augment_nodes, Nodes, R}, Nodes).
-
-get_channel(Name, R)        -> safe_call({get_channel, Name, R}, not_found).
-get_connection(Name, R)     -> safe_call({get_connection, Name, R}, not_found).
-
-get_all_channels(R)         -> safe_call({get_all_channels, R}).
-get_all_connections(R)      -> safe_call({get_all_connections, R}).
-
-get_all_consumers()         -> safe_call({get_all_consumers, all}).
-get_all_consumers(V)        -> safe_call({get_all_consumers, V}).
-
-get_overview(User, R)       -> safe_call({get_overview, User, R}).
-get_overview(R)             -> safe_call({get_overview, all, R}).
-
-override_lookups(Lookups)   -> safe_call({override_lookups, Lookups}).
-reset_lookups()             -> safe_call(reset_lookups).
-
-safe_call(Term)          -> safe_call(Term, []).
-safe_call(Term, Default) -> safe_call(Term, Default, 1).
-
-%% See rabbit_mgmt_sup_sup for a discussion of the retry logic.
-safe_call(Term, Default, Retries) ->
-    rabbit_misc:with_exit_handler(
-      fun () ->
-              case Retries of
-                  0 -> Default;
-                  _ -> rabbit_mgmt_sup_sup:start_child(),
-                       safe_call(Term, Default, Retries - 1)
-              end
-      end,
-      fun () -> gen_server2:call({global, ?MODULE}, Term, infinity) end).
-
-%%----------------------------------------------------------------------------
-%% Internal, gen_server2 callbacks
-%%----------------------------------------------------------------------------
-
-init([Ref]) ->
-    %% When Rabbit is overloaded, it's usually especially important
-    %% that the management plugin work.
-    process_flag(priority, high),
-    {ok, Interval} = application:get_env(rabbit, collect_statistics_interval),
-    {ok, RatesMode} = application:get_env(rabbitmq_management, rates_mode),
-    rabbit_node_monitor:subscribe(self()),
-    rabbit_log:info("Statistics database started.~n"),
-    Table = fun () -> ets:new(rabbit_mgmt_db, [ordered_set]) end,
-    Tables = orddict:from_list([{Key, Table()} || Key <- ?TABLES]),
-    {ok, set_gc_timer(
-           reset_lookups(
-             #state{interval               = Interval,
-                    tables                 = Tables,
-                    old_stats              = Table(),
-                    aggregated_stats       = Table(),
-                    aggregated_stats_index = Table(),
-                    event_refresh_ref      = Ref,
-                    rates_mode             = RatesMode})), hibernate,
-     {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}.
-
-handle_call({augment_exchanges, Xs, Ranges, basic}, _From, State) ->
-    reply(list_exchange_stats(Ranges, Xs, State), State);
-
-handle_call({augment_exchanges, Xs, Ranges, full}, _From, State) ->
-    reply(detail_exchange_stats(Ranges, Xs, State), State);
-
-handle_call({augment_queues, Qs, Ranges, basic}, _From, State) ->
-    reply(list_queue_stats(Ranges, Qs, State), State);
-
-handle_call({augment_queues, Qs, Ranges, full}, _From, State) ->
-    reply(detail_queue_stats(Ranges, Qs, State), State);
-
-handle_call({augment_vhosts, VHosts, Ranges}, _From, State) ->
-    reply(vhost_stats(Ranges, VHosts, State), State);
-
-handle_call({augment_nodes, Nodes, Ranges}, _From, State) ->
-    {reply, node_stats(Ranges, Nodes, State), State};
-
-handle_call({get_channel, Name, Ranges}, _From,
-            State = #state{tables = Tables}) ->
-    case created_event(Name, channel_stats, Tables) of
-        not_found -> reply(not_found, State);
-        Ch        -> [Result] = detail_channel_stats(Ranges, [Ch], State),
-                     reply(Result, State)
-    end;
-
-handle_call({get_connection, Name, Ranges}, _From,
-            State = #state{tables = Tables}) ->
-    case created_event(Name, connection_stats, Tables) of
-        not_found -> reply(not_found, State);
-        Conn      -> [Result] = connection_stats(Ranges, [Conn], State),
-                     reply(Result, State)
-    end;
-
-handle_call({get_all_channels, Ranges}, _From,
-            State = #state{tables = Tables}) ->
-    Chans = created_events(channel_stats, Tables),
-    reply(list_channel_stats(Ranges, Chans, State), State);
-
-handle_call({get_all_connections, Ranges}, _From,
-            State = #state{tables = Tables}) ->
-    Conns = created_events(connection_stats, Tables),
-    reply(connection_stats(Ranges, Conns, State), State);
-
-handle_call({get_all_consumers, VHost},
-            _From, State = #state{tables = Tables}) ->
-    All = ets:tab2list(orddict:fetch(consumers_by_queue, Tables)),
-    {reply, [augment_msg_stats(
-               augment_consumer(Obj), State) ||
-                {{#resource{virtual_host = VHostC}, _Ch, _CTag}, Obj} <- All,
-                VHost =:= all orelse VHost =:= VHostC], State};
-
-handle_call({get_overview, User, Ranges}, _From,
-            State = #state{tables = Tables}) ->
-    VHosts = case User of
-                 all -> rabbit_vhost:list();
-                 _   -> rabbit_mgmt_util:list_visible_vhosts(User)
-             end,
-    %% TODO: there's no reason we can't do an overview of send_oct and
-    %% recv_oct now!
-    VStats = [read_simple_stats(vhost_stats, VHost, State) ||
-                 VHost <- VHosts],
-    MessageStats = [overview_sum(Type, VStats) || Type <- ?MSG_RATES],
-    QueueStats = [overview_sum(Type, VStats) || Type <- ?QUEUE_MSG_COUNTS],
-    F = case User of
-            all -> fun (L) -> length(L) end;
-            _   -> fun (L) -> length(rabbit_mgmt_util:filter_user(L, User)) end
-        end,
-    %% Filtering out the user's consumers would be rather expensive so let's
-    %% just not show it
-    Consumers = case User of
-                    all -> Table = orddict:fetch(consumers_by_queue, Tables),
-                           [{consumers, ets:info(Table, size)}];
-                    _   -> []
-                end,
-    ObjectTotals = Consumers ++
-        [{queues,      length([Q || V <- VHosts,
-                                    Q <- rabbit_amqqueue:list(V)])},
-         {exchanges,   length([X || V <- VHosts,
-                                    X <- rabbit_exchange:list(V)])},
-         {connections, F(created_events(connection_stats, Tables))},
-         {channels,    F(created_events(channel_stats, Tables))}],
-    reply([{message_stats, format_samples(Ranges, MessageStats, State)},
-           {queue_totals,  format_samples(Ranges, QueueStats, State)},
-           {object_totals, ObjectTotals},
-           {statistics_db_event_queue, get(last_queue_length)}], State);
-
-handle_call({override_lookups, Lookups}, _From, State) ->
-    reply(ok, State#state{lookups = Lookups});
-
-handle_call(reset_lookups, _From, State) ->
-    reply(ok, reset_lookups(State));
-
-%% Used in rabbit_mgmt_test_db where we need guarantees events have
-%% been handled before querying
-handle_call({event, Event = #event{reference = none}}, _From, State) ->
-    handle_event(Event, State),
-    reply(ok, State);
-
-handle_call(_Request, _From, State) ->
-    reply(not_understood, State).
-
-%% Only handle events that are real, or pertain to a force-refresh
-%% that we instigated.
-handle_cast({event, Event = #event{reference = none}}, State) ->
-    handle_event(Event, State),
-    noreply(State);
-
-handle_cast({event, Event = #event{reference = Ref}},
-            State = #state{event_refresh_ref = Ref}) ->
-    handle_event(Event, State),
-    noreply(State);
-
-handle_cast(_Request, State) ->
-    noreply(State).
-
-handle_info(gc, State) ->
-    noreply(set_gc_timer(gc_batch(State)));
-
-handle_info({node_down, Node}, State = #state{tables = Tables}) ->
-    Conns = created_events(connection_stats, Tables),
-    Chs = created_events(channel_stats, Tables),
-    delete_all_from_node(connection_closed, Node, Conns, State),
-    delete_all_from_node(channel_closed, Node, Chs, State),
-    noreply(State);
-
-handle_info(_Info, State) ->
-    noreply(State).
-
-terminate(_Arg, _State) ->
-    ok.
-
-code_change(_OldVsn, State, _Extra) ->
-    {ok, State}.
-
-reply(Reply, NewState) -> {reply, Reply, NewState, hibernate}.
-noreply(NewState) -> {noreply, NewState, hibernate}.
-
-set_gc_timer(State) ->
-    TRef = erlang:send_after(?GC_INTERVAL, self(), gc),
-    State#state{gc_timer = TRef}.
-
-reset_lookups(State) ->
-    State#state{lookups = [{exchange, fun rabbit_exchange:lookup/1},
-                           {queue,    fun rabbit_amqqueue:lookup/1}]}.
-
-handle_pre_hibernate(State) ->
-    %% rabbit_event can end up holding on to some memory after a busy
-    %% workout, but it's not a gen_server so we can't make it
-    %% hibernate. The best we can do is forcibly GC it here (if
-    %% rabbit_mgmt_db is hibernating the odds are rabbit_event is
-    %% quiescing in some way too).
-    rpc:multicall(
-      rabbit_mnesia:cluster_nodes(running), rabbit_mgmt_db_handler, gc, []),
-    {hibernate, State}.
-
-format_message_queue(Opt, MQ) -> rabbit_misc:format_message_queue(Opt, MQ).
-
-delete_all_from_node(Type, Node, Items, State) ->
-    [case node(Pid) of
-         Node -> handle_event(#event{type = Type, props = [{pid, Pid}]}, State);
-         _    -> ok
-     end || Item <- Items, Pid <- [pget(pid, Item)]].
-
-%%----------------------------------------------------------------------------
-%% Internal, utilities
-%%----------------------------------------------------------------------------
-
-pget(Key, List) -> pget(Key, List, unknown).
-
-%% id_name() and id() are for use when handling events, id_lookup()
-%% for when augmenting. The difference is that when handling events a
-%% queue name will be a resource, but when augmenting we will be
-%% passed a queue proplist that will already have been formatted -
-%% i.e. it will have name and vhost keys.
-id_name(node_stats)       -> name;
-id_name(node_node_stats)  -> route;
-id_name(vhost_stats)      -> name;
-id_name(queue_stats)      -> name;
-id_name(exchange_stats)   -> name;
-id_name(channel_stats)    -> pid;
-id_name(connection_stats) -> pid.
-
-id(Type, List) -> pget(id_name(Type), List).
-
-id_lookup(queue_stats, List) ->
-    rabbit_misc:r(pget(vhost, List), queue, pget(name, List));
-id_lookup(exchange_stats, List) ->
-    rabbit_misc:r(pget(vhost, List), exchange, pget(name, List));
-id_lookup(Type, List) ->
-    id(Type, List).
-
-lookup_element(Table, Key) -> lookup_element(Table, Key, 2).
-
-lookup_element(Table, Key, Pos) ->
-    try ets:lookup_element(Table, Key, Pos)
-    catch error:badarg -> []
-    end.
-
-fine_stats_id(ChPid, {Q, X}) -> {ChPid, Q, X};
-fine_stats_id(ChPid, QorX)   -> {ChPid, QorX}.
-
-floor(TS, #state{interval = Interval}) ->
-    rabbit_mgmt_util:floor(rabbit_mgmt_format:now_to_ms(TS), Interval).
-ceil(TS, #state{interval = Interval}) ->
-    rabbit_mgmt_util:ceil (rabbit_mgmt_format:now_to_ms(TS), Interval).
-
-details_key(Key) -> list_to_atom(atom_to_list(Key) ++ "_details").
-
-%%----------------------------------------------------------------------------
-%% Internal, event-receiving side
-%%----------------------------------------------------------------------------
-
-handle_event(#event{type = queue_stats, props = Stats, timestamp = Timestamp},
-             State) ->
-    handle_stats(queue_stats, Stats, Timestamp,
-                 [{fun rabbit_mgmt_format:properties/1,[backing_queue_status]},
-                  {fun rabbit_mgmt_format:now_to_str/1, [idle_since]},
-                  {fun rabbit_mgmt_format:queue_state/1, [state]}],
-                 ?QUEUE_MSG_COUNTS, ?QUEUE_MSG_RATES, State);
-
-handle_event(Event = #event{type = queue_deleted,
-                            props = [{name, Name}],
-                            timestamp = Timestamp},
-             State = #state{old_stats = OldTable}) ->
-    delete_consumers(Name, consumers_by_queue, consumers_by_channel, State),
-    %% This is fiddly. Unlike for connections and channels, we need to
-    %% decrease any amalgamated coarse stats for [messages,
-    %% messages_ready, messages_unacknowledged] for this queue - since
-    %% the queue's deletion means we have really got rid of messages!
-    Id = {coarse, {queue_stats, Name}},
-    %% This ceil must correspond to the ceil in append_samples/5
-    TS = ceil(Timestamp, State),
-    OldStats = lookup_element(OldTable, Id),
-    [record_sample(Id, {Key, -pget(Key, OldStats, 0), TS, State}, true, State)
-     || Key <- ?QUEUE_MSG_COUNTS],
-    delete_samples(channel_queue_stats,  {'_', Name}, State),
-    delete_samples(queue_exchange_stats, {Name, '_'}, State),
-    delete_samples(queue_stats,          Name,        State),
-    handle_deleted(queue_stats, Event, State);
-
-handle_event(Event = #event{type = exchange_deleted,
-                            props = [{name, Name}]}, State) ->
-    delete_samples(channel_exchange_stats,  {'_', Name}, State),
-    delete_samples(queue_exchange_stats,    {'_', Name}, State),
-    delete_samples(exchange_stats,          Name,        State),
-    handle_deleted(exchange_stats, Event, State);
-
-handle_event(#event{type = vhost_deleted,
-                    props = [{name, Name}]}, State) ->
-    delete_samples(vhost_stats, Name, State);
-
-handle_event(#event{type = connection_created, props = Stats}, State) ->
-    handle_created(
-      connection_stats, Stats,
-      [{fun rabbit_mgmt_format:addr/1,         [host, peer_host]},
-       {fun rabbit_mgmt_format:port/1,         [port, peer_port]},
-       {fun rabbit_mgmt_format:protocol/1,     [protocol]},
-       {fun rabbit_mgmt_format:amqp_table/1,   [client_properties]}], State);
-
-handle_event(#event{type = connection_stats, props = Stats,
-                    timestamp = Timestamp},
-             State) ->
-    handle_stats(connection_stats, Stats, Timestamp, [], ?COARSE_CONN_STATS,
-                 State);
-
-handle_event(Event = #event{type  = connection_closed,
-                            props = [{pid, Pid}]}, State) ->
-    delete_samples(connection_stats, Pid, State),
-    handle_deleted(connection_stats, Event, State);
-
-handle_event(#event{type = channel_created, props = Stats}, State) ->
-    handle_created(channel_stats, Stats, [], State);
-
-handle_event(#event{type = channel_stats, props = Stats, timestamp = Timestamp},
-             State = #state{old_stats = OldTable}) ->
-    handle_stats(channel_stats, Stats, Timestamp,
-                 [{fun rabbit_mgmt_format:now_to_str/1, [idle_since]}],
-                 [], State),
-    ChPid = id(channel_stats, Stats),
-    AllStats = [old_fine_stats(Type, Stats, State)
-                || Type <- ?FINE_STATS_TYPES],
-    ets:match_delete(OldTable, {{fine, {ChPid, '_'}},      '_'}),
-    ets:match_delete(OldTable, {{fine, {ChPid, '_', '_'}}, '_'}),
-    [handle_fine_stats(Timestamp, AllStatsElem, State)
-     || AllStatsElem <- AllStats];
-
-handle_event(Event = #event{type = channel_closed,
-                            props = [{pid, Pid}]},
-             State = #state{old_stats = Old}) ->
-    delete_consumers(Pid, consumers_by_channel, consumers_by_queue, State),
-    delete_samples(channel_queue_stats,    {Pid, '_'}, State),
-    delete_samples(channel_exchange_stats, {Pid, '_'}, State),
-    delete_samples(channel_stats,          Pid,        State),
-    handle_deleted(channel_stats, Event, State),
-    ets:match_delete(Old, {{fine, {Pid, '_'}},      '_'}),
-    ets:match_delete(Old, {{fine, {Pid, '_', '_'}}, '_'});
-
-handle_event(#event{type = consumer_created, props = Props}, State) ->
-    Fmt = [{fun rabbit_mgmt_format:amqp_table/1, [arguments]}],
-    handle_consumer(fun(Table, Id, P0) ->
-                            P = rabbit_mgmt_format:format(P0, Fmt),
-                            ets:insert(Table, {Id, P})
-                    end,
-                    Props, State);
-
-handle_event(#event{type = consumer_deleted, props = Props}, State) ->
-    handle_consumer(fun(Table, Id, _P) -> ets:delete(Table, Id) end,
-                    Props, State);
-
-%% TODO: we don't clear up after dead nodes here - this is a very tiny
-%% leak every time a node is permanently removed from the cluster. Do
-%% we care?
-handle_event(#event{type = node_stats, props = Stats0, timestamp = Timestamp},
-             State) ->
-    Stats = proplists:delete(persister_stats, Stats0) ++
-        pget(persister_stats, Stats0),
-    handle_stats(node_stats, Stats, Timestamp, [], ?COARSE_NODE_STATS, State);
-
-handle_event(#event{type = node_node_stats, props = Stats,
-                    timestamp = Timestamp}, State) ->
-    handle_stats(node_node_stats, Stats, Timestamp, [], ?COARSE_NODE_NODE_STATS,
-                 State);
-
-handle_event(Event = #event{type  = node_node_deleted,
-                            props = [{route, Route}]}, State) ->
-    delete_samples(node_node_stats, Route, State),
-    handle_deleted(node_node_stats, Event, State);
-
-handle_event(_Event, _State) ->
-    ok.
-
-handle_created(TName, Stats, Funs, State = #state{tables = Tables}) ->
-    Formatted = rabbit_mgmt_format:format(Stats, Funs),
-    ets:insert(orddict:fetch(TName, Tables), {{id(TName, Stats), create},
-                                              Formatted,
-                                              pget(name, Stats)}),
-    {ok, State}.
-
-handle_stats(TName, Stats, Timestamp, Funs, RatesKeys, State) ->
-    handle_stats(TName, Stats, Timestamp, Funs, RatesKeys, [], State).
-
-handle_stats(TName, Stats, Timestamp, Funs, RatesKeys, NoAggRatesKeys,
-             State = #state{tables = Tables, old_stats = OldTable}) ->
-    Id = id(TName, Stats),
-    IdSamples = {coarse, {TName, Id}},
-    OldStats = lookup_element(OldTable, IdSamples),
-    append_samples(
-      Stats, Timestamp, OldStats, IdSamples, RatesKeys, true, State),
-    append_samples(
-      Stats, Timestamp, OldStats, IdSamples, NoAggRatesKeys, false, State),
-    StripKeys = [id_name(TName)] ++ RatesKeys ++ ?FINE_STATS_TYPES,
-    Stats1 = [{K, V} || {K, V} <- Stats, not lists:member(K, StripKeys)],
-    Stats2 = rabbit_mgmt_format:format(Stats1, Funs),
-    ets:insert(orddict:fetch(TName, Tables), {{Id, stats}, Stats2, Timestamp}),
-    {ok, State}.
-
-handle_deleted(TName, #event{props = Props}, State = #state{tables    = Tables,
-                                                            old_stats = Old}) ->
-    Id = id(TName, Props),
-    case orddict:find(TName, Tables) of
-        {ok, Table} -> ets:delete(Table, {Id, create}),
-                       ets:delete(Table, {Id, stats});
-        error       -> ok
-    end,
-    ets:delete(Old, {coarse, {TName, Id}}),
-    {ok, State}.
-
-handle_consumer(Fun, Props, State = #state{tables = Tables}) ->
-    P = rabbit_mgmt_format:format(Props, []),
-    CTag = pget(consumer_tag, P),
-    Q    = pget(queue,        P),
-    Ch   = pget(channel,      P),
-    QTable  = orddict:fetch(consumers_by_queue,   Tables),
-    ChTable = orddict:fetch(consumers_by_channel, Tables),
-    Fun(QTable,  {Q, Ch, CTag}, P),
-    Fun(ChTable, {Ch, Q, CTag}, P),
-    {ok, State}.
-
-%% The consumer_deleted event is emitted by queues themselves -
-%% therefore in the event that a queue dies suddenly we may not get
-%% it. The best way to handle this is to make sure we also clean up
-%% consumers when we hear about any queue going down.
-delete_consumers(PrimId, PrimTableName, SecTableName,
-                 #state{tables = Tables}) ->
-    Table1 = orddict:fetch(PrimTableName, Tables),
-    Table2 = orddict:fetch(SecTableName, Tables),
-    SecIdCTags = ets:match(Table1, {{PrimId, '$1', '$2'}, '_'}),
-    ets:match_delete(Table1, {{PrimId, '_', '_'}, '_'}),
-    [ets:delete(Table2, {SecId, PrimId, CTag}) || [SecId, CTag] <- SecIdCTags].
-
-old_fine_stats(Type, Props, #state{old_stats = Old}) ->
-    case pget(Type, Props) of
-        unknown       -> ignore;
-        AllFineStats0 -> ChPid = id(channel_stats, Props),
-                         [begin
-                              Id = fine_stats_id(ChPid, Ids),
-                              {Id, Stats, lookup_element(Old, {fine, Id})}
-                          end || {Ids, Stats} <- AllFineStats0]
-    end.
-
-handle_fine_stats(_Timestamp, ignore, _State) ->
-    ok;
-
-handle_fine_stats(Timestamp, AllStats, State) ->
-    [handle_fine_stat(Id, Stats, Timestamp, OldStats, State) ||
-        {Id, Stats, OldStats} <- AllStats].
-
-handle_fine_stat(Id, Stats, Timestamp, OldStats, State) ->
-    Total = lists:sum([V || {K, V} <- Stats, lists:member(K, ?DELIVER_GET)]),
-    Stats1 = case Total of
-                 0 -> Stats;
-                 _ -> [{deliver_get, Total}|Stats]
-             end,
-    append_samples(Stats1, Timestamp, OldStats, {fine, Id}, all, true, State).
-
-delete_samples(Type, {Id, '_'}, State) ->
-    delete_samples_with_index(Type, Id, fun forward/2, State);
-delete_samples(Type, {'_', Id}, State) ->
-    delete_samples_with_index(Type, Id, fun reverse/2, State);
-delete_samples(Type, Id, #state{aggregated_stats = ETS}) ->
-    ets:match_delete(ETS, delete_match(Type, Id)).
-
-delete_samples_with_index(Type, Id, Order,
-                          #state{aggregated_stats       = ETS,
-                                 aggregated_stats_index = ETSi}) ->
-    Ids2 = lists:append(ets:match(ETSi, {{Type, Id, '$1'}})),
-    ets:match_delete(ETSi, {{Type, Id, '_'}}),
-    [begin
-         ets:match_delete(ETS, delete_match(Type, Order(Id, Id2))),
-         ets:match_delete(ETSi, {{Type, Id2, Id}})
-     end || Id2 <- Ids2].
-
-forward(A, B) -> {A, B}.
-reverse(A, B) -> {B, A}.
-
-delete_match(Type, Id) -> {{{Type, Id}, '_'}, '_'}.
-
-append_samples(Stats, TS, OldStats, Id, Keys, Agg,
-               State = #state{old_stats = OldTable}) ->
-    case ignore_coarse_sample(Id, State) of
-        false ->
-            %% This ceil must correspond to the ceil in handle_event
-            %% queue_deleted
-            NewMS = ceil(TS, State),
-            case Keys of
-                all -> [append_sample(K, V, NewMS, OldStats, Id, Agg, State)
-                        || {K, V} <- Stats];
-                _   -> [append_sample(K, V, NewMS, OldStats, Id, Agg, State)
-                        || K <- Keys,
-                           V <- [pget(K, Stats)],
-                           V =/= 0 orelse lists:member(K, ?ALWAYS_REPORT_STATS)]
-            end,
-            ets:insert(OldTable, {Id, Stats});
-        true ->
-            ok
-    end.
-
-append_sample(Key, Val, NewMS, OldStats, Id, Agg, State) when is_number(Val) ->
-    OldVal = case pget(Key, OldStats, 0) of
-        N when is_number(N) -> N;
-        _                   -> 0
-    end,
-    record_sample(Id, {Key, Val - OldVal, NewMS, State}, Agg, State),
-    ok;
-append_sample(_Key, _Value, _NewMS, _OldStats, _Id, _Agg, _State) ->
-    ok.
-
-ignore_coarse_sample({coarse, {queue_stats, Q}}, State) ->
-    not object_exists(Q, State);
-ignore_coarse_sample(_, _) ->
-    false.
-
-%% Node stats do not have a vhost of course
-record_sample({coarse, {node_stats, _Node} = Id}, Args, true, _State) ->
-    record_sample0(Id, Args);
-
-record_sample({coarse, {node_node_stats, _Names} = Id}, Args, true, _State) ->
-    record_sample0(Id, Args);
-
-record_sample({coarse, Id}, Args, false, _State) ->
-    record_sample0(Id, Args);
-
-record_sample({coarse, Id}, Args, true, State) ->
-    record_sample0(Id, Args),
-    record_sample0({vhost_stats, vhost(Id, State)}, Args);
-
-%% Deliveries / acks (Q -> Ch)
-record_sample({fine, {Ch, Q = #resource{kind = queue}}}, Args, true, State) ->
-    case object_exists(Q, State) of
-        true  -> record_sample0({channel_queue_stats, {Ch, Q}}, Args),
-                 record_sample0({queue_stats,         Q},       Args);
-        false -> ok
-    end,
-    record_sample0({channel_stats, Ch},       Args),
-    record_sample0({vhost_stats,   vhost(Q)}, Args);
-
-%% Publishes / confirms (Ch -> X)
-record_sample({fine, {Ch, X = #resource{kind = exchange}}}, Args, true,State) ->
-    case object_exists(X, State) of
-        true  -> record_sample0({channel_exchange_stats, {Ch, X}}, Args),
-                 record_sampleX(publish_in,              X,        Args);
-        false -> ok
-    end,
-    record_sample0({channel_stats, Ch},       Args),
-    record_sample0({vhost_stats,   vhost(X)}, Args);
-
-%% Publishes (but not confirms) (Ch -> X -> Q)
-record_sample({fine, {_Ch,
-                      Q = #resource{kind = queue},
-                      X = #resource{kind = exchange}}}, Args, true, State) ->
-    %% TODO This one logically feels like it should be here. It would
-    %% correspond to "publishing channel message rates to queue" -
-    %% which would be nice to handle - except we don't. And just
-    %% uncommenting this means it gets merged in with "consuming
-    %% channel delivery from queue" - which is not very helpful.
-    %% record_sample0({channel_queue_stats, {Ch, Q}}, Args),
-    QExists = object_exists(Q, State),
-    XExists = object_exists(X, State),
-    case QExists of
-        true  -> record_sample0({queue_stats,          Q},       Args);
-        false -> ok
-    end,
-    case QExists andalso XExists of
-        true  -> record_sample0({queue_exchange_stats, {Q,  X}}, Args);
-        false -> ok
-    end,
-    case XExists of
-        true  -> record_sampleX(publish_out,           X,        Args);
-        false -> ok
-    end.
-
-%% We have to check the queue and exchange objects still exist since
-%% their deleted event could be overtaken by a channel stats event
-%% which contains fine stats referencing them. That's also why we
-%% don't need to check the channels exist - their deleted event can't
-%% be overtaken by their own last stats event.
-%%
-%% Also, sometimes the queue_deleted event is not emitted by the queue
-%% (in the nodedown case) - so it can overtake the final queue_stats
-%% event (which is not *guaranteed* to be lost). So we make a similar
-%% check for coarse queue stats.
-%%
-%% We can be sure that mnesia will be up to date by the time we receive
-%% the event (even though we dirty read) since the deletions are
-%% synchronous and we do not emit the deleted event until after the
-%% deletion has occurred.
-object_exists(Name = #resource{kind = Kind}, #state{lookups = Lookups}) ->
-    case (pget(Kind, Lookups))(Name) of
-        {ok, _} -> true;
-        _       -> false
-    end.
-
-vhost(#resource{virtual_host = VHost}) -> VHost.
-
-vhost({queue_stats, #resource{virtual_host = VHost}}, _State) ->
-    VHost;
-vhost({TName, Pid}, #state{tables = Tables}) ->
-    Table = orddict:fetch(TName, Tables),
-    pget(vhost, lookup_element(Table, {Pid, create})).
-
-%% exchanges have two sets of "publish" stats, so rearrange things a touch
-record_sampleX(RenamePublishTo, X, {publish, Diff, TS, State}) ->
-    record_sample0({exchange_stats, X}, {RenamePublishTo, Diff, TS, State});
-record_sampleX(_RenamePublishTo, X, {Type, Diff, TS, State}) ->
-    record_sample0({exchange_stats, X}, {Type, Diff, TS, State}).
-
-%% Ignore case where ID1 and ID2 are in a tuple, i.e. detailed stats,
-%% when in basic mode
-record_sample0({Type, {_ID1, _ID2}}, {_, _, _, #state{rates_mode = basic}})
-  when Type =/= node_node_stats ->
-    ok;
-record_sample0(Id0, {Key, Diff, TS, #state{aggregated_stats       = ETS,
-                                           aggregated_stats_index = ETSi}}) ->
-    Id = {Id0, Key},
-    Old = case lookup_element(ETS, Id) of
-              [] -> case Id0 of
-                        {Type, {Id1, Id2}} ->
-                            ets:insert(ETSi, {{Type, Id2, Id1}}),
-                            ets:insert(ETSi, {{Type, Id1, Id2}});
-                        _ ->
-                            ok
-                    end,
-                    rabbit_mgmt_stats:blank();
-              E  -> E
-          end,
-    ets:insert(ETS, {Id, rabbit_mgmt_stats:record(TS, Diff, Old)}).
-
-%%----------------------------------------------------------------------------
-%% Internal, querying side
-%%----------------------------------------------------------------------------
-
--define(QUEUE_DETAILS,
-        {queue_stats, [{incoming,   queue_exchange_stats, fun first/1},
-                       {deliveries, channel_queue_stats,  fun second/1}]}).
-
--define(EXCHANGE_DETAILS,
-        {exchange_stats, [{incoming, channel_exchange_stats, fun second/1},
-                          {outgoing, queue_exchange_stats,   fun second/1}]}).
-
--define(CHANNEL_DETAILS,
-        {channel_stats, [{publishes,  channel_exchange_stats, fun first/1},
-                         {deliveries, channel_queue_stats,    fun first/1}]}).
-
--define(NODE_DETAILS,
-        {node_stats, [{cluster_links, node_node_stats, fun first/1}]}).
-
-first(Id)  -> {Id, '$1'}.
-second(Id) -> {'$1', Id}.
-
-list_queue_stats(Ranges, Objs, State) ->
-    adjust_hibernated_memory_use(
-      merge_stats(Objs, queue_funs(Ranges, State))).
-
-detail_queue_stats(Ranges, Objs, State) ->
-    adjust_hibernated_memory_use(
-      merge_stats(Objs, [consumer_details_fun(
-                           fun (Props) -> id_lookup(queue_stats, Props) end,
-                           consumers_by_queue, State),
-                         detail_stats_fun(Ranges, ?QUEUE_DETAILS, State)
-                         | queue_funs(Ranges, State)])).
-
-queue_funs(Ranges, State) ->
-    [basic_stats_fun(queue_stats, State),
-     simple_stats_fun(Ranges, queue_stats, State),
-     augment_msg_stats_fun(State)].
-
-list_exchange_stats(Ranges, Objs, State) ->
-    merge_stats(Objs, [simple_stats_fun(Ranges, exchange_stats, State),
-                       augment_msg_stats_fun(State)]).
-
-detail_exchange_stats(Ranges, Objs, State) ->
-    merge_stats(Objs, [simple_stats_fun(Ranges, exchange_stats, State),
-                       detail_stats_fun(Ranges, ?EXCHANGE_DETAILS, State),
-                       augment_msg_stats_fun(State)]).
-
-connection_stats(Ranges, Objs, State) ->
-    merge_stats(Objs, [basic_stats_fun(connection_stats, State),
-                       simple_stats_fun(Ranges, connection_stats, State),
-                       augment_msg_stats_fun(State)]).
-
-list_channel_stats(Ranges, Objs, State) ->
-    merge_stats(Objs, [basic_stats_fun(channel_stats, State),
-                       simple_stats_fun(Ranges, channel_stats, State),
-                       augment_msg_stats_fun(State)]).
-
-detail_channel_stats(Ranges, Objs, State) ->
-    merge_stats(Objs, [basic_stats_fun(channel_stats, State),
-                       simple_stats_fun(Ranges, channel_stats, State),
-                       consumer_details_fun(
-                         fun (Props) -> pget(pid, Props) end,
-                         consumers_by_channel, State),
-                       detail_stats_fun(Ranges, ?CHANNEL_DETAILS, State),
-                       augment_msg_stats_fun(State)]).
-
-vhost_stats(Ranges, Objs, State) ->
-    merge_stats(Objs, [simple_stats_fun(Ranges, vhost_stats, State)]).
-
-node_stats(Ranges, Objs, State) ->
-    merge_stats(Objs, [basic_stats_fun(node_stats, State),
-                       simple_stats_fun(Ranges, node_stats, State),
-                       detail_and_basic_stats_fun(
-                         node_node_stats, Ranges, ?NODE_DETAILS, State)]).
-
-merge_stats(Objs, Funs) ->
-    [lists:foldl(fun (Fun, Props) -> combine(Fun(Props), Props) end, Obj, Funs)
-     || Obj <- Objs].
-
-combine(New, Old) ->
-    case pget(state, Old) of
-        unknown -> New ++ Old;
-        live    -> New ++ proplists:delete(state, Old);
-        _       -> proplists:delete(state, New) ++ Old
-    end.
-
-%% i.e. the non-calculated stats
-basic_stats_fun(Type, #state{tables = Tables}) ->
-    Table = orddict:fetch(Type, Tables),
-    fun (Props) ->
-            Id = id_lookup(Type, Props),
-            lookup_element(Table, {Id, stats})
-    end.
-
-%% i.e. coarse stats, and fine stats aggregated up to a single number per thing
-simple_stats_fun(Ranges, Type, State) ->
-    fun (Props) ->
-            Id = id_lookup(Type, Props),
-            extract_msg_stats(
-              format_samples(Ranges, read_simple_stats(Type, Id, State), State))
-    end.
-
-%% i.e. fine stats that are broken out per sub-thing
-detail_stats_fun(Ranges, {IdType, FineSpecs}, State) ->
-    fun (Props) ->
-            Id = id_lookup(IdType, Props),
-            [detail_stats(Ranges, Name, AggregatedStatsType, IdFun(Id), State)
-             || {Name, AggregatedStatsType, IdFun} <- FineSpecs]
-    end.
-
-%% This does not quite do the same as detail_stats_fun +
-%% basic_stats_fun; the basic part here assumes compound keys (like
-%% detail stats) but non-calculated (like basic stats). Currently the
-%% only user of that is node-node stats.
-%%
-%% We also assume that FineSpecs is single length here (at [1]).
-detail_and_basic_stats_fun(Type, Ranges, {IdType, FineSpecs},
-                           State = #state{tables = Tables}) ->
-    Table = orddict:fetch(Type, Tables),
-    F = detail_stats_fun(Ranges, {IdType, FineSpecs}, State),
-    fun (Props) ->
-            Id = id_lookup(IdType, Props),
-            BasicStatsRaw = ets:match(Table, {{{Id, '$1'}, stats}, '$2', '_'}),
-            BasicStatsDict = dict:from_list([{K, V} || [K,V] <- BasicStatsRaw]),
-            [{K, Items}] = F(Props), %% [1]
-            Items2 = [case dict:find(id_lookup(IdType, Item), BasicStatsDict) of
-                          {ok, BasicStats} -> BasicStats ++ Item;
-                          error            -> Item
-                      end || Item <- Items],
-            [{K, Items2}]
-    end.
-
-read_simple_stats(Type, Id, #state{aggregated_stats = ETS}) ->
-    FromETS = ets:match(ETS, {{{Type, Id}, '$1'}, '$2'}),
-    [{K, V} || [K, V] <- FromETS].
-
-read_detail_stats(Type, Id, #state{aggregated_stats = ETS}) ->
-    %% Id must contain '$1'
-    FromETS = ets:match(ETS, {{{Type, Id}, '$2'}, '$3'}),
-    %% [[G, K, V]] -> [{G, [{K, V}]}] where G is Q/X/Ch, K is from
-    %% ?FINE_STATS and V is a stats tree
-    %% TODO does this need to be optimised?
-    lists:foldl(
-      fun ([G, K, V], L) ->
-              case lists:keyfind(G, 1, L) of
-                  false    -> [{G, [{K, V}]} | L];
-                  {G, KVs} -> lists:keyreplace(G, 1, L, {G, [{K, V} | KVs]})
-              end
-      end, [], FromETS).
-
-extract_msg_stats(Stats) ->
-    FineStats = lists:append([[K, details_key(K)] || K <- ?MSG_RATES]),
-    {MsgStats, Other} =
-        lists:partition(fun({K, _}) -> lists:member(K, FineStats) end, Stats),
-    case MsgStats of
-        [] -> Other;
-        _  -> [{message_stats, MsgStats} | Other]
-    end.
-
-detail_stats(Ranges, Name, AggregatedStatsType, Id, State) ->
-    {Name,
-     [[{stats, format_samples(Ranges, KVs, State)} | format_detail_id(G, State)]
-      || {G, KVs} <- read_detail_stats(AggregatedStatsType, Id, State)]}.
-
-format_detail_id(ChPid, State) when is_pid(ChPid) ->
-    augment_msg_stats([{channel, ChPid}], State);
-format_detail_id(#resource{name = Name, virtual_host = Vhost, kind = Kind},
-                 _State) ->
-    [{Kind, [{name, Name}, {vhost, Vhost}]}];
-format_detail_id(Node, _State) when is_atom(Node) ->
-    [{name, Node}].
-
-format_samples(Ranges, ManyStats, #state{interval = Interval}) ->
-    lists:append(
-      [case rabbit_mgmt_stats:is_blank(Stats) andalso
-           not lists:member(K, ?ALWAYS_REPORT_STATS) of
-           true  -> [];
-           false -> {Details, Counter} = rabbit_mgmt_stats:format(
-                                           pick_range(K, Ranges),
-                                           Stats, Interval),
-                    [{K,              Counter},
-                     {details_key(K), Details}]
-       end || {K, Stats} <- ManyStats]).
-
-pick_range(K, {RangeL, RangeM, RangeD, RangeN}) ->
-    case {lists:member(K, ?QUEUE_MSG_COUNTS),
-          lists:member(K, ?MSG_RATES),
-          lists:member(K, ?COARSE_CONN_STATS),
-          lists:member(K, ?COARSE_NODE_STATS)
-          orelse lists:member(K, ?COARSE_NODE_NODE_STATS)} of
-        {true, false, false, false} -> RangeL;
-        {false, true, false, false} -> RangeM;
-        {false, false, true, false} -> RangeD;
-        {false, false, false, true} -> RangeN
-    end.
-
-%% We do this when retrieving the queue record rather than when
-%% storing it since the memory use will drop *after* we find out about
-%% hibernation, so to do it when we receive a queue stats event would
-%% be fiddly and racy. This should be quite cheap though.
-adjust_hibernated_memory_use(Qs) ->
-    Pids = [pget(pid, Q) ||
-               Q <- Qs, pget(idle_since, Q, not_idle) =/= not_idle],
-    %% We use delegate here not for ordering reasons but because we
-    %% want to get the right amount of parallelism and minimise
-    %% cross-cluster communication.
-    {Mem, _BadNodes} = delegate:invoke(Pids, {erlang, process_info, [memory]}),
-    MemDict = dict:from_list([{P, M} || {P, M = {memory, _}} <- Mem]),
-    [case dict:find(pget(pid, Q), MemDict) of
-         error        -> Q;
-         {ok, Memory} -> [Memory|proplists:delete(memory, Q)]
-     end || Q <- Qs].
-
-created_event(Name, Type, Tables) ->
-    Table = orddict:fetch(Type, Tables),
-    case ets:match(Table, {{'$1', create}, '_', Name}) of
-        []     -> not_found;
-        [[Id]] -> lookup_element(Table, {Id, create})
-    end.
-
-created_events(Type, Tables) ->
-    [Facts || {{_, create}, Facts, _Name}
-                  <- ets:tab2list(orddict:fetch(Type, Tables))].
-
-consumer_details_fun(KeyFun, TableName, State = #state{tables = Tables}) ->
-    Table = orddict:fetch(TableName, Tables),
-    fun ([])    -> [];
-        (Props) -> Pattern = {KeyFun(Props), '_', '_'},
-                   [{consumer_details,
-                     [augment_msg_stats(augment_consumer(Obj), State)
-                      || Obj <- lists:append(
-                                  ets:match(Table, {Pattern, '$1'}))]}]
-    end.
-
-augment_consumer(Obj) ->
-    [{queue, rabbit_mgmt_format:resource(pget(queue, Obj))} |
-     proplists:delete(queue, Obj)].
-
-%%----------------------------------------------------------------------------
-%% Internal, query-time summing for overview
-%%----------------------------------------------------------------------------
-
-overview_sum(Type, VHostStats) ->
-    Stats = [pget(Type, VHost, rabbit_mgmt_stats:blank())
-             || VHost <- VHostStats],
-    {Type, rabbit_mgmt_stats:sum(Stats)}.
-
-%%----------------------------------------------------------------------------
-%% Internal, query-time augmentation
-%%----------------------------------------------------------------------------
-
-augment_msg_stats(Props, State) ->
-    rabbit_mgmt_format:strip_pids(
-      (augment_msg_stats_fun(State))(Props) ++ Props).
-
-augment_msg_stats_fun(State) ->
-    Funs = [{connection, fun augment_connection_pid/2},
-            {channel,    fun augment_channel_pid/2},
-            {owner_pid,  fun augment_connection_pid/2}],
-    fun (Props) -> augment(Props, Funs, State) end.
-
-augment(Items, Funs, State) ->
-    Augmented = [augment(K, Items, Fun, State) || {K, Fun} <- Funs],
-    [{K, V} || {K, V} <- Augmented, V =/= unknown].
-
-augment(K, Items, Fun, State) ->
-    Key = details_key(K),
-    case pget(K, Items) of
-        none    -> {Key, unknown};
-        unknown -> {Key, unknown};
-        Id      -> {Key, Fun(Id, State)}
-    end.
-
-augment_channel_pid(Pid, #state{tables = Tables}) ->
-    Ch = lookup_element(orddict:fetch(channel_stats, Tables),
-                        {Pid, create}),
-    Conn = lookup_element(orddict:fetch(connection_stats, Tables),
-                          {pget(connection, Ch), create}),
-    [{name,            pget(name,   Ch)},
-     {number,          pget(number, Ch)},
-     {user,            pget(user,   Ch)},
-     {connection_name, pget(name,         Conn)},
-     {peer_port,       pget(peer_port,    Conn)},
-     {peer_host,       pget(peer_host,    Conn)}].
-
-augment_connection_pid(Pid, #state{tables = Tables}) ->
-    Conn = lookup_element(orddict:fetch(connection_stats, Tables),
-                          {Pid, create}),
-    [{name,         pget(name,         Conn)},
-     {peer_port,    pget(peer_port,    Conn)},
-     {peer_host,    pget(peer_host,    Conn)}].
-
-%%----------------------------------------------------------------------------
-%% Internal, event-GCing
-%%----------------------------------------------------------------------------
-
-gc_batch(State = #state{aggregated_stats = ETS}) ->
-    {ok, Policies} = application:get_env(
-                       rabbitmq_management, sample_retention_policies),
-    Rows = erlang:max(?GC_MIN_ROWS,
-                      round(?GC_MIN_RATIO * ets:info(ETS, size))),
-    gc_batch(Rows, Policies, State).
-
-gc_batch(0, _Policies, State) ->
-    State;
-gc_batch(Rows, Policies, State = #state{aggregated_stats = ETS,
-                                        gc_next_key      = Key0}) ->
-    Key = case Key0 of
-              undefined -> ets:first(ETS);
-              _         -> ets:next(ETS, Key0)
-          end,
-    Key1 = case Key of
-               '$end_of_table' -> undefined;
-               _               -> Now = floor(os:timestamp(), State),
-                                  Stats = ets:lookup_element(ETS, Key, 2),
-                                  gc(Key, Stats, Policies, Now, ETS),
-                                  Key
-           end,
-    gc_batch(Rows - 1, Policies, State#state{gc_next_key = Key1}).
-
-gc({{Type, Id}, Key}, Stats, Policies, Now, ETS) ->
-    Policy = pget(retention_policy(Type), Policies),
-    case rabbit_mgmt_stats:gc({Policy, Now}, Stats) of
-        Stats  -> ok;
-        Stats2 -> ets:insert(ETS, {{{Type, Id}, Key}, Stats2})
-    end.
-
-retention_policy(node_stats)             -> global;
-retention_policy(node_node_stats)        -> global;
-retention_policy(vhost_stats)            -> global;
-retention_policy(queue_stats)            -> basic;
-retention_policy(exchange_stats)         -> basic;
-retention_policy(connection_stats)       -> basic;
-retention_policy(channel_stats)          -> basic;
-retention_policy(queue_exchange_stats)   -> detailed;
-retention_policy(channel_exchange_stats) -> detailed;
-retention_policy(channel_queue_stats)    -> detailed.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_stats.erl b/rabbitmq-server/plugins-src/rabbitmq-management/src/rabbit_mgmt_stats.erl
deleted file mode 100644 (file)
index 3e0c8a2..0000000
+++ /dev/null
@@ -1,201 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Plugin.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2012 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_mgmt_stats).
-
--include("rabbit_mgmt.hrl").
-
--export([blank/0, is_blank/1, record/3, format/3, sum/1, gc/2]).
-
--import(rabbit_misc, [pget/2]).
-
-%%----------------------------------------------------------------------------
-
-blank() -> #stats{diffs = gb_trees:empty(), base = 0}.
-
-is_blank(#stats{diffs = Diffs, base = 0}) -> gb_trees:is_empty(Diffs);
-is_blank(#stats{}) ->                        false.
-
-%%----------------------------------------------------------------------------
-%% Event-time
-%%----------------------------------------------------------------------------
-
-record(TS, Diff, Stats = #stats{diffs = Diffs}) ->
-    Diffs2 = case gb_trees:lookup(TS, Diffs) of
-                 {value, Total} -> gb_trees:update(TS, Diff + Total, Diffs);
-                 none           -> gb_trees:insert(TS, Diff, Diffs)
-             end,
-    Stats#stats{diffs = Diffs2}.
-
-%%----------------------------------------------------------------------------
-%% Query-time
-%%----------------------------------------------------------------------------
-
-format(no_range, #stats{diffs = Diffs, base = Base}, Interval) ->
-    Now = rabbit_mgmt_format:now_to_ms(os:timestamp()),
-    RangePoint = ((Now div Interval) * Interval) - Interval,
-    Count = sum_entire_tree(gb_trees:iterator(Diffs), Base),
-    {[{rate, format_rate(
-               Diffs, RangePoint, Interval, Interval)}], Count};
-
-format(Range, #stats{diffs = Diffs, base = Base}, Interval) ->
-    RangePoint = Range#range.last - Interval,
-    {Samples, Count} = extract_samples(
-                         Range, Base, gb_trees:iterator(Diffs), []),
-    Part1 = [{rate,    format_rate(
-                         Diffs, RangePoint, Range#range.incr, Interval)},
-             {samples, Samples}],
-    Length = length(Samples),
-    Part2 = case Length > 1 of
-                true  -> [{sample, S2}, {timestamp, T2}] = hd(Samples),
-                         [{sample, S1}, {timestamp, T1}] = lists:last(Samples),
-                         Total = lists:sum([pget(sample, I) || I <- Samples]),
-                         [{avg_rate, (S2 - S1) * 1000 / (T2 - T1)},
-                          {avg,      Total / Length}];
-                false -> []
-            end,
-    {Part1 ++ Part2, Count}.
-
-format_rate(Diffs, RangePoint, Incr, Interval) ->
-    case nth_largest(Diffs, 2) of
-        false   -> 0.0;
-        {TS, S} -> case TS - RangePoint of %% [0]
-                       D when D =< Incr andalso D >= 0 -> S * 1000 / Interval;
-                       _                               -> 0.0
-                   end
-    end.
-
-%% [0] Only display the rate if it's live - i.e. ((the end of the
-%% range) - interval) corresponds to the second to last data point we
-%% have. If the end of the range is earlier we have gone silent, if
-%% it's later we have been asked for a range back in time (in which
-%% case showing the correct instantaneous rate would be quite a faff,
-%% and probably unwanted). Why the second to last? Because data is
-%% still arriving for the last...
-nth_largest(Tree, N) ->
-    case gb_trees:is_empty(Tree) of
-        true              -> false;
-        false when N == 1 -> gb_trees:largest(Tree);
-        false             -> {_, _, Tree2} = gb_trees:take_largest(Tree),
-                             nth_largest(Tree2, N - 1)
-    end.
-
-sum_entire_tree(Iter, Acc) ->
-    case gb_trees:next(Iter) of
-        none            -> Acc;
-        {_TS, S, Iter2} -> sum_entire_tree(Iter2, Acc + S)
-    end.
-
-%% What we want to do here is: given the #range{}, provide a set of
-%% samples such that we definitely provide a set of samples which
-%% covers the exact range requested, despite the fact that we might
-%% not have it. We need to spin up over the entire range of the
-%% samples we *do* have since they are diff-based (and we convert to
-%% absolute values here).
-extract_samples(Range = #range{first = Next}, Base, It, Samples) ->
-    case gb_trees:next(It) of
-        {TS, S, It2} -> extract_samples1(Range, Base, TS,   S, It2, Samples);
-        none         -> extract_samples1(Range, Base, Next, 0, It,  Samples)
-    end.
-
-extract_samples1(Range = #range{first = Next, last = Last, incr = Incr},
-                 Base, TS, S, It, Samples) ->
-    if
-        %% We've gone over the range. Terminate.
-        Next > Last ->
-            {Samples, Base};
-        %% We've hit bang on a sample. Record it and move to the next.
-        Next =:= TS ->
-            extract_samples(Range#range{first = Next + Incr}, Base + S, It,
-                            append(Base + S, Next, Samples));
-        %% We haven't yet hit the beginning of our range.
-        Next > TS ->
-            extract_samples(Range, Base + S, It, Samples);
-        %% We have a valid sample, but we haven't used it up
-        %% yet. Append it and loop around.
-        Next < TS ->
-            extract_samples1(Range#range{first = Next + Incr}, Base, TS, S, It,
-                             append(Base, Next, Samples))
-    end.
-
-append(S, TS, Samples) -> [[{sample, S}, {timestamp, TS}] | Samples].
-
-sum([]) -> blank();
-
-sum([Stats | StatsN]) ->
-    lists:foldl(
-      fun (#stats{diffs = D1, base = B1}, #stats{diffs = D2, base = B2}) ->
-              #stats{diffs = add_trees(D1, gb_trees:iterator(D2)),
-                     base  = B1 + B2}
-      end, Stats, StatsN).
-
-add_trees(Tree, It) ->
-    case gb_trees:next(It) of
-        none        -> Tree;
-        {K, V, It2} -> add_trees(
-                         case gb_trees:lookup(K, Tree) of
-                             {value, V2} -> gb_trees:update(K, V + V2, Tree);
-                             none        -> gb_trees:insert(K, V, Tree)
-                         end, It2)
-    end.
-
-%%----------------------------------------------------------------------------
-%% Event-GCing
-%%----------------------------------------------------------------------------
-
-gc(Cutoff, #stats{diffs = Diffs, base = Base}) ->
-    List = lists:reverse(gb_trees:to_list(Diffs)),
-    gc(Cutoff, List, [], Base).
-
-%% Go through the list, amalgamating all too-old samples with the next
-%% newest keepable one [0] (we move samples forward in time since the
-%% semantics of a sample is "we had this many x by this time"). If the
-%% sample is too old, but would not be too old if moved to a rounder
-%% timestamp which does not exist then invent one and move it there
-%% [1]. But if it's just outright too old, move it to the base [2].
-gc(_Cutoff, [], Keep, Base) ->
-    #stats{diffs = gb_trees:from_orddict(Keep), base = Base};
-gc(Cutoff, [H = {TS, S} | T], Keep, Base) ->
-    {NewKeep, NewBase} =
-        case keep(Cutoff, TS) of
-            keep                       -> {[H | Keep],           Base};
-            drop                       -> {Keep,             S + Base}; %% [2]
-            {move, D} when Keep =:= [] -> {[{TS + D, S}],        Base}; %% [1]
-            {move, _}                  -> [{KTS, KS} | KT] = Keep,
-                                          {[{KTS, KS + S} | KT], Base}  %% [0]
-        end,
-    gc(Cutoff, T, NewKeep, NewBase).
-
-keep({Policy, Now}, TS) ->
-    lists:foldl(fun ({AgeSec, DivisorSec}, Action) ->
-                        prefer_action(
-                          Action,
-                          case (Now - TS) =< (AgeSec * 1000) of
-                              true  -> DivisorMillis = DivisorSec * 1000,
-                                       case TS rem DivisorMillis of
-                                           0   -> keep;
-                                           Rem -> {move, DivisorMillis - Rem}
-                                       end;
-                              false -> drop
-                          end)
-                end, drop, Policy).
-
-prefer_action(keep,              _) -> keep;
-prefer_action(_,              keep) -> keep;
-prefer_action({move, A}, {move, B}) -> {move, lists:min([A, B])};
-prefer_action({move, A},      drop) -> {move, A};
-prefer_action(drop,      {move, A}) -> {move, A};
-prefer_action(drop,           drop) -> drop.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/default-config b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/default-config
deleted file mode 100644 (file)
index b76eba8..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-# rabbitmqadmin.conf.example START
-
-[non_default]
-hostname = localhost
-port = 25672
-username = guest
-password = guest
-declare_vhost = / # Used as default for declare / delete only
-vhost = /         # Used as default for declare / delete / list
-
-[bad_host]
-hostname = rabbit.acme.com
-port = 15672
-username = guest
-password = guest
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_clustering.erl b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_clustering.erl
deleted file mode 100644 (file)
index 642b427..0000000
+++ /dev/null
@@ -1,98 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developers of the Original Code are Rabbit Technologies Ltd.
-%%
-%%   Copyright (C) 2010 Rabbit Technologies Ltd.
-%%
-%%   All Rights Reserved.
-%%
-%%   Contributor(s): ______________________________________.
-%%
--module(rabbit_mgmt_test_clustering).
-
--compile(export_all).
--include("rabbit_mgmt_test.hrl").
-
--import(rabbit_mgmt_test_http, [http_get/1, http_put/3, http_delete/2]).
--import(rabbit_misc, [pget/2]).
-
-%%----------------------------------------------------------------------------
-
-cluster_nodes_with() -> cluster_ab.
-cluster_nodes([_A, _B]) ->
-    ?assertEqual(2, length(http_get("/nodes"))),
-    ok.
-
-ha_with() -> cluster_ab.
-ha([RabbitCfg, HareCfg]) ->
-    Rabbit = pget(nodename, RabbitCfg),
-    Hare = pget(nodename, HareCfg),
-    Policy = [{pattern,    <<".*">>},
-              {definition, [{'ha-mode', <<"all">>}]}],
-    http_put("/policies/%2f/HA", Policy, ?NO_CONTENT),
-    QArgs = [{node, list_to_binary(atom_to_list(Hare))}],
-    http_put("/queues/%2f/ha-queue", QArgs, ?NO_CONTENT),
-    Q = wait_for("/queues/%2f/ha-queue"),
-    assert_node(Hare, pget(node, Q)),
-    assert_single_node(Rabbit, pget(slave_nodes, Q)),
-    assert_single_node(Rabbit, pget(synchronised_slave_nodes, Q)),
-    _HareCfg2 = rabbit_test_configs:restart_node(HareCfg),
-
-    Q2 = wait_for("/queues/%2f/ha-queue"),
-    assert_node(Rabbit, pget(node, Q2)),
-    assert_single_node(Hare, pget(slave_nodes, Q2)),
-    assert_single_node(Hare, pget(synchronised_slave_nodes, Q2)),
-    http_delete("/queues/%2f/ha-queue", ?NO_CONTENT),
-    http_delete("/policies/%2f/HA", ?NO_CONTENT),
-    ok.
-
-%%----------------------------------------------------------------------------
-
-wait_for(Path) ->
-    wait_for(Path, [slave_nodes, synchronised_slave_nodes]).
-
-wait_for(Path, Keys) ->
-    wait_for(Path, Keys, 1000).
-
-wait_for(Path, Keys, 0) ->
-    exit({timeout, {Path, Keys}});
-
-wait_for(Path, Keys, Count) ->
-    Res = http_get(Path),
-    case present(Keys, Res) of
-        false -> timer:sleep(10),
-                 wait_for(Path, Keys, Count - 1);
-        true  -> Res
-    end.
-
-present(Keys, Res) ->
-    lists:all(fun (Key) ->
-                      X = pget(Key, Res),
-                      X =/= [] andalso X =/= undefined
-              end, Keys).
-
-assert_single_node(Exp, Act) ->
-    ?assertEqual(1, length(Act)),
-    assert_node(Exp, hd(Act)).
-
-assert_nodes(Exp, Act0) ->
-    Act = [read_node(A) || A <- Act0],
-    ?debugVal({Exp, Act}),
-    ?assertEqual(length(Exp), length(Act)),
-    [?assert(lists:member(E, Act)) || E <- Exp].
-
-assert_node(Exp, Act) ->
-    ?assertEqual(Exp, read_node(Act)).
-
-read_node(N) ->
-    list_to_atom(hd(string:tokens(binary_to_list(N), "@"))).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_db.erl b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_db.erl
deleted file mode 100644 (file)
index 528ec33..0000000
+++ /dev/null
@@ -1,296 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_mgmt_test_db).
-
--include("rabbit_mgmt.hrl").
--include_lib("eunit/include/eunit.hrl").
--include_lib("rabbit_common/include/rabbit.hrl").
-
--import(rabbit_misc, [pget/2]).
--import(rabbit_mgmt_test_util, [assert_list/2, assert_item/2, test_item/2]).
-
--define(debugVal2(E),
-       ((fun (__V) ->
-                 ?debugFmt(<<"~s = ~p">>, [(??E), __V]),
-                 __V
-         end)(E))).
-
-%%----------------------------------------------------------------------------
-%% Tests
-%%----------------------------------------------------------------------------
-
-queue_coarse_test() ->
-    rabbit_mgmt_db:override_lookups([{exchange, fun dummy_lookup/1},
-                                     {queue,    fun dummy_lookup/1}]),
-    create_q(test, 0),
-    create_q(test2, 0),
-    stats_q(test, 0, 10),
-    stats_q(test2, 0, 1),
-    R = range(0, 1, 1),
-    Exp = fun(N) -> simple_details(messages, N, R) end,
-    assert_item(Exp(10), get_q(test, R)),
-    assert_item(Exp(11), get_vhost(R)),
-    assert_item(Exp(11), get_overview_q(R)),
-    delete_q(test, 0),
-    assert_item(Exp(1), get_vhost(R)),
-    assert_item(Exp(1), get_overview_q(R)),
-    delete_q(test2, 0),
-    assert_item(Exp(0), get_vhost(R)),
-    assert_item(Exp(0), get_overview_q(R)),
-    rabbit_mgmt_db:reset_lookups(),
-    ok.
-
-connection_coarse_test() ->
-    create_conn(test, 0),
-    create_conn(test2, 0),
-    stats_conn(test, 0, 10),
-    stats_conn(test2, 0, 1),
-    R = range(0, 1, 1),
-    Exp = fun(N) -> simple_details(recv_oct, N, R) end,
-    assert_item(Exp(10), get_conn(test, R)),
-    assert_item(Exp(1), get_conn(test2, R)),
-    delete_conn(test, 1),
-    delete_conn(test2, 1),
-    assert_list([], rabbit_mgmt_db:get_all_connections(R)),
-    ok.
-
-fine_stats_aggregation_test() ->
-    rabbit_mgmt_db:override_lookups([{exchange, fun dummy_lookup/1},
-                                     {queue,    fun dummy_lookup/1}]),
-    create_ch(ch1, 0),
-    create_ch(ch2, 0),
-    stats_ch(ch1, 0, [{x, 100}], [{q1, x, 100},
-                                  {q2, x, 10}], [{q1, 2},
-                                                 {q2, 1}]),
-    stats_ch(ch2, 0, [{x, 10}], [{q1, x, 50},
-                                 {q2, x, 5}], []),
-    fine_stats_aggregation_test0(true),
-    delete_q(q2, 0),
-    fine_stats_aggregation_test0(false),
-    delete_ch(ch1, 1),
-    delete_ch(ch2, 1),
-    rabbit_mgmt_db:reset_lookups(),
-    ok.
-
-fine_stats_aggregation_test0(Q2Exists) ->
-    R = range(0, 1, 1),
-    Ch1 = get_ch(ch1, R),
-    Ch2 = get_ch(ch2, R),
-    X   = get_x(x, R),
-    Q1  = get_q(q1, R),
-    V   = get_vhost(R),
-    O   = get_overview(R),
-    assert_fine_stats(m, publish,     100, Ch1, R),
-    assert_fine_stats(m, publish,     10,  Ch2, R),
-    assert_fine_stats(m, publish_in,  110, X, R),
-    assert_fine_stats(m, publish_out, 165, X, R),
-    assert_fine_stats(m, publish,     150, Q1, R),
-    assert_fine_stats(m, deliver_get, 2,   Q1, R),
-    assert_fine_stats(m, deliver_get, 3,   Ch1, R),
-    assert_fine_stats(m, publish,     110, V, R),
-    assert_fine_stats(m, deliver_get, 3,   V, R),
-    assert_fine_stats(m, publish,     110, O, R),
-    assert_fine_stats(m, deliver_get, 3,   O, R),
-    assert_fine_stats({pub, x},   publish, 100, Ch1, R),
-    assert_fine_stats({pub, x},   publish, 10,  Ch2, R),
-    assert_fine_stats({in,  ch1}, publish, 100, X, R),
-    assert_fine_stats({in,  ch2}, publish, 10,  X, R),
-    assert_fine_stats({out, q1},  publish, 150, X, R),
-    assert_fine_stats({in,  x},   publish, 150, Q1, R),
-    assert_fine_stats({del, ch1}, deliver_get, 2, Q1, R),
-    assert_fine_stats({del, q1},  deliver_get, 2, Ch1, R),
-    case Q2Exists of
-        true  -> Q2  = get_q(q2, R),
-                 assert_fine_stats(m, publish,     15,  Q2, R),
-                 assert_fine_stats(m, deliver_get, 1,   Q2, R),
-                 assert_fine_stats({out, q2},  publish, 15,  X, R),
-                 assert_fine_stats({in,  x},   publish, 15,  Q2, R),
-                 assert_fine_stats({del, ch1}, deliver_get, 1, Q2, R),
-                 assert_fine_stats({del, q2},  deliver_get, 1, Ch1, R);
-        false -> assert_fine_stats_neg({out, q2}, X),
-                 assert_fine_stats_neg({del, q2}, Ch1)
-    end,
-    ok.
-
-fine_stats_aggregation_time_test() ->
-    rabbit_mgmt_db:override_lookups([{exchange, fun dummy_lookup/1},
-                                     {queue,    fun dummy_lookup/1}]),
-    create_ch(ch, 0),
-    stats_ch(ch, 0, [{x, 100}], [{q, x, 50}], [{q, 20}]),
-    stats_ch(ch, 5, [{x, 110}], [{q, x, 55}], [{q, 22}]),
-
-    R1 = range(0, 1, 1),
-    assert_fine_stats(m, publish,     100, get_ch(ch, R1), R1),
-    assert_fine_stats(m, publish,     50,  get_q(q, R1), R1),
-    assert_fine_stats(m, deliver_get, 20,  get_q(q, R1), R1),
-
-    R2 = range(5, 6, 1),
-    assert_fine_stats(m, publish,     110, get_ch(ch, R2), R2),
-    assert_fine_stats(m, publish,     55,  get_q(q, R2), R2),
-    assert_fine_stats(m, deliver_get, 22,  get_q(q, R2), R2),
-
-    delete_q(q, 0),
-    delete_ch(ch, 1),
-    rabbit_mgmt_db:reset_lookups(),
-    ok.
-
-assert_fine_stats(m, Type, N, Obj, R) ->
-    Act = pget(message_stats, Obj),
-    assert_item(simple_details(Type, N, R), Act);
-assert_fine_stats({T2, Name}, Type, N, Obj, R) ->
-    Act = find_detailed_stats(Name, pget(expand(T2), Obj)),
-    assert_item(simple_details(Type, N, R), Act).
-
-assert_fine_stats_neg({T2, Name}, Obj) ->
-    detailed_stats_absent(Name, pget(expand(T2), Obj)).
-
-%%----------------------------------------------------------------------------
-%% Events in
-%%----------------------------------------------------------------------------
-
-create_q(Name, Timestamp) ->
-    %% Technically we do not need this, the DB ignores it, but let's
-    %% be symmetrical...
-    event(queue_created, [{name, q(Name)}], Timestamp).
-
-create_conn(Name, Timestamp) ->
-    event(connection_created, [{pid,  pid(Name)},
-                               {name, a2b(Name)}], Timestamp).
-
-create_ch(Name, Timestamp) ->
-    event(channel_created, [{pid,  pid(Name)},
-                            {name, a2b(Name)}], Timestamp).
-
-stats_q(Name, Timestamp, Msgs) ->
-    event(queue_stats, [{name,     q(Name)},
-                        {messages, Msgs}], Timestamp).
-
-stats_conn(Name, Timestamp, Oct) ->
-    event(connection_stats, [{pid ,     pid(Name)},
-                             {recv_oct, Oct}], Timestamp).
-
-stats_ch(Name, Timestamp, XStats, QXStats, QStats) ->
-    XStats1 = [{x(XName), [{publish, N}]} || {XName, N} <- XStats],
-    QXStats1 = [{{q(QName), x(XName)}, [{publish, N}]}
-                || {QName, XName, N} <- QXStats],
-    QStats1 = [{q(QName), [{deliver_no_ack, N}]} || {QName, N} <- QStats],
-    event(channel_stats,
-          [{pid,  pid(Name)},
-           {channel_exchange_stats, XStats1},
-           {channel_queue_exchange_stats, QXStats1},
-           {channel_queue_stats, QStats1}], Timestamp).
-
-delete_q(Name, Timestamp) ->
-    event(queue_deleted, [{name, q(Name)}], Timestamp).
-
-delete_conn(Name, Timestamp) ->
-    event(connection_closed, [{pid, pid_del(Name)}], Timestamp).
-
-delete_ch(Name, Timestamp) ->
-    event(channel_closed, [{pid, pid_del(Name)}], Timestamp).
-
-event(Type, Stats, Timestamp) ->
-    ok = gen_server:call(rabbit_mgmt_db,
-                         {event, #event{type      = Type,
-                                        props     = Stats,
-                                        reference = none,
-                                        timestamp = sec_to_triple(Timestamp)}}).
-
-sec_to_triple(Sec) -> {Sec div 1000000, Sec rem 1000000, 0}.
-
-%%----------------------------------------------------------------------------
-%% Events out
-%%----------------------------------------------------------------------------
-
-range(F, L, I) ->
-    R = #range{first = F * 1000, last = L * 1000, incr = I * 1000},
-    {R, R, R, R}.
-
-get_x(Name, Range) ->
-    [X] = rabbit_mgmt_db:augment_exchanges([x2(Name)], Range, full),
-    X.
-
-get_q(Name, Range) ->
-    [Q] = rabbit_mgmt_db:augment_queues([q2(Name)], Range, full),
-    Q.
-
-get_vhost(Range) ->
-    [VHost] = rabbit_mgmt_db:augment_vhosts([[{name, <<"/">>}]], Range),
-    VHost.
-
-get_conn(Name, Range) -> rabbit_mgmt_db:get_connection(a2b(Name), Range).
-get_ch(Name, Range) -> rabbit_mgmt_db:get_channel(a2b(Name), Range).
-
-get_overview(Range) -> rabbit_mgmt_db:get_overview(Range).
-get_overview_q(Range) -> pget(queue_totals, get_overview(Range)).
-
-details0(R, AR, A, L) ->
-    [{rate,     R},
-     {samples,  [[{sample, S}, {timestamp, T}] || {T, S} <- L]},
-     {avg_rate, AR},
-     {avg,      A}].
-
-simple_details(Thing, N, {#range{first = First, last = Last}, _, _, _}) ->
-    [{Thing, N},
-     {atom_suffix(Thing, "_details"),
-      details0(0.0, 0.0, N * 1.0, [{Last, N}, {First, N}])}].
-
-atom_suffix(Atom, Suffix) ->
-    list_to_atom(atom_to_list(Atom) ++ Suffix).
-
-find_detailed_stats(Name, List) ->
-    [S] = filter_detailed_stats(Name, List),
-    S.
-
-detailed_stats_absent(Name, List) ->
-    [] = filter_detailed_stats(Name, List).
-
-filter_detailed_stats(Name, List) ->
-    [Stats || [{stats, Stats}, {_, Details}] <- List,
-              pget(name, Details) =:= a2b(Name)].
-
-expand(in)  -> incoming;
-expand(out) -> outgoing;
-expand(del) -> deliveries;
-expand(pub) -> publishes.
-
-%%----------------------------------------------------------------------------
-%% Util
-%%----------------------------------------------------------------------------
-
-x(Name) -> rabbit_misc:r(<<"/">>, exchange, a2b(Name)).
-x2(Name) -> q2(Name).
-q(Name) -> rabbit_misc:r(<<"/">>, queue, a2b(Name)).
-q2(Name) -> [{name,  a2b(Name)},
-             {vhost, <<"/">>}].
-
-pid(Name) ->
-    case get({pid, Name}) of
-        undefined -> P = spawn(fun() -> ok end),
-                     put({pid, Name}, P),
-                     P;
-        Pid       -> Pid
-    end.
-
-pid_del(Name) ->
-    Pid = pid(Name),
-    erase({pid, Name}),
-    Pid.
-
-a2b(A) -> list_to_binary(atom_to_list(A)).
-
-dummy_lookup(_Thing) -> {ok, ignore_this}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_db_unit.erl b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_db_unit.erl
deleted file mode 100644 (file)
index 80af615..0000000
+++ /dev/null
@@ -1,135 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2012 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_mgmt_test_db_unit).
-
--include("rabbit_mgmt.hrl").
--include_lib("eunit/include/eunit.hrl").
-
-gc_test() ->
-    T = fun (Before, After) ->
-                ?assertEqual(After, unstats(
-                                      rabbit_mgmt_stats:gc(
-                                        cutoff(), stats(Before))))
-        end,
-    %% Cut off old sample, move to base
-    T({[{8999, 123}, {9000, 456}], 0},
-      {[{9000, 456}], 123}),
-    %% Amalgamate old samples to rounder one
-    T({[{9001, 100}, {9010, 020}, {10000, 003}], 0},
-      {[{10000, 123}], 0}),
-    %% The same, but a bit less
-    T({[{9000, 100}, {9901, 020}, {9910, 003}], 0},
-      {[{9000, 100}, {9910, 023}], 0}),
-    %% Nothing needs to be done
-    T({[{9000, 100}, {9990, 020}, {9991, 003}], 0},
-      {[{9000, 100}, {9990, 020}, {9991, 003}], 0}),
-    %% Invent a newer sample that's acceptable
-    T({[{9001, 10}, {9010, 02}], 0},
-      {[{9100, 12}], 0}),
-    %% ...but don't if it's too old
-    T({[{8001, 10}, {8010, 02}], 0},
-      {[], 12}),
-    ok.
-
-format_test() ->
-    Interval = 10,
-    T = fun ({First, Last, Incr}, Stats, Results) ->
-                ?assertEqual(format(Results),
-                             rabbit_mgmt_stats:format(
-                               #range{first = First * 1000,
-                                      last  = Last * 1000,
-                                      incr  = Incr * 1000},
-                               stats(Stats),
-                               Interval * 1000))
-        end,
-
-    %% Just three samples, all of which we format. Note the
-    %% instantaneous rate is taken from the penultimate sample.
-    T({10, 30, 10}, {[{10, 10}, {20, 20}, {30, 30}], 1},
-      {[{30, 61}, {20, 31}, {10, 11}], 2.0, 2.5, 103/3, 61}),
-
-    %% Skip over the second (and ditto).
-    T({10, 30, 20}, {[{10, 10}, {20, 20}, {30, 30}], 1},
-      {[{30, 61}, {10, 11}], 2.0, 2.5, 36.0, 61}),
-
-    %% Skip over some and invent some. Note that the instantaneous
-    %% rate drops to 0 since the last event is now in the past.
-    T({0, 40, 20}, {[{10, 10}, {20, 20}, {30, 30}], 1},
-      {[{40, 61}, {20, 31}, {0, 1}], 0.0, 1.5, 31.0, 61}),
-
-    %% And a case where the range starts after the samples
-    T({20, 40, 10}, {[{10, 10}, {20, 20}, {30, 30}], 1},
-      {[{40, 61}, {30, 61}, {20, 31}], 0.0, 1.5, 51.0, 61}),
-
-    %% A single sample - which should lead to some bits not getting generated
-    T({10, 10, 10}, {[{10, 10}, {20, 20}, {30, 30}], 1},
-      {[{10, 11}], 0.0, 11}),
-
-    %% No samples - which should also lead to some bits not getting generated
-    T({10, 0, 10}, {[{10, 10}, {20, 20}, {30, 30}], 1},
-      {[], 0.0, 1}),
-
-    %% TODO more?
-    ok.
-
-format_no_range_test() ->
-    Interval = 10,
-    T = fun (Stats, Results) ->
-                ?assertEqual(format(Results),
-                             rabbit_mgmt_stats:format(
-                               no_range, stats(Stats), Interval * 1000))
-        end,
-
-    %% Just three samples
-    T({[{10, 10}, {20, 20}, {30, 30}], 1},
-      {0.0, 61}),
-    ok.
-
-
-%%--------------------------------------------------------------------
-
-cutoff() ->
-    {[{10, 1}, {100, 10}, {1000, 100}], %% Sec
-     10000000}. %% Millis
-
-stats({Diffs, Base}) ->
-    #stats{diffs = gb_trees:from_orddict(secs_to_millis(Diffs)), base = Base}.
-
-unstats(#stats{diffs = Diffs, base = Base}) ->
-    {millis_to_secs(gb_trees:to_list(Diffs)), Base}.
-
-secs_to_millis(L) -> [{TS * 1000, S} || {TS, S} <- L].
-millis_to_secs(L) -> [{TS div 1000, S} || {TS, S} <- L].
-
-format({Rate, Count}) ->
-    {[{rate,     Rate}],
-     Count};
-
-format({Samples, Rate, Count}) ->
-    {[{rate,     Rate},
-      {samples,  format_samples(Samples)}],
-     Count};
-
-format({Samples, Rate, AvgRate, Avg, Count}) ->
-    {[{rate,     Rate},
-      {samples,  format_samples(Samples)},
-      {avg_rate, AvgRate},
-      {avg,      Avg}],
-     Count}.
-
-format_samples(Samples) ->
-    [[{sample, S}, {timestamp, TS * 1000}] || {TS, S} <- Samples].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_http.erl b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_http.erl
deleted file mode 100644 (file)
index f56a330..0000000
+++ /dev/null
@@ -1,1291 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_mgmt_test_http).
-
--include("rabbit_mgmt_test.hrl").
-
--export([http_get/1, http_put/3, http_delete/2]).
-
--import(rabbit_mgmt_test_util, [assert_list/2, assert_item/2, test_item/2]).
--import(rabbit_misc, [pget/2]).
-
-overview_test() ->
-    %% Rather crude, but this req doesn't say much and at least this means it
-    %% didn't blow up.
-    true = 0 < length(pget(listeners, http_get("/overview"))),
-    http_put("/users/myuser", [{password, <<"myuser">>},
-                               {tags,     <<"management">>}], ?NO_CONTENT),
-    http_get("/overview", "myuser", "myuser", ?OK),
-    http_delete("/users/myuser", ?NO_CONTENT),
-    %% TODO uncomment when priv works in test
-    %%http_get(""),
-    ok.
-
-cluster_name_test() ->
-    http_put("/users/myuser", [{password, <<"myuser">>},
-                               {tags,     <<"management">>}], ?NO_CONTENT),
-    http_put("/cluster-name", [{name, "foo"}], "myuser", "myuser", ?NOT_AUTHORISED),
-    http_put("/cluster-name", [{name, "foo"}], ?NO_CONTENT),
-    [{name, "foo"}] = http_get("/cluster-name", "myuser", "myuser", ?OK),
-    http_delete("/users/myuser", ?NO_CONTENT),
-    ok.
-
-nodes_test() ->
-    http_put("/users/user", [{password, <<"user">>},
-                             {tags, <<"management">>}], ?NO_CONTENT),
-    http_put("/users/monitor", [{password, <<"monitor">>},
-                                {tags, <<"monitoring">>}], ?NO_CONTENT),
-    DiscNode = [{type, <<"disc">>}, {running, true}],
-    assert_list([DiscNode], http_get("/nodes")),
-    assert_list([DiscNode], http_get("/nodes", "monitor", "monitor", ?OK)),
-    http_get("/nodes", "user", "user", ?NOT_AUTHORISED),
-    [Node] = http_get("/nodes"),
-    Path = "/nodes/" ++ binary_to_list(pget(name, Node)),
-    assert_item(DiscNode, http_get(Path, ?OK)),
-    assert_item(DiscNode, http_get(Path, "monitor", "monitor", ?OK)),
-    http_get(Path, "user", "user", ?NOT_AUTHORISED),
-    http_delete("/users/user", ?NO_CONTENT),
-    http_delete("/users/monitor", ?NO_CONTENT),
-    ok.
-
-auth_test() ->
-    http_put("/users/user", [{password, <<"user">>},
-                             {tags, <<"">>}], ?NO_CONTENT),
-    test_auth(?NOT_AUTHORISED, []),
-    test_auth(?NOT_AUTHORISED, [auth_header("user", "user")]),
-    test_auth(?NOT_AUTHORISED, [auth_header("guest", "gust")]),
-    test_auth(?OK, [auth_header("guest", "guest")]),
-    http_delete("/users/user", ?NO_CONTENT),
-    ok.
-
-%% This test is rather over-verbose as we're trying to test understanding of
-%% Webmachine
-vhosts_test() ->
-    assert_list([[{name, <<"/">>}]], http_get("/vhosts")),
-    %% Create a new one
-    http_put("/vhosts/myvhost", none, ?NO_CONTENT),
-    %% PUT should be idempotent
-    http_put("/vhosts/myvhost", none, ?NO_CONTENT),
-    %% Check it's there
-    assert_list([[{name, <<"/">>}], [{name, <<"myvhost">>}]],
-                http_get("/vhosts")),
-    %% Check individually
-    assert_item([{name, <<"/">>}], http_get("/vhosts/%2f", ?OK)),
-    assert_item([{name, <<"myvhost">>}],http_get("/vhosts/myvhost")),
-    %% Delete it
-    http_delete("/vhosts/myvhost", ?NO_CONTENT),
-    %% It's not there
-    http_get("/vhosts/myvhost", ?NOT_FOUND),
-    http_delete("/vhosts/myvhost", ?NOT_FOUND).
-
-vhosts_trace_test() ->
-    http_put("/vhosts/myvhost", none, ?NO_CONTENT),
-    Disabled = [{name,  <<"myvhost">>}, {tracing, false}],
-    Enabled  = [{name,  <<"myvhost">>}, {tracing, true}],
-    Disabled = http_get("/vhosts/myvhost"),
-    http_put("/vhosts/myvhost", [{tracing, true}], ?NO_CONTENT),
-    Enabled = http_get("/vhosts/myvhost"),
-    http_put("/vhosts/myvhost", [{tracing, true}], ?NO_CONTENT),
-    Enabled = http_get("/vhosts/myvhost"),
-    http_put("/vhosts/myvhost", [{tracing, false}], ?NO_CONTENT),
-    Disabled = http_get("/vhosts/myvhost"),
-    http_delete("/vhosts/myvhost", ?NO_CONTENT).
-
-users_test() ->
-    assert_item([{name, <<"guest">>}, {tags, <<"administrator">>}],
-                http_get("/whoami")),
-    http_get("/users/myuser", ?NOT_FOUND),
-    http_put_raw("/users/myuser", "Something not JSON", ?BAD_REQUEST),
-    http_put("/users/myuser", [{flim, <<"flam">>}], ?BAD_REQUEST),
-    http_put("/users/myuser", [{tags, <<"management">>}], ?NO_CONTENT),
-    http_put("/users/myuser", [{password_hash, <<"not_hash">>}], ?BAD_REQUEST),
-    http_put("/users/myuser", [{password_hash,
-                                <<"IECV6PZI/Invh0DL187KFpkO5Jc=">>},
-                               {tags, <<"management">>}], ?NO_CONTENT),
-    http_put("/users/myuser", [{password, <<"password">>},
-                               {tags, <<"administrator, foo">>}], ?NO_CONTENT),
-    assert_item([{name, <<"myuser">>}, {tags, <<"administrator,foo">>}],
-                http_get("/users/myuser")),
-    assert_list([[{name, <<"myuser">>}, {tags, <<"administrator,foo">>}],
-                 [{name, <<"guest">>}, {tags, <<"administrator">>}]],
-                http_get("/users")),
-    test_auth(?OK, [auth_header("myuser", "password")]),
-    http_delete("/users/myuser", ?NO_CONTENT),
-    test_auth(?NOT_AUTHORISED, [auth_header("myuser", "password")]),
-    http_get("/users/myuser", ?NOT_FOUND),
-    ok.
-
-users_legacy_administrator_test() ->
-    http_put("/users/myuser1", [{administrator, <<"true">>}], ?NO_CONTENT),
-    http_put("/users/myuser2", [{administrator, <<"false">>}], ?NO_CONTENT),
-    assert_item([{name, <<"myuser1">>}, {tags, <<"administrator">>}],
-                http_get("/users/myuser1")),
-    assert_item([{name, <<"myuser2">>}, {tags, <<"">>}],
-                http_get("/users/myuser2")),
-    http_delete("/users/myuser1", ?NO_CONTENT),
-    http_delete("/users/myuser2", ?NO_CONTENT),
-    ok.
-
-permissions_validation_test() ->
-    Good = [{configure, <<".*">>}, {write, <<".*">>}, {read, <<".*">>}],
-    http_put("/permissions/wrong/guest", Good, ?BAD_REQUEST),
-    http_put("/permissions/%2f/wrong", Good, ?BAD_REQUEST),
-    http_put("/permissions/%2f/guest",
-             [{configure, <<"[">>}, {write, <<".*">>}, {read, <<".*">>}],
-             ?BAD_REQUEST),
-    http_put("/permissions/%2f/guest", Good, ?NO_CONTENT),
-    ok.
-
-permissions_list_test() ->
-    [[{user,<<"guest">>},
-      {vhost,<<"/">>},
-      {configure,<<".*">>},
-      {write,<<".*">>},
-      {read,<<".*">>}]] =
-        http_get("/permissions"),
-
-    http_put("/users/myuser1", [{password, <<"">>}, {tags, <<"administrator">>}],
-             ?NO_CONTENT),
-    http_put("/users/myuser2", [{password, <<"">>}, {tags, <<"administrator">>}],
-             ?NO_CONTENT),
-    http_put("/vhosts/myvhost1", none, ?NO_CONTENT),
-    http_put("/vhosts/myvhost2", none, ?NO_CONTENT),
-
-    Perms = [{configure, <<"foo">>}, {write, <<"foo">>}, {read, <<"foo">>}],
-    http_put("/permissions/myvhost1/myuser1", Perms, ?NO_CONTENT),
-    http_put("/permissions/myvhost2/myuser1", Perms, ?NO_CONTENT),
-    http_put("/permissions/myvhost1/myuser2", Perms, ?NO_CONTENT),
-
-    4 = length(http_get("/permissions")),
-    2 = length(http_get("/users/myuser1/permissions")),
-    1 = length(http_get("/users/myuser2/permissions")),
-
-    http_get("/users/notmyuser/permissions", ?NOT_FOUND),
-    http_get("/vhosts/notmyvhost/permissions", ?NOT_FOUND),
-
-    http_delete("/users/myuser1", ?NO_CONTENT),
-    http_delete("/users/myuser2", ?NO_CONTENT),
-    http_delete("/vhosts/myvhost1", ?NO_CONTENT),
-    http_delete("/vhosts/myvhost2", ?NO_CONTENT),
-    ok.
-
-permissions_test() ->
-    http_put("/users/myuser", [{password, <<"myuser">>}, {tags, <<"administrator">>}],
-             ?NO_CONTENT),
-    http_put("/vhosts/myvhost", none, ?NO_CONTENT),
-
-    http_put("/permissions/myvhost/myuser",
-             [{configure, <<"foo">>}, {write, <<"foo">>}, {read, <<"foo">>}],
-             ?NO_CONTENT),
-
-    Permission = [{user,<<"myuser">>},
-                  {vhost,<<"myvhost">>},
-                  {configure,<<"foo">>},
-                  {write,<<"foo">>},
-                  {read,<<"foo">>}],
-    Default = [{user,<<"guest">>},
-               {vhost,<<"/">>},
-               {configure,<<".*">>},
-               {write,<<".*">>},
-               {read,<<".*">>}],
-    Permission = http_get("/permissions/myvhost/myuser"),
-    assert_list([Permission, Default], http_get("/permissions")),
-    assert_list([Permission], http_get("/users/myuser/permissions")),
-    http_delete("/permissions/myvhost/myuser", ?NO_CONTENT),
-    http_get("/permissions/myvhost/myuser", ?NOT_FOUND),
-
-    http_delete("/users/myuser", ?NO_CONTENT),
-    http_delete("/vhosts/myvhost", ?NO_CONTENT),
-    ok.
-
-connections_test() ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    LocalPort = local_port(Conn),
-    Path = binary_to_list(
-             rabbit_mgmt_format:print(
-               "/connections/127.0.0.1%3A~w%20->%20127.0.0.1%3A5672",
-               [LocalPort])),
-    http_get(Path, ?OK),
-    http_delete(Path, ?NO_CONTENT),
-    %% TODO rabbit_reader:shutdown/2 returns before the connection is
-    %% closed. It may not be worth fixing.
-    timer:sleep(200),
-    http_get(Path, ?NOT_FOUND).
-
-test_auth(Code, Headers) ->
-    {ok, {{_, Code, _}, _, _}} = req(get, "/overview", Headers).
-
-exchanges_test() ->
-    %% Can pass booleans or strings
-    Good = [{type, <<"direct">>}, {durable, <<"true">>}],
-    http_put("/vhosts/myvhost", none, ?NO_CONTENT),
-    http_get("/exchanges/myvhost/foo", ?NOT_AUTHORISED),
-    http_put("/exchanges/myvhost/foo", Good, ?NOT_AUTHORISED),
-    http_put("/permissions/myvhost/guest",
-             [{configure, <<".*">>}, {write, <<".*">>}, {read, <<".*">>}],
-             ?NO_CONTENT),
-    http_get("/exchanges/myvhost/foo", ?NOT_FOUND),
-    http_put("/exchanges/myvhost/foo", Good, ?NO_CONTENT),
-    http_put("/exchanges/myvhost/foo", Good, ?NO_CONTENT),
-    http_get("/exchanges/%2f/foo", ?NOT_FOUND),
-    assert_item([{name,<<"foo">>},
-                 {vhost,<<"myvhost">>},
-                 {type,<<"direct">>},
-                 {durable,true},
-                 {auto_delete,false},
-                 {internal,false},
-                 {arguments,[]}],
-                http_get("/exchanges/myvhost/foo")),
-
-    http_put("/exchanges/badvhost/bar", Good, ?NOT_FOUND),
-    http_put("/exchanges/myvhost/bar", [{type, <<"bad_exchange_type">>}],
-             ?BAD_REQUEST),
-    http_put("/exchanges/myvhost/bar", [{type, <<"direct">>},
-                                        {durable, <<"troo">>}],
-             ?BAD_REQUEST),
-    http_put("/exchanges/myvhost/foo", [{type, <<"direct">>}],
-             ?BAD_REQUEST),
-
-    http_delete("/exchanges/myvhost/foo", ?NO_CONTENT),
-    http_delete("/exchanges/myvhost/foo", ?NOT_FOUND),
-
-    http_delete("/vhosts/myvhost", ?NO_CONTENT),
-    http_get("/exchanges/badvhost", ?NOT_FOUND),
-    ok.
-
-queues_test() ->
-    Good = [{durable, true}],
-    http_get("/queues/%2f/foo", ?NOT_FOUND),
-    http_put("/queues/%2f/foo", Good, ?NO_CONTENT),
-    http_put("/queues/%2f/foo", Good, ?NO_CONTENT),
-    http_get("/queues/%2f/foo", ?OK),
-
-    http_put("/queues/badvhost/bar", Good, ?NOT_FOUND),
-    http_put("/queues/%2f/bar",
-             [{durable, <<"troo">>}],
-             ?BAD_REQUEST),
-    http_put("/queues/%2f/foo",
-             [{durable, false}],
-             ?BAD_REQUEST),
-
-    http_put("/queues/%2f/baz", Good, ?NO_CONTENT),
-
-    Queues = http_get("/queues/%2f"),
-    Queue = http_get("/queues/%2f/foo"),
-    assert_list([[{name,        <<"foo">>},
-                  {vhost,       <<"/">>},
-                  {durable,     true},
-                  {auto_delete, false},
-                  {arguments,   []}],
-                 [{name,        <<"baz">>},
-                  {vhost,       <<"/">>},
-                  {durable,     true},
-                  {auto_delete, false},
-                  {arguments,   []}]], Queues),
-    assert_item([{name,        <<"foo">>},
-                 {vhost,       <<"/">>},
-                 {durable,     true},
-                 {auto_delete, false},
-                 {arguments,   []}], Queue),
-
-    http_delete("/queues/%2f/foo", ?NO_CONTENT),
-    http_delete("/queues/%2f/baz", ?NO_CONTENT),
-    http_delete("/queues/%2f/foo", ?NOT_FOUND),
-    http_get("/queues/badvhost", ?NOT_FOUND),
-    ok.
-
-bindings_test() ->
-    XArgs = [{type, <<"direct">>}],
-    QArgs = [],
-    http_put("/exchanges/%2f/myexchange", XArgs, ?NO_CONTENT),
-    http_put("/queues/%2f/myqueue", QArgs, ?NO_CONTENT),
-    BArgs = [{routing_key, <<"routing">>}, {arguments, []}],
-    http_post("/bindings/%2f/e/myexchange/q/myqueue", BArgs, ?CREATED),
-    http_get("/bindings/%2f/e/myexchange/q/myqueue/routing", ?OK),
-    http_get("/bindings/%2f/e/myexchange/q/myqueue/rooting", ?NOT_FOUND),
-    Binding =
-        [{source,<<"myexchange">>},
-         {vhost,<<"/">>},
-         {destination,<<"myqueue">>},
-         {destination_type,<<"queue">>},
-         {routing_key,<<"routing">>},
-         {arguments,[]},
-         {properties_key,<<"routing">>}],
-    DBinding =
-        [{source,<<"">>},
-         {vhost,<<"/">>},
-         {destination,<<"myqueue">>},
-         {destination_type,<<"queue">>},
-         {routing_key,<<"myqueue">>},
-         {arguments,[]},
-         {properties_key,<<"myqueue">>}],
-    Binding = http_get("/bindings/%2f/e/myexchange/q/myqueue/routing"),
-    assert_list([Binding],
-                http_get("/bindings/%2f/e/myexchange/q/myqueue")),
-    assert_list([Binding, DBinding],
-                http_get("/queues/%2f/myqueue/bindings")),
-    assert_list([Binding],
-                http_get("/exchanges/%2f/myexchange/bindings/source")),
-    http_delete("/bindings/%2f/e/myexchange/q/myqueue/routing", ?NO_CONTENT),
-    http_delete("/bindings/%2f/e/myexchange/q/myqueue/routing", ?NOT_FOUND),
-    http_delete("/exchanges/%2f/myexchange", ?NO_CONTENT),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    http_get("/bindings/badvhost", ?NOT_FOUND),
-    http_get("/bindings/badvhost/myqueue/myexchange/routing", ?NOT_FOUND),
-    http_get("/bindings/%2f/e/myexchange/q/myqueue/routing", ?NOT_FOUND),
-    ok.
-
-bindings_post_test() ->
-    XArgs = [{type, <<"direct">>}],
-    QArgs = [],
-    BArgs = [{routing_key, <<"routing">>}, {arguments, [{foo, <<"bar">>}]}],
-    http_put("/exchanges/%2f/myexchange", XArgs, ?NO_CONTENT),
-    http_put("/queues/%2f/myqueue", QArgs, ?NO_CONTENT),
-    http_post("/bindings/%2f/e/myexchange/q/badqueue", BArgs, ?NOT_FOUND),
-    http_post("/bindings/%2f/e/badexchange/q/myqueue", BArgs, ?NOT_FOUND),
-    Headers1 = http_post("/bindings/%2f/e/myexchange/q/myqueue", [], ?CREATED),
-    "../../../../%2F/e/myexchange/q/myqueue/~" = pget("location", Headers1),
-    Headers2 = http_post("/bindings/%2f/e/myexchange/q/myqueue", BArgs, ?CREATED),
-    PropertiesKey = "routing~V4mGFgnPNrdtRmluZIxTDA",
-    PropertiesKeyBin = list_to_binary(PropertiesKey),
-    "../../../../%2F/e/myexchange/q/myqueue/" ++ PropertiesKey =
-        pget("location", Headers2),
-    URI = "/bindings/%2F/e/myexchange/q/myqueue/" ++ PropertiesKey,
-    [{source,<<"myexchange">>},
-     {vhost,<<"/">>},
-     {destination,<<"myqueue">>},
-     {destination_type,<<"queue">>},
-     {routing_key,<<"routing">>},
-     {arguments,[{foo,<<"bar">>}]},
-     {properties_key,PropertiesKeyBin}] = http_get(URI, ?OK),
-    http_get(URI ++ "x", ?NOT_FOUND),
-    http_delete(URI, ?NO_CONTENT),
-    http_delete("/exchanges/%2f/myexchange", ?NO_CONTENT),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    ok.
-
-bindings_e2e_test() ->
-    BArgs = [{routing_key, <<"routing">>}, {arguments, []}],
-    http_post("/bindings/%2f/e/amq.direct/e/badexchange", BArgs, ?NOT_FOUND),
-    http_post("/bindings/%2f/e/badexchange/e/amq.fanout", BArgs, ?NOT_FOUND),
-    Headers = http_post("/bindings/%2f/e/amq.direct/e/amq.fanout", BArgs, ?CREATED),
-    "../../../../%2F/e/amq.direct/e/amq.fanout/routing" =
-        pget("location", Headers),
-    [{source,<<"amq.direct">>},
-     {vhost,<<"/">>},
-     {destination,<<"amq.fanout">>},
-     {destination_type,<<"exchange">>},
-     {routing_key,<<"routing">>},
-     {arguments,[]},
-     {properties_key,<<"routing">>}] =
-        http_get("/bindings/%2f/e/amq.direct/e/amq.fanout/routing", ?OK),
-    http_delete("/bindings/%2f/e/amq.direct/e/amq.fanout/routing", ?NO_CONTENT),
-    http_post("/bindings/%2f/e/amq.direct/e/amq.headers", BArgs, ?CREATED),
-    Binding =
-        [{source,<<"amq.direct">>},
-         {vhost,<<"/">>},
-         {destination,<<"amq.headers">>},
-         {destination_type,<<"exchange">>},
-         {routing_key,<<"routing">>},
-         {arguments,[]},
-         {properties_key,<<"routing">>}],
-    Binding = http_get("/bindings/%2f/e/amq.direct/e/amq.headers/routing"),
-    assert_list([Binding],
-                http_get("/bindings/%2f/e/amq.direct/e/amq.headers")),
-    assert_list([Binding],
-                http_get("/exchanges/%2f/amq.direct/bindings/source")),
-    assert_list([Binding],
-                http_get("/exchanges/%2f/amq.headers/bindings/destination")),
-    http_delete("/bindings/%2f/e/amq.direct/e/amq.headers/routing", ?NO_CONTENT),
-    http_get("/bindings/%2f/e/amq.direct/e/amq.headers/rooting", ?NOT_FOUND),
-    ok.
-
-permissions_administrator_test() ->
-    http_put("/users/isadmin", [{password, <<"isadmin">>},
-                                {tags, <<"administrator">>}], ?NO_CONTENT),
-    http_put("/users/notadmin", [{password, <<"notadmin">>},
-                                 {tags, <<"administrator">>}], ?NO_CONTENT),
-    http_put("/users/notadmin", [{password, <<"notadmin">>},
-                                 {tags, <<"management">>}], ?NO_CONTENT),
-    Test =
-        fun(Path) ->
-                http_get(Path, "notadmin", "notadmin", ?NOT_AUTHORISED),
-                http_get(Path, "isadmin", "isadmin", ?OK),
-                http_get(Path, "guest", "guest", ?OK)
-        end,
-    %% All users can get a list of vhosts. It may be filtered.
-    %%Test("/vhosts"),
-    Test("/vhosts/%2f"),
-    Test("/vhosts/%2f/permissions"),
-    Test("/users"),
-    Test("/users/guest"),
-    Test("/users/guest/permissions"),
-    Test("/permissions"),
-    Test("/permissions/%2f/guest"),
-    http_delete("/users/notadmin", ?NO_CONTENT),
-    http_delete("/users/isadmin", ?NO_CONTENT),
-    ok.
-
-permissions_vhost_test() ->
-    QArgs = [],
-    PermArgs = [{configure, <<".*">>}, {write, <<".*">>}, {read, <<".*">>}],
-    http_put("/users/myuser", [{password, <<"myuser">>},
-                               {tags, <<"management">>}], ?NO_CONTENT),
-    http_put("/vhosts/myvhost1", none, ?NO_CONTENT),
-    http_put("/vhosts/myvhost2", none, ?NO_CONTENT),
-    http_put("/permissions/myvhost1/myuser", PermArgs, ?NO_CONTENT),
-    http_put("/permissions/myvhost1/guest", PermArgs, ?NO_CONTENT),
-    http_put("/permissions/myvhost2/guest", PermArgs, ?NO_CONTENT),
-    assert_list([[{name, <<"/">>}],
-                 [{name, <<"myvhost1">>}],
-                 [{name, <<"myvhost2">>}]], http_get("/vhosts", ?OK)),
-    assert_list([[{name, <<"myvhost1">>}]],
-                http_get("/vhosts", "myuser", "myuser", ?OK)),
-    http_put("/queues/myvhost1/myqueue", QArgs, ?NO_CONTENT),
-    http_put("/queues/myvhost2/myqueue", QArgs, ?NO_CONTENT),
-    Test1 =
-        fun(Path) ->
-                Results = http_get(Path, "myuser", "myuser", ?OK),
-                [case pget(vhost, Result) of
-                     <<"myvhost2">> ->
-                         throw({got_result_from_vhost2_in, Path, Result});
-                     _ ->
-                         ok
-                 end || Result <- Results]
-        end,
-    Test2 =
-        fun(Path1, Path2) ->
-                http_get(Path1 ++ "/myvhost1/" ++ Path2, "myuser", "myuser",
-                         ?OK),
-                http_get(Path1 ++ "/myvhost2/" ++ Path2, "myuser", "myuser",
-                         ?NOT_AUTHORISED)
-        end,
-    Test1("/exchanges"),
-    Test2("/exchanges", ""),
-    Test2("/exchanges", "amq.direct"),
-    Test1("/queues"),
-    Test2("/queues", ""),
-    Test2("/queues", "myqueue"),
-    Test1("/bindings"),
-    Test2("/bindings", ""),
-    Test2("/queues", "myqueue/bindings"),
-    Test2("/exchanges", "amq.default/bindings/source"),
-    Test2("/exchanges", "amq.default/bindings/destination"),
-    Test2("/bindings", "e/amq.default/q/myqueue"),
-    Test2("/bindings", "e/amq.default/q/myqueue/myqueue"),
-    http_delete("/vhosts/myvhost1", ?NO_CONTENT),
-    http_delete("/vhosts/myvhost2", ?NO_CONTENT),
-    http_delete("/users/myuser", ?NO_CONTENT),
-    ok.
-
-permissions_amqp_test() ->
-    %% Just test that it works at all, not that it works in all possible cases.
-    QArgs = [],
-    PermArgs = [{configure, <<"foo.*">>}, {write, <<"foo.*">>},
-                {read,      <<"foo.*">>}],
-    http_put("/users/myuser", [{password, <<"myuser">>},
-                               {tags, <<"management">>}], ?NO_CONTENT),
-    http_put("/permissions/%2f/myuser", PermArgs, ?NO_CONTENT),
-    http_put("/queues/%2f/bar-queue", QArgs, "myuser", "myuser",
-             ?NOT_AUTHORISED),
-    http_put("/queues/%2f/bar-queue", QArgs, "nonexistent", "nonexistent",
-             ?NOT_AUTHORISED),
-    http_delete("/users/myuser", ?NO_CONTENT),
-    ok.
-
-get_conn(Username, Password) ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{
-                                        username = list_to_binary(Username),
-                                        password = list_to_binary(Password)}),
-    LocalPort = local_port(Conn),
-    ConnPath = rabbit_misc:format(
-                 "/connections/127.0.0.1%3A~w%20->%20127.0.0.1%3A5672",
-                 [LocalPort]),
-    ChPath = rabbit_misc:format(
-               "/channels/127.0.0.1%3A~w%20->%20127.0.0.1%3A5672%20(1)",
-               [LocalPort]),
-    ConnChPath = rabbit_misc:format(
-                   "/connections/127.0.0.1%3A~w%20->%20127.0.0.1%3A5672/channels",
-                   [LocalPort]),
-    {Conn, ConnPath, ChPath, ConnChPath}.
-
-permissions_connection_channel_consumer_test() ->
-    PermArgs = [{configure, <<".*">>}, {write, <<".*">>}, {read, <<".*">>}],
-    http_put("/users/user", [{password, <<"user">>},
-                             {tags, <<"management">>}], ?NO_CONTENT),
-    http_put("/permissions/%2f/user", PermArgs, ?NO_CONTENT),
-    http_put("/users/monitor", [{password, <<"monitor">>},
-                                {tags, <<"monitoring">>}], ?NO_CONTENT),
-    http_put("/permissions/%2f/monitor", PermArgs, ?NO_CONTENT),
-    http_put("/queues/%2f/test", [], ?NO_CONTENT),
-
-    {Conn1, UserConn, UserCh, UserConnCh} = get_conn("user", "user"),
-    {Conn2, MonConn, MonCh, MonConnCh} = get_conn("monitor", "monitor"),
-    {Conn3, AdmConn, AdmCh, AdmConnCh} = get_conn("guest", "guest"),
-    {ok, Ch1} = amqp_connection:open_channel(Conn1),
-    {ok, Ch2} = amqp_connection:open_channel(Conn2),
-    {ok, Ch3} = amqp_connection:open_channel(Conn3),
-    [amqp_channel:subscribe(
-       Ch, #'basic.consume'{queue = <<"test">>}, self()) ||
-        Ch <- [Ch1, Ch2, Ch3]],
-    AssertLength = fun (Path, User, Len) ->
-                           ?assertEqual(Len,
-                                        length(http_get(Path, User, User, ?OK)))
-                   end,
-    [begin
-         AssertLength(P, "user", 1),
-         AssertLength(P, "monitor", 3),
-         AssertLength(P, "guest", 3)
-     end || P <- ["/connections", "/channels", "/consumers", "/consumers/%2f"]],
-
-    AssertRead = fun(Path, UserStatus) ->
-                         http_get(Path, "user", "user", UserStatus),
-                         http_get(Path, "monitor", "monitor", ?OK),
-                         http_get(Path, ?OK)
-                 end,
-    AssertRead(UserConn, ?OK),
-    AssertRead(MonConn, ?NOT_AUTHORISED),
-    AssertRead(AdmConn, ?NOT_AUTHORISED),
-    AssertRead(UserCh, ?OK),
-    AssertRead(MonCh, ?NOT_AUTHORISED),
-    AssertRead(AdmCh, ?NOT_AUTHORISED),
-    AssertRead(UserConnCh, ?OK),
-    AssertRead(MonConnCh, ?NOT_AUTHORISED),
-    AssertRead(AdmConnCh, ?NOT_AUTHORISED),
-
-    AssertClose = fun(Path, User, Status) ->
-                          http_delete(Path, User, User, Status)
-                  end,
-    AssertClose(UserConn, "monitor", ?NOT_AUTHORISED),
-    AssertClose(MonConn, "user", ?NOT_AUTHORISED),
-    AssertClose(AdmConn, "guest", ?NO_CONTENT),
-    AssertClose(MonConn, "guest", ?NO_CONTENT),
-    AssertClose(UserConn, "user", ?NO_CONTENT),
-
-    http_delete("/users/user", ?NO_CONTENT),
-    http_delete("/users/monitor", ?NO_CONTENT),
-    http_get("/connections/foo", ?NOT_FOUND),
-    http_get("/channels/foo", ?NOT_FOUND),
-    http_delete("/queues/%2f/test", ?NO_CONTENT),
-    ok.
-
-consumers_test() ->
-    http_put("/queues/%2f/test", [], ?NO_CONTENT),
-    {Conn, _ConnPath, _ChPath, _ConnChPath} = get_conn("guest", "guest"),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    amqp_channel:subscribe(
-      Ch, #'basic.consume'{queue        = <<"test">>,
-                           no_ack       = false,
-                           consumer_tag = <<"my-ctag">> }, self()),
-    assert_list([[{exclusive,    false},
-                  {ack_required, true},
-                  {consumer_tag, <<"my-ctag">>}]], http_get("/consumers")),
-    amqp_connection:close(Conn),
-    http_delete("/queues/%2f/test", ?NO_CONTENT),
-    ok.
-
-defs(Key, URI, CreateMethod, Args) ->
-    defs(Key, URI, CreateMethod, Args,
-         fun(URI2) -> http_delete(URI2, ?NO_CONTENT) end).
-
-defs_v(Key, URI, CreateMethod, Args) ->
-    Rep1 = fun (S, S2) -> re:replace(S, "<vhost>", S2, [{return, list}]) end,
-    Rep2 = fun (L, V2) -> lists:keymap(fun (vhost) -> V2;
-                                           (V)     -> V end, 2, L) end,
-    %% Test against default vhost
-    defs(Key, Rep1(URI, "%2f"), CreateMethod, Rep2(Args, <<"/">>)),
-
-    %% Test against new vhost
-    http_put("/vhosts/test", none, ?NO_CONTENT),
-    PermArgs = [{configure, <<".*">>}, {write, <<".*">>}, {read, <<".*">>}],
-    http_put("/permissions/test/guest", PermArgs, ?NO_CONTENT),
-    defs(Key, Rep1(URI, "test"), CreateMethod, Rep2(Args, <<"test">>),
-         fun(URI2) -> http_delete(URI2, ?NO_CONTENT),
-                      http_delete("/vhosts/test", ?NO_CONTENT) end).
-
-defs(Key, URI, CreateMethod, Args, DeleteFun) ->
-    %% Create the item
-    URI2 = case CreateMethod of
-               put   -> http_put(URI, Args, ?NO_CONTENT),
-                        URI;
-               post  -> Headers = http_post(URI, Args, ?CREATED),
-                        rabbit_web_dispatch_util:unrelativise(
-                          URI, pget("location", Headers))
-           end,
-    %% Make sure it ends up in definitions
-    Definitions = http_get("/definitions", ?OK),
-    true = lists:any(fun(I) -> test_item(Args, I) end, pget(Key, Definitions)),
-
-    %% Delete it
-    DeleteFun(URI2),
-
-    %% Post the definitions back, it should get recreated in correct form
-    http_post("/definitions", Definitions, ?CREATED),
-    assert_item(Args, http_get(URI2, ?OK)),
-
-    %% And delete it again
-    DeleteFun(URI2),
-
-    ok.
-
-definitions_test() ->
-    rabbit_runtime_parameters_test:register(),
-    rabbit_runtime_parameters_test:register_policy_validator(),
-
-    defs_v(queues, "/queues/<vhost>/my-queue", put,
-           [{name,    <<"my-queue">>},
-            {durable, true}]),
-    defs_v(exchanges, "/exchanges/<vhost>/my-exchange", put,
-           [{name, <<"my-exchange">>},
-            {type, <<"direct">>}]),
-    defs_v(bindings, "/bindings/<vhost>/e/amq.direct/e/amq.fanout", post,
-           [{routing_key, <<"routing">>}, {arguments, []}]),
-    defs_v(policies, "/policies/<vhost>/my-policy", put,
-           [{vhost,      vhost},
-            {name,       <<"my-policy">>},
-            {pattern,    <<".*">>},
-            {definition, [{testpos, [1, 2, 3]}]},
-            {priority,   1}]),
-    defs_v(parameters, "/parameters/test/<vhost>/good", put,
-           [{vhost,     vhost},
-            {component, <<"test">>},
-            {name,      <<"good">>},
-            {value,     <<"ignore">>}]),
-    defs(users, "/users/myuser", put,
-         [{name,          <<"myuser">>},
-          {password_hash, <<"WAbU0ZIcvjTpxM3Q3SbJhEAM2tQ=">>},
-          {tags,          <<"management">>}]),
-    defs(vhosts, "/vhosts/myvhost", put,
-         [{name, <<"myvhost">>}]),
-    defs(permissions, "/permissions/%2f/guest", put,
-         [{user,      <<"guest">>},
-          {vhost,     <<"/">>},
-          {configure, <<"c">>},
-          {write,     <<"w">>},
-          {read,      <<"r">>}]),
-
-    %% We just messed with guest's permissions
-    http_put("/permissions/%2f/guest",
-             [{configure, <<".*">>},
-              {write,     <<".*">>},
-              {read,      <<".*">>}], ?NO_CONTENT),
-
-    BrokenConfig =
-        [{users,       []},
-         {vhosts,      []},
-         {permissions, []},
-         {queues,      []},
-         {exchanges,   [[{name,        <<"amq.direct">>},
-                         {vhost,       <<"/">>},
-                         {type,        <<"definitely not direct">>},
-                         {durable,     true},
-                         {auto_delete, false},
-                         {arguments,   []}
-                        ]]},
-         {bindings,    []}],
-    http_post("/definitions", BrokenConfig, ?BAD_REQUEST),
-
-    rabbit_runtime_parameters_test:unregister_policy_validator(),
-    rabbit_runtime_parameters_test:unregister(),
-    ok.
-
-definitions_remove_things_test() ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    amqp_channel:call(Ch, #'queue.declare'{ queue = <<"my-exclusive">>,
-                                            exclusive = true }),
-    http_get("/queues/%2f/my-exclusive", ?OK),
-    Definitions = http_get("/definitions", ?OK),
-    [] = pget(queues, Definitions),
-    [] = pget(exchanges, Definitions),
-    [] = pget(bindings, Definitions),
-    amqp_channel:close(Ch),
-    amqp_connection:close(Conn),
-    ok.
-
-definitions_server_named_queue_test() ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    #'queue.declare_ok'{ queue = QName } =
-        amqp_channel:call(Ch, #'queue.declare'{}),
-    amqp_channel:close(Ch),
-    amqp_connection:close(Conn),
-    Path = "/queues/%2f/" ++ mochiweb_util:quote_plus(QName),
-    http_get(Path, ?OK),
-    Definitions = http_get("/definitions", ?OK),
-    http_delete(Path, ?NO_CONTENT),
-    http_get(Path, ?NOT_FOUND),
-    http_post("/definitions", Definitions, ?CREATED),
-    http_get(Path, ?OK),
-    http_delete(Path, ?NO_CONTENT),
-    ok.
-
-aliveness_test() ->
-    [{status, <<"ok">>}] = http_get("/aliveness-test/%2f", ?OK),
-    http_get("/aliveness-test/foo", ?NOT_FOUND),
-    http_delete("/queues/%2f/aliveness-test", ?NO_CONTENT),
-    ok.
-
-arguments_test() ->
-    XArgs = [{type, <<"headers">>},
-             {arguments, [{'alternate-exchange', <<"amq.direct">>}]}],
-    QArgs = [{arguments, [{'x-expires', 1800000}]}],
-    BArgs = [{routing_key, <<"">>},
-             {arguments, [{'x-match', <<"all">>},
-                          {foo, <<"bar">>}]}],
-    http_put("/exchanges/%2f/myexchange", XArgs, ?NO_CONTENT),
-    http_put("/queues/%2f/myqueue", QArgs, ?NO_CONTENT),
-    http_post("/bindings/%2f/e/myexchange/q/myqueue", BArgs, ?CREATED),
-    Definitions = http_get("/definitions", ?OK),
-    http_delete("/exchanges/%2f/myexchange", ?NO_CONTENT),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    http_post("/definitions", Definitions, ?CREATED),
-    [{'alternate-exchange', <<"amq.direct">>}] =
-        pget(arguments, http_get("/exchanges/%2f/myexchange", ?OK)),
-    [{'x-expires', 1800000}] =
-        pget(arguments, http_get("/queues/%2f/myqueue", ?OK)),
-    true = lists:sort([{'x-match', <<"all">>}, {foo, <<"bar">>}]) =:=
-           lists:sort(pget(arguments,
-                           http_get("/bindings/%2f/e/myexchange/q/myqueue/" ++
-                                    "~nXOkVwqZzUOdS9_HcBWheg", ?OK))),
-    http_delete("/exchanges/%2f/myexchange", ?NO_CONTENT),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    ok.
-
-arguments_table_test() ->
-    Args = [{'upstreams', [<<"amqp://localhost/%2f/upstream1">>,
-                           <<"amqp://localhost/%2f/upstream2">>]}],
-    XArgs = [{type, <<"headers">>},
-             {arguments, Args}],
-    http_put("/exchanges/%2f/myexchange", XArgs, ?NO_CONTENT),
-    Definitions = http_get("/definitions", ?OK),
-    http_delete("/exchanges/%2f/myexchange", ?NO_CONTENT),
-    http_post("/definitions", Definitions, ?CREATED),
-    Args = pget(arguments, http_get("/exchanges/%2f/myexchange", ?OK)),
-    http_delete("/exchanges/%2f/myexchange", ?NO_CONTENT),
-    ok.
-
-queue_purge_test() ->
-    QArgs = [],
-    http_put("/queues/%2f/myqueue", QArgs, ?NO_CONTENT),
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    Publish = fun() ->
-                      amqp_channel:call(
-                        Ch, #'basic.publish'{exchange = <<"">>,
-                                             routing_key = <<"myqueue">>},
-                        #amqp_msg{payload = <<"message">>})
-              end,
-    Publish(),
-    Publish(),
-    amqp_channel:call(
-      Ch, #'queue.declare'{queue = <<"exclusive">>, exclusive = true}),
-    {#'basic.get_ok'{}, _} =
-        amqp_channel:call(Ch, #'basic.get'{queue = <<"myqueue">>}),
-    http_delete("/queues/%2f/myqueue/contents", ?NO_CONTENT),
-    http_delete("/queues/%2f/badqueue/contents", ?NOT_FOUND),
-    http_delete("/queues/%2f/exclusive/contents", ?BAD_REQUEST),
-    http_delete("/queues/%2f/exclusive", ?BAD_REQUEST),
-    #'basic.get_empty'{} =
-        amqp_channel:call(Ch, #'basic.get'{queue = <<"myqueue">>}),
-    amqp_channel:close(Ch),
-    amqp_connection:close(Conn),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    ok.
-
-queue_actions_test() ->
-    http_put("/queues/%2f/q", [], ?NO_CONTENT),
-    http_post("/queues/%2f/q/actions", [{action, sync}], ?NO_CONTENT),
-    http_post("/queues/%2f/q/actions", [{action, cancel_sync}], ?NO_CONTENT),
-    http_post("/queues/%2f/q/actions", [{action, change_colour}], ?BAD_REQUEST),
-    http_delete("/queues/%2f/q", ?NO_CONTENT),
-    ok.
-
-exclusive_consumer_test() ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    #'queue.declare_ok'{ queue = QName } =
-        amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
-    amqp_channel:subscribe(Ch, #'basic.consume'{queue     = QName,
-                                                exclusive = true}, self()),
-    timer:sleep(1000), %% Sadly we need to sleep to let the stats update
-    http_get("/queues/%2f/"), %% Just check we don't blow up
-    amqp_channel:close(Ch),
-    amqp_connection:close(Conn),
-    ok.
-
-sorting_test() ->
-    QArgs = [],
-    PermArgs = [{configure, <<".*">>}, {write, <<".*">>}, {read, <<".*">>}],
-    http_put("/vhosts/vh1", none, ?NO_CONTENT),
-    http_put("/permissions/vh1/guest", PermArgs, ?NO_CONTENT),
-    http_put("/queues/%2f/test0", QArgs, ?NO_CONTENT),
-    http_put("/queues/vh1/test1", QArgs, ?NO_CONTENT),
-    http_put("/queues/%2f/test2", QArgs, ?NO_CONTENT),
-    http_put("/queues/vh1/test3", QArgs, ?NO_CONTENT),
-    assert_list([[{name, <<"test0">>}],
-                 [{name, <<"test2">>}],
-                 [{name, <<"test1">>}],
-                 [{name, <<"test3">>}]], http_get("/queues", ?OK)),
-    assert_list([[{name, <<"test0">>}],
-                 [{name, <<"test1">>}],
-                 [{name, <<"test2">>}],
-                 [{name, <<"test3">>}]], http_get("/queues?sort=name", ?OK)),
-    assert_list([[{name, <<"test0">>}],
-                 [{name, <<"test2">>}],
-                 [{name, <<"test1">>}],
-                 [{name, <<"test3">>}]], http_get("/queues?sort=vhost", ?OK)),
-    assert_list([[{name, <<"test3">>}],
-                 [{name, <<"test1">>}],
-                 [{name, <<"test2">>}],
-                 [{name, <<"test0">>}]], http_get("/queues?sort_reverse=true", ?OK)),
-    assert_list([[{name, <<"test3">>}],
-                 [{name, <<"test2">>}],
-                 [{name, <<"test1">>}],
-                 [{name, <<"test0">>}]], http_get("/queues?sort=name&sort_reverse=true", ?OK)),
-    assert_list([[{name, <<"test3">>}],
-                 [{name, <<"test1">>}],
-                 [{name, <<"test2">>}],
-                 [{name, <<"test0">>}]], http_get("/queues?sort=vhost&sort_reverse=true", ?OK)),
-    %% Rather poor but at least test it doesn't blow up with dots
-    http_get("/queues?sort=owner_pid_details.name", ?OK),
-    http_delete("/queues/%2f/test0", ?NO_CONTENT),
-    http_delete("/queues/vh1/test1", ?NO_CONTENT),
-    http_delete("/queues/%2f/test2", ?NO_CONTENT),
-    http_delete("/queues/vh1/test3", ?NO_CONTENT),
-    http_delete("/vhosts/vh1", ?NO_CONTENT),
-    ok.
-
-columns_test() ->
-    http_put("/queues/%2f/test", [{arguments, [{<<"foo">>, <<"bar">>}]}],
-             ?NO_CONTENT),
-    [[{name, <<"test">>}, {arguments, [{foo, <<"bar">>}]}]] =
-        http_get("/queues?columns=arguments.foo,name", ?OK),
-    [{name, <<"test">>}, {arguments, [{foo, <<"bar">>}]}] =
-        http_get("/queues/%2f/test?columns=arguments.foo,name", ?OK),
-    http_delete("/queues/%2f/test", ?NO_CONTENT),
-    ok.
-
-get_test() ->
-    %% Real world example...
-    Headers = [{<<"x-forwarding">>, array,
-                [{table,
-                  [{<<"uri">>, longstr,
-                    <<"amqp://localhost/%2f/upstream">>}]}]}],
-    http_put("/queues/%2f/myqueue", [], ?NO_CONTENT),
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    Publish = fun (Payload) ->
-                      amqp_channel:cast(
-                        Ch, #'basic.publish'{exchange = <<>>,
-                                             routing_key = <<"myqueue">>},
-                        #amqp_msg{props = #'P_basic'{headers = Headers},
-                                  payload = Payload})
-              end,
-    Publish(<<"1aaa">>),
-    Publish(<<"2aaa">>),
-    Publish(<<"3aaa">>),
-    amqp_connection:close(Conn),
-    [Msg] = http_post("/queues/%2f/myqueue/get", [{requeue,  false},
-                                                  {count,    1},
-                                                  {encoding, auto},
-                                                  {truncate, 1}], ?OK),
-    false         = pget(redelivered, Msg),
-    <<>>          = pget(exchange,    Msg),
-    <<"myqueue">> = pget(routing_key, Msg),
-    <<"1">>       = pget(payload,     Msg),
-    [{'x-forwarding',
-      [[{uri,<<"amqp://localhost/%2f/upstream">>}]]}] =
-        pget(headers, pget(properties, Msg)),
-
-    [M2, M3] = http_post("/queues/%2f/myqueue/get", [{requeue,  true},
-                                                     {count,    5},
-                                                     {encoding, auto}], ?OK),
-    <<"2aaa">> = pget(payload, M2),
-    <<"3aaa">> = pget(payload, M3),
-    2 = length(http_post("/queues/%2f/myqueue/get", [{requeue,  false},
-                                                     {count,    5},
-                                                     {encoding, auto}], ?OK)),
-    [] = http_post("/queues/%2f/myqueue/get", [{requeue,  false},
-                                               {count,    5},
-                                               {encoding, auto}], ?OK),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    ok.
-
-get_fail_test() ->
-    http_put("/users/myuser", [{password, <<"password">>},
-                               {tags, <<"management">>}], ?NO_CONTENT),
-    http_put("/queues/%2f/myqueue", [], ?NO_CONTENT),
-    http_post("/queues/%2f/myqueue/get",
-              [{requeue,  false},
-               {count,    1},
-               {encoding, auto}], "myuser", "password", ?NOT_AUTHORISED),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    http_delete("/users/myuser", ?NO_CONTENT),
-    ok.
-
-publish_test() ->
-    Headers = [{'x-forwarding', [[{uri,<<"amqp://localhost/%2f/upstream">>}]]}],
-    Msg = msg(<<"myqueue">>, Headers, <<"Hello world">>),
-    http_put("/queues/%2f/myqueue", [], ?NO_CONTENT),
-    ?assertEqual([{routed, true}],
-                 http_post("/exchanges/%2f/amq.default/publish", Msg, ?OK)),
-    [Msg2] = http_post("/queues/%2f/myqueue/get", [{requeue,  false},
-                                                   {count,    1},
-                                                   {encoding, auto}], ?OK),
-    assert_item(Msg, Msg2),
-    http_post("/exchanges/%2f/amq.default/publish", Msg2, ?OK),
-    [Msg3] = http_post("/queues/%2f/myqueue/get", [{requeue,  false},
-                                                   {count,    1},
-                                                   {encoding, auto}], ?OK),
-    assert_item(Msg, Msg3),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    ok.
-
-publish_fail_test() ->
-    Msg = msg(<<"myqueue">>, [], <<"Hello world">>),
-    http_put("/queues/%2f/myqueue", [], ?NO_CONTENT),
-    http_put("/users/myuser", [{password, <<"password">>},
-                               {tags, <<"management">>}], ?NO_CONTENT),
-    http_post("/exchanges/%2f/amq.default/publish", Msg, "myuser", "password",
-              ?NOT_AUTHORISED),
-    Msg2 = [{exchange,         <<"">>},
-            {routing_key,      <<"myqueue">>},
-            {properties,       [{user_id, <<"foo">>}]},
-            {payload,          <<"Hello world">>},
-            {payload_encoding, <<"string">>}],
-    http_post("/exchanges/%2f/amq.default/publish", Msg2, ?BAD_REQUEST),
-    Msg3 = [{exchange,         <<"">>},
-            {routing_key,      <<"myqueue">>},
-            {properties,       []},
-            {payload,          [<<"not a string">>]},
-            {payload_encoding, <<"string">>}],
-    http_post("/exchanges/%2f/amq.default/publish", Msg3, ?BAD_REQUEST),
-    MsgTemplate = [{exchange,         <<"">>},
-                   {routing_key,      <<"myqueue">>},
-                   {payload,          <<"Hello world">>},
-                   {payload_encoding, <<"string">>}],
-    [http_post("/exchanges/%2f/amq.default/publish",
-               [{properties, [BadProp]} | MsgTemplate], ?BAD_REQUEST)
-     || BadProp <- [{priority,   <<"really high">>},
-                    {timestamp,  <<"recently">>},
-                    {expiration, 1234}]],
-    http_delete("/users/myuser", ?NO_CONTENT),
-    ok.
-
-publish_base64_test() ->
-    Msg     = msg(<<"myqueue">>, [], <<"YWJjZA==">>, <<"base64">>),
-    BadMsg1 = msg(<<"myqueue">>, [], <<"flibble">>,  <<"base64">>),
-    BadMsg2 = msg(<<"myqueue">>, [], <<"YWJjZA==">>, <<"base99">>),
-    http_put("/queues/%2f/myqueue", [], ?NO_CONTENT),
-    http_post("/exchanges/%2f/amq.default/publish", Msg, ?OK),
-    http_post("/exchanges/%2f/amq.default/publish", BadMsg1, ?BAD_REQUEST),
-    http_post("/exchanges/%2f/amq.default/publish", BadMsg2, ?BAD_REQUEST),
-    [Msg2] = http_post("/queues/%2f/myqueue/get", [{requeue,  false},
-                                                   {count,    1},
-                                                   {encoding, auto}], ?OK),
-    ?assertEqual(<<"abcd">>, pget(payload, Msg2)),
-    http_delete("/queues/%2f/myqueue", ?NO_CONTENT),
-    ok.
-
-publish_unrouted_test() ->
-    Msg = msg(<<"hmmm">>, [], <<"Hello world">>),
-    ?assertEqual([{routed, false}],
-                 http_post("/exchanges/%2f/amq.default/publish", Msg, ?OK)).
-
-if_empty_unused_test() ->
-    http_put("/exchanges/%2f/test", [], ?NO_CONTENT),
-    http_put("/queues/%2f/test", [], ?NO_CONTENT),
-    http_post("/bindings/%2f/e/test/q/test", [], ?CREATED),
-    http_post("/exchanges/%2f/amq.default/publish",
-              msg(<<"test">>, [], <<"Hello world">>), ?OK),
-    http_delete("/queues/%2f/test?if-empty=true", ?BAD_REQUEST),
-    http_delete("/exchanges/%2f/test?if-unused=true", ?BAD_REQUEST),
-    http_delete("/queues/%2f/test/contents", ?NO_CONTENT),
-
-    {Conn, _ConnPath, _ChPath, _ConnChPath} = get_conn("guest", "guest"),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    amqp_channel:subscribe(Ch, #'basic.consume'{queue = <<"test">> }, self()),
-    http_delete("/queues/%2f/test?if-unused=true", ?BAD_REQUEST),
-    amqp_connection:close(Conn),
-
-    http_delete("/queues/%2f/test?if-empty=true", ?NO_CONTENT),
-    http_delete("/exchanges/%2f/test?if-unused=true", ?NO_CONTENT),
-    passed.
-
-parameters_test() ->
-    rabbit_runtime_parameters_test:register(),
-
-    http_put("/parameters/test/%2f/good", [{value, <<"ignore">>}], ?NO_CONTENT),
-    http_put("/parameters/test/%2f/maybe", [{value, <<"good">>}], ?NO_CONTENT),
-    http_put("/parameters/test/%2f/maybe", [{value, <<"bad">>}], ?BAD_REQUEST),
-    http_put("/parameters/test/%2f/bad", [{value, <<"good">>}], ?BAD_REQUEST),
-    http_put("/parameters/test/um/good", [{value, <<"ignore">>}], ?NOT_FOUND),
-
-    Good = [{vhost,     <<"/">>},
-            {component, <<"test">>},
-            {name,      <<"good">>},
-            {value,     <<"ignore">>}],
-    Maybe = [{vhost,     <<"/">>},
-             {component, <<"test">>},
-             {name,      <<"maybe">>},
-             {value,     <<"good">>}],
-    List = [Good, Maybe],
-
-    assert_list(List, http_get("/parameters")),
-    assert_list(List, http_get("/parameters/test")),
-    assert_list(List, http_get("/parameters/test/%2f")),
-    assert_list([],   http_get("/parameters/oops")),
-    http_get("/parameters/test/oops", ?NOT_FOUND),
-
-    assert_item(Good,  http_get("/parameters/test/%2f/good", ?OK)),
-    assert_item(Maybe, http_get("/parameters/test/%2f/maybe", ?OK)),
-
-    http_delete("/parameters/test/%2f/good", ?NO_CONTENT),
-    http_delete("/parameters/test/%2f/maybe", ?NO_CONTENT),
-    http_delete("/parameters/test/%2f/bad", ?NOT_FOUND),
-
-    0 = length(http_get("/parameters")),
-    0 = length(http_get("/parameters/test")),
-    0 = length(http_get("/parameters/test/%2f")),
-    rabbit_runtime_parameters_test:unregister(),
-    ok.
-
-policy_test() ->
-    rabbit_runtime_parameters_test:register_policy_validator(),
-    PolicyPos  = [{vhost,      <<"/">>},
-                  {name,       <<"policy_pos">>},
-                  {pattern,    <<".*">>},
-                  {definition, [{testpos,[1,2,3]}]},
-                  {priority,   10}],
-    PolicyEven = [{vhost,      <<"/">>},
-                  {name,       <<"policy_even">>},
-                  {pattern,    <<".*">>},
-                  {definition, [{testeven,[1,2,3,4]}]},
-                  {priority,   10}],
-    http_put(
-      "/policies/%2f/policy_pos",
-      lists:keydelete(key, 1, PolicyPos),
-      ?NO_CONTENT),
-    http_put(
-      "/policies/%2f/policy_even",
-      lists:keydelete(key, 1, PolicyEven),
-      ?NO_CONTENT),
-    assert_item(PolicyPos,  http_get("/policies/%2f/policy_pos",  ?OK)),
-    assert_item(PolicyEven, http_get("/policies/%2f/policy_even", ?OK)),
-    List = [PolicyPos, PolicyEven],
-    assert_list(List, http_get("/policies",     ?OK)),
-    assert_list(List, http_get("/policies/%2f", ?OK)),
-
-    http_delete("/policies/%2f/policy_pos", ?NO_CONTENT),
-    http_delete("/policies/%2f/policy_even", ?NO_CONTENT),
-    0 = length(http_get("/policies")),
-    0 = length(http_get("/policies/%2f")),
-    rabbit_runtime_parameters_test:unregister_policy_validator(),
-    ok.
-
-policy_permissions_test() ->
-    rabbit_runtime_parameters_test:register(),
-
-    http_put("/users/admin",  [{password, <<"admin">>},
-                               {tags, <<"administrator">>}], ?NO_CONTENT),
-    http_put("/users/mon",    [{password, <<"monitor">>},
-                               {tags, <<"monitoring">>}], ?NO_CONTENT),
-    http_put("/users/policy", [{password, <<"policy">>},
-                               {tags, <<"policymaker">>}], ?NO_CONTENT),
-    http_put("/users/mgmt",   [{password, <<"mgmt">>},
-                               {tags, <<"management">>}], ?NO_CONTENT),
-    Perms = [{configure, <<".*">>},
-             {write,     <<".*">>},
-             {read,      <<".*">>}],
-    http_put("/vhosts/v", none, ?NO_CONTENT),
-    http_put("/permissions/v/admin",  Perms, ?NO_CONTENT),
-    http_put("/permissions/v/mon",    Perms, ?NO_CONTENT),
-    http_put("/permissions/v/policy", Perms, ?NO_CONTENT),
-    http_put("/permissions/v/mgmt",   Perms, ?NO_CONTENT),
-
-    Policy = [{pattern,    <<".*">>},
-              {definition, [{<<"ha-mode">>, <<"all">>}]}],
-    Param = [{value, <<"">>}],
-
-    http_put("/policies/%2f/HA", Policy, ?NO_CONTENT),
-    http_put("/parameters/test/%2f/good", Param, ?NO_CONTENT),
-
-    Pos = fun (U) ->
-                  http_put("/policies/v/HA",        Policy, U, U, ?NO_CONTENT),
-                  http_put(
-                    "/parameters/test/v/good",       Param, U, U, ?NO_CONTENT),
-                  1 = length(http_get("/policies",          U, U, ?OK)),
-                  1 = length(http_get("/parameters/test",   U, U, ?OK)),
-                  1 = length(http_get("/parameters",        U, U, ?OK)),
-                  1 = length(http_get("/policies/v",        U, U, ?OK)),
-                  1 = length(http_get("/parameters/test/v", U, U, ?OK)),
-                  http_get("/policies/v/HA",                U, U, ?OK),
-                  http_get("/parameters/test/v/good",       U, U, ?OK)
-          end,
-    Neg = fun (U) ->
-                  http_put("/policies/v/HA",    Policy, U, U, ?NOT_AUTHORISED),
-                  http_put(
-                    "/parameters/test/v/good",   Param, U, U, ?NOT_AUTHORISED),
-                  http_put(
-                    "/parameters/test/v/admin",  Param, U, U, ?NOT_AUTHORISED),
-                  http_get("/policies",                 U, U, ?NOT_AUTHORISED),
-                  http_get("/policies/v",               U, U, ?NOT_AUTHORISED),
-                  http_get("/parameters",               U, U, ?NOT_AUTHORISED),
-                  http_get("/parameters/test",          U, U, ?NOT_AUTHORISED),
-                  http_get("/parameters/test/v",        U, U, ?NOT_AUTHORISED),
-                  http_get("/policies/v/HA",            U, U, ?NOT_AUTHORISED),
-                  http_get("/parameters/test/v/good",   U, U, ?NOT_AUTHORISED)
-          end,
-    AlwaysNeg =
-        fun (U) ->
-                http_put("/policies/%2f/HA",  Policy, U, U, ?NOT_AUTHORISED),
-                http_put(
-                  "/parameters/test/%2f/good", Param, U, U, ?NOT_AUTHORISED),
-                http_get("/policies/%2f/HA",          U, U, ?NOT_AUTHORISED),
-                http_get("/parameters/test/%2f/good", U, U, ?NOT_AUTHORISED)
-        end,
-
-    [Neg(U) || U <- ["mon", "mgmt"]],
-    [Pos(U) || U <- ["admin", "policy"]],
-    [AlwaysNeg(U) || U <- ["mon", "mgmt", "admin", "policy"]],
-
-    %% This one is deliberately different between admin and policymaker.
-    http_put("/parameters/test/v/admin", Param, "admin", "admin", ?NO_CONTENT),
-    http_put("/parameters/test/v/admin", Param, "policy", "policy",
-             ?BAD_REQUEST),
-
-    http_delete("/vhosts/v", ?NO_CONTENT),
-    http_delete("/users/admin", ?NO_CONTENT),
-    http_delete("/users/mon", ?NO_CONTENT),
-    http_delete("/users/policy", ?NO_CONTENT),
-    http_delete("/users/mgmt", ?NO_CONTENT),
-    http_delete("/policies/%2f/HA", ?NO_CONTENT),
-
-    rabbit_runtime_parameters_test:unregister(),
-    ok.
-
-
-extensions_test() ->
-    [[{javascript,<<"dispatcher.js">>}]] = http_get("/extensions", ?OK),
-    ok.
-
-%%---------------------------------------------------------------------------
-
-msg(Key, Headers, Body) ->
-    msg(Key, Headers, Body, <<"string">>).
-
-msg(Key, Headers, Body, Enc) ->
-    [{exchange,         <<"">>},
-     {routing_key,      Key},
-     {properties,       [{delivery_mode, 2},
-                         {headers,       Headers}]},
-     {payload,          Body},
-     {payload_encoding, Enc}].
-
-local_port(Conn) ->
-    [{sock, Sock}] = amqp_connection:info(Conn, [sock]),
-    {ok, Port} = inet:port(Sock),
-    Port.
-
-%%---------------------------------------------------------------------------
-http_get(Path) ->
-    http_get(Path, ?OK).
-
-http_get(Path, CodeExp) ->
-    http_get(Path, "guest", "guest", CodeExp).
-
-http_get(Path, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(get, Path, [auth_header(User, Pass)]),
-    assert_code(CodeExp, CodeAct, "GET", Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-http_put(Path, List, CodeExp) ->
-    http_put_raw(Path, format_for_upload(List), CodeExp).
-
-http_put(Path, List, User, Pass, CodeExp) ->
-    http_put_raw(Path, format_for_upload(List), User, Pass, CodeExp).
-
-http_post(Path, List, CodeExp) ->
-    http_post_raw(Path, format_for_upload(List), CodeExp).
-
-http_post(Path, List, User, Pass, CodeExp) ->
-    http_post_raw(Path, format_for_upload(List), User, Pass, CodeExp).
-
-format_for_upload(none) ->
-    <<"">>;
-format_for_upload(List) ->
-    iolist_to_binary(mochijson2:encode({struct, List})).
-
-http_put_raw(Path, Body, CodeExp) ->
-    http_upload_raw(put, Path, Body, "guest", "guest", CodeExp).
-
-http_put_raw(Path, Body, User, Pass, CodeExp) ->
-    http_upload_raw(put, Path, Body, User, Pass, CodeExp).
-
-http_post_raw(Path, Body, CodeExp) ->
-    http_upload_raw(post, Path, Body, "guest", "guest", CodeExp).
-
-http_post_raw(Path, Body, User, Pass, CodeExp) ->
-    http_upload_raw(post, Path, Body, User, Pass, CodeExp).
-
-http_upload_raw(Type, Path, Body, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(Type, Path, [auth_header(User, Pass)], Body),
-    assert_code(CodeExp, CodeAct, Type, Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-http_delete(Path, CodeExp) ->
-    http_delete(Path, "guest", "guest", CodeExp).
-
-http_delete(Path, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(delete, Path, [auth_header(User, Pass)]),
-    assert_code(CodeExp, CodeAct, "DELETE", Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-assert_code(CodeExp, CodeAct, Type, Path, Body) ->
-    case CodeExp of
-        CodeAct -> ok;
-        _       -> throw({expected, CodeExp, got, CodeAct, type, Type,
-                          path, Path, body, Body})
-    end.
-
-req(Type, Path, Headers) ->
-    httpc:request(Type, {?PREFIX ++ Path, Headers}, ?HTTPC_OPTS, []).
-
-req(Type, Path, Headers, Body) ->
-    httpc:request(Type, {?PREFIX ++ Path, Headers, "application/json", Body},
-                  ?HTTPC_OPTS, []).
-
-decode(?OK, _Headers,  ResBody) -> cleanup(mochijson2:decode(ResBody));
-decode(_,    Headers, _ResBody) -> Headers.
-
-cleanup(L) when is_list(L) ->
-    [cleanup(I) || I <- L];
-cleanup({struct, I}) ->
-    cleanup(I);
-cleanup({K, V}) when is_binary(K) ->
-    {list_to_atom(binary_to_list(K)), cleanup(V)};
-cleanup(I) ->
-    I.
-
-auth_header(Username, Password) ->
-    {"Authorization",
-     "Basic " ++ binary_to_list(base64:encode(Username ++ ":" ++ Password))}.
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_unit.erl b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_unit.erl
deleted file mode 100644 (file)
index de71872..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_mgmt_test_unit).
-
--include_lib("eunit/include/eunit.hrl").
-
-tokenise_test() ->
-    [] = rabbit_mgmt_format:tokenise(""),
-    ["foo"] = rabbit_mgmt_format:tokenise("foo"),
-    ["foo", "bar"] = rabbit_mgmt_format:tokenise("foo~bar"),
-    ["foo", "", "bar"] = rabbit_mgmt_format:tokenise("foo~~bar"),
-    ok.
-
-pack_binding_test() ->
-    assert_binding(<<"~">>,
-                   <<"">>, []),
-    assert_binding(<<"foo">>,
-                   <<"foo">>, []),
-    assert_binding(<<"foo%7Ebar%2Fbash">>,
-                   <<"foo~bar/bash">>, []),
-    assert_binding(<<"foo%7Ebar%7Ebash">>,
-                   <<"foo~bar~bash">>, []),
-    ok.
-
-amqp_table_test() ->
-    assert_table({struct, []}, []),
-    assert_table({struct, [{<<"x-expires">>, 1000}]},
-                 [{<<"x-expires">>, long, 1000}]),
-    assert_table({struct,
-                  [{<<"x-forwarding">>,
-                    [{struct,
-                      [{<<"uri">>, <<"amqp://localhost/%2f/upstream">>}]}]}]},
-                 [{<<"x-forwarding">>, array,
-                   [{table, [{<<"uri">>, longstr,
-                              <<"amqp://localhost/%2f/upstream">>}]}]}]).
-
-assert_table(JSON, AMQP) ->
-    ?assertEqual(JSON, rabbit_mgmt_format:amqp_table(AMQP)),
-    ?assertEqual(AMQP, rabbit_mgmt_format:to_amqp_table(JSON)).
-
-%%--------------------------------------------------------------------
-
-assert_binding(Packed, Routing, Args) ->
-    case rabbit_mgmt_format:pack_binding_props(Routing, Args) of
-        Packed ->
-            ok;
-        Act ->
-            throw({pack, Routing, Args, expected, Packed, got, Act})
-    end.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_util.erl b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbit_mgmt_test_util.erl
deleted file mode 100644 (file)
index 1e53d89..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2012 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_mgmt_test_util).
-
--export([assert_list/2, assert_item/2, test_item/2]).
-
-assert_list(Exp, Act) ->
-    case length(Exp) == length(Act) of
-        true  -> ok;
-        false -> throw({expected, Exp, actual, Act})
-    end,
-    [case length(lists:filter(fun(ActI) -> test_item(ExpI, ActI) end, Act)) of
-         1 -> ok;
-         N -> throw({found, N, ExpI, in, Act})
-     end || ExpI <- Exp].
-
-assert_item(Exp, Act) ->
-    case test_item0(Exp, Act) of
-        [] -> ok;
-        Or -> throw(Or)
-    end.
-
-test_item(Exp, Act) ->
-    case test_item0(Exp, Act) of
-        [] -> true;
-        _  -> false
-    end.
-
-test_item0(Exp, Act) ->
-    [{did_not_find, ExpI, in, Act} || ExpI <- Exp,
-                                      not lists:member(ExpI, Act)].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbitmqadmin-test-wrapper.sh b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbitmqadmin-test-wrapper.sh
deleted file mode 100755 (executable)
index d684ec9..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/sh -e
-TWO=$(python2 -c 'import sys;print(sys.version_info[0])')
-THREE=$(python3 -c 'import sys;print(sys.version_info[0])')
-
-if [ $TWO != 2 ] ; then
-    echo Python 2 not found!
-    exit 1
-fi
-
-if [ $THREE != 3 ] ; then
-    echo Python 3 not found!
-    exit 1
-fi
-
-echo
-echo ----------------------
-echo Testing under Python 2
-echo ----------------------
-
-python2 $(dirname $0)/rabbitmqadmin-test.py
-
-echo
-echo ----------------------
-echo Testing under Python 3
-echo ----------------------
-
-python3 $(dirname $0)/rabbitmqadmin-test.py
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbitmqadmin-test.py b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/rabbitmqadmin-test.py
deleted file mode 100755 (executable)
index 47af73b..0000000
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env python
-
-import unittest
-import os
-import os.path
-import socket
-import subprocess
-import sys
-import shutil
-
-# TODO test: SSL, depth, config file, encodings(?), completion(???)
-
-class TestRabbitMQAdmin(unittest.TestCase):
-    def test_no_args(self):
-        self.run_fail([])
-
-    def test_help(self):
-        self.run_success(['--help'])
-        self.run_success(['help', 'subcommands'])
-        self.run_success(['help', 'config'])
-        self.run_fail(['help', 'astronomy'])
-
-    def test_host(self):
-        self.run_success(['show', 'overview'])
-        self.run_success(['--host', 'localhost', 'show', 'overview'])
-        self.run_fail(['--host', 'some-host-that-does-not-exist', 'show', 'overview'])
-
-    def test_port(self):
-        # Test port selection
-        self.run_success(['--port', '15672', 'show', 'overview'])
-        # Test port not open
-        self.run_fail(['--port', '15673', 'show', 'overview'])
-        # Test port open but not talking HTTP
-        self.run_fail(['--port', '5672', 'show', 'overview'])
-
-    def test_config(self):
-        original_home = os.getenv('HOME')
-        tmpdir = os.getenv("TMPDIR") or os.getenv("TEMP") or "/tmp"
-        shutil.copyfile(os.path.dirname(__file__) + os.sep + "default-config",
-                        tmpdir + os.sep + ".rabbitmqadmin.conf")
-        os.environ['HOME'] = tmpdir
-
-        self.run_fail(['--config', '/tmp/no-such-config-file', 'show', 'overview'])
-
-        cf = os.path.dirname(__file__) + os.sep + "test-config"
-        self.run_success(['--config', cf, '--node', 'host_normal', 'show', 'overview'])
-
-        # test 'default node in the config file' where "default" uses an invalid host
-        self.run_fail(['--config', cf, 'show', 'overview'])
-        self.run_success(["show", "overview"])
-        self.run_fail(['--node', 'non_default', "show", "overview"])
-        os.environ['HOME'] = original_home
-
-    def test_user(self):
-        self.run_success(['--user', 'guest', '--password', 'guest', 'show', 'overview'])
-        self.run_fail(['--user', 'no', '--password', 'guest', 'show', 'overview'])
-        self.run_fail(['--user', 'guest', '--password', 'no', 'show', 'overview'])
-
-    def test_fmt_long(self):
-        self.assert_output("""
---------------------------------------------------------------------------------
-
-   name: /
-tracing: False
-
---------------------------------------------------------------------------------
-
-""", ['--format', 'long', 'list', 'vhosts', 'name', 'tracing'])
-
-    def test_fmt_kvp(self):
-        self.assert_output("""name="/" tracing="False"
-""", ['--format', 'kvp', 'list', 'vhosts', 'name', 'tracing'])
-
-    def test_fmt_tsv(self):
-        self.assert_output("""name     tracing
-/      False
-""", ['--format', 'tsv', 'list', 'vhosts', 'name', 'tracing'])
-
-    def test_fmt_table(self):
-        out = """+------+---------+
-| name | tracing |
-+------+---------+
-| /    | False   |
-+------+---------+
-"""
-        self.assert_output(out, ['list', 'vhosts', 'name', 'tracing'])
-        self.assert_output(out, ['--format', 'table', 'list', 'vhosts', 'name', 'tracing'])
-
-    def test_fmt_bash(self):
-        self.assert_output("""/
-""", ['--format', 'bash', 'list', 'vhosts', 'name', 'tracing'])
-
-    def test_vhosts(self):
-        self.assert_list(['/'], l('vhosts'))
-        self.run_success(['declare', 'vhost', 'name=foo'])
-        self.assert_list(['/', 'foo'], l('vhosts'))
-        self.run_success(['delete', 'vhost', 'name=foo'])
-        self.assert_list(['/'], l('vhosts'))
-
-    def test_users(self):
-        self.assert_list(['guest'], l('users'))
-        self.run_fail(['declare', 'user', 'name=foo'])
-        self.run_success(['declare', 'user', 'name=foo', 'password=pass', 'tags='])
-        self.assert_list(['foo', 'guest'], l('users'))
-        self.run_success(['delete', 'user', 'name=foo'])
-        self.assert_list(['guest'], l('users'))
-
-    def test_permissions(self):
-        self.run_success(['declare', 'vhost', 'name=foo'])
-        self.run_success(['declare', 'user', 'name=bar', 'password=pass', 'tags='])
-        self.assert_table([['guest', '/']], ['list', 'permissions', 'user', 'vhost'])
-        self.run_success(['declare', 'permission', 'user=bar', 'vhost=foo', 'configure=.*', 'write=.*', 'read=.*'])
-        self.assert_table([['guest', '/'], ['bar', 'foo']], ['list', 'permissions', 'user', 'vhost'])
-        self.run_success(['delete', 'user', 'name=bar'])
-        self.run_success(['delete', 'vhost', 'name=foo'])
-
-    def test_alt_vhost(self):
-        self.run_success(['declare', 'vhost', 'name=foo'])
-        self.run_success(['declare', 'permission', 'user=guest', 'vhost=foo', 'configure=.*', 'write=.*', 'read=.*'])
-        self.run_success(['declare', 'queue', 'name=in_/'])
-        self.run_success(['--vhost', 'foo', 'declare', 'queue', 'name=in_foo'])
-        self.assert_table([['/', 'in_/'], ['foo', 'in_foo']], ['list', 'queues', 'vhost', 'name'])
-        self.run_success(['--vhost', 'foo', 'delete', 'queue', 'name=in_foo'])
-        self.run_success(['delete', 'queue', 'name=in_/'])
-        self.run_success(['delete', 'vhost', 'name=foo'])
-
-    def test_exchanges(self):
-        self.run_success(['declare', 'exchange', 'name=foo', 'type=direct'])
-        self.assert_list(['', 'amq.direct', 'amq.fanout', 'amq.headers', 'amq.match', 'amq.rabbitmq.log', 'amq.rabbitmq.trace', 'amq.topic', 'foo'], l('exchanges'))
-        self.run_success(['delete', 'exchange', 'name=foo'])
-
-    def test_queues(self):
-        self.run_success(['declare', 'queue', 'name=foo'])
-        self.assert_list(['foo'], l('queues'))
-        self.run_success(['delete', 'queue', 'name=foo'])
-
-    def test_bindings(self):
-        self.run_success(['declare', 'queue', 'name=foo'])
-        self.run_success(['declare', 'binding', 'source=amq.direct', 'destination=foo', 'destination_type=queue', 'routing_key=test'])
-        self.assert_table([['', 'foo', 'queue', 'foo'], ['amq.direct', 'foo', 'queue', 'test']], ['list', 'bindings', 'source', 'destination', 'destination_type', 'routing_key'])
-        self.run_success(['delete', 'queue', 'name=foo'])
-
-    def test_policies(self):
-        self.run_success(['declare', 'policy', 'name=ha', 'pattern=.*', 'definition={"ha-mode":"all"}'])
-        self.assert_table([['ha', '/', '.*', '{"ha-mode": "all"}']], ['list', 'policies', 'name', 'vhost', 'pattern', 'definition'])
-        self.run_success(['delete', 'policy', 'name=ha'])
-
-    def test_parameters(self):
-        self.ctl(['eval', 'rabbit_runtime_parameters_test:register().'])
-        self.run_success(['declare', 'parameter', 'component=test', 'name=good', 'value=123'])
-        self.assert_table([['test', 'good', '/', '123']], ['list', 'parameters', 'component', 'name', 'vhost', 'value'])
-        self.run_success(['delete', 'parameter', 'component=test', 'name=good'])
-        self.ctl(['eval', 'rabbit_runtime_parameters_test:unregister().'])
-
-    def test_publish(self):
-        self.run_success(['declare', 'queue', 'name=test'])
-        self.run_success(['publish', 'routing_key=test', 'payload=test_1'])
-        self.run_success(['publish', 'routing_key=test', 'payload=test_2'])
-        self.run_success(['publish', 'routing_key=test'], stdin=b'test_3')
-        self.assert_table([exp_msg('test', 2, False, 'test_1')], ['get', 'queue=test', 'requeue=false'])
-        self.assert_table([exp_msg('test', 1, False, 'test_2')], ['get', 'queue=test', 'requeue=true'])
-        self.assert_table([exp_msg('test', 1, True,  'test_2')], ['get', 'queue=test', 'requeue=false'])
-        self.assert_table([exp_msg('test', 0, False, 'test_3')], ['get', 'queue=test', 'requeue=false'])
-        self.run_success(['publish', 'routing_key=test'], stdin=b'test_4')
-        filename = '/tmp/rabbitmq-test/get.txt'
-        ensure_dir(filename)
-        self.run_success(['get', 'queue=test', 'requeue=false', 'payload_file=' + filename])
-        with open(filename) as f:
-            self.assertEqual('test_4', f.read())
-        os.remove(filename)
-        self.run_success(['delete', 'queue', 'name=test'])
-
-    def test_ignore_vhost(self):
-        self.run_success(['--vhost', '/', 'show', 'overview'])
-        self.run_success(['--vhost', '/', 'list', 'users'])
-        self.run_success(['--vhost', '/', 'list', 'vhosts'])
-        self.run_success(['--vhost', '/', 'list', 'nodes'])
-        self.run_success(['--vhost', '/', 'list', 'permissions'])
-        self.run_success(['--vhost', '/', 'declare', 'user', 'name=foo', 'password=pass', 'tags='])
-        self.run_success(['delete', 'user', 'name=foo'])
-
-    def test_sort(self):
-        self.run_success(['declare', 'queue', 'name=foo'])
-        self.run_success(['declare', 'binding', 'source=amq.direct', 'destination=foo', 'destination_type=queue', 'routing_key=bbb'])
-        self.run_success(['declare', 'binding', 'source=amq.topic', 'destination=foo', 'destination_type=queue', 'routing_key=aaa'])
-        self.assert_table([['', 'foo'], ['amq.direct', 'bbb'], ['amq.topic', 'aaa']], ['--sort', 'source', 'list', 'bindings', 'source', 'routing_key'])
-        self.assert_table([['amq.topic', 'aaa'], ['amq.direct', 'bbb'], ['', 'foo']], ['--sort', 'routing_key', 'list', 'bindings', 'source', 'routing_key'])
-        self.assert_table([['amq.topic', 'aaa'], ['amq.direct', 'bbb'], ['', 'foo']], ['--sort', 'source', '--sort-reverse', 'list', 'bindings', 'source', 'routing_key'])
-        self.run_success(['delete', 'queue', 'name=foo'])
-
-    # ---------------------------------------------------------------------------
-
-    def run_success(self, args, **kwargs):
-        (stdout, ret) = self.admin(args, **kwargs)
-        if ret != 0:
-            self.fail(stdout)
-
-    def run_fail(self, args):
-        (stdout, ret) = self.admin(args)
-        if ret == 0:
-            self.fail(stdout)
-
-    def assert_output(self, expected, args):
-        self.assertEqual(expected, self.admin(args)[0])
-
-    def assert_list(self, expected, args0):
-        args = ['-f', 'tsv', '-q']
-        args.extend(args0)
-        self.assertEqual(expected, self.admin(args)[0].splitlines())
-
-    def assert_table(self, expected, args0):
-        args = ['-f', 'tsv', '-q']
-        args.extend(args0)
-        self.assertEqual(expected, [l.split('\t') for l in self.admin(args)[0].splitlines()])
-
-    def admin(self, args0, stdin=None):
-        args = ['python{0}'.format(sys.version_info[0]),
-                norm('../../../bin/rabbitmqadmin')]
-        args.extend(args0)
-        return run(args, stdin)
-
-    def ctl(self, args0, stdin=None):
-        args = [norm('../../../../rabbitmq-server/scripts/rabbitmqctl'), '-n', 'rabbit-test']
-        args.extend(args0)
-        (stdout, ret) = run(args, stdin)
-        if ret != 0:
-            self.fail(stdout)
-
-def norm(cmd):
-    return os.path.normpath(os.path.join(os.getcwd(), sys.argv[0], cmd))
-
-def run(args, stdin):
-    proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    (stdout, stderr) = proc.communicate(stdin)
-    returncode = proc.returncode
-    res = stdout.decode('utf-8') + stderr.decode('utf-8')
-    return (res, returncode)
-
-def l(thing):
-    return ['list', thing, 'name']
-
-def exp_msg(key, count, redelivered, payload):
-    # routing_key, exchange, message_count, payload, payload_bytes, payload_encoding, properties, redelivered
-    return [key, '', str(count), payload, str(len(payload)), 'string', '', str(redelivered)]
-
-def ensure_dir(f):
-    d = os.path.dirname(f)
-    if not os.path.exists(d):
-        os.makedirs(d)
-
-if __name__ == '__main__':
-    print("\nrabbitmqadmin tests\n===================\n")
-    suite = unittest.TestLoader().loadTestsFromTestCase(TestRabbitMQAdmin)
-    results = unittest.TextTestRunner(verbosity=2).run(suite)
-    if not results.wasSuccessful():
-        sys.exit(1)
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/test/src/test-config b/rabbitmq-server/plugins-src/rabbitmq-management/test/src/test-config
deleted file mode 100644 (file)
index 93322e7..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-# rabbitmqadmin.conf.example START
-
-[host_normal]
-hostname = localhost
-port = 15672
-username = guest
-password = guest
-declare_vhost = / # Used as default for declare / delete only
-vhost = /         # Used as default for declare / delete / list
-
-[default]
-hostname = localhost
-port = 99999
-username = guest
-password = guest
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-mqtt/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-mqtt/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/Makefile b/rabbitmq-server/plugins-src/rabbitmq-mqtt/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/README.md b/rabbitmq-server/plugins-src/rabbitmq-mqtt/README.md
deleted file mode 100644 (file)
index 72ba9ea..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-# RabbitMQ MQTT adapter
-
-The MQTT adapter is included in the RabbitMQ distribution.  To enable
-it, use <href="http://www.rabbitmq.com/man/rabbitmq-plugins.1.man.html">rabbitmq-plugins</a>:
-
-    rabbitmq-plugins enable rabbitmq_mqtt
-
-Full usage instructions can be found at
-<http://www.rabbitmq.com/mqtt.html>.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/package.mk b/rabbitmq-server/plugins-src/rabbitmq-mqtt/package.mk
deleted file mode 100644 (file)
index 9db0895..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-server rabbitmq-erlang-client rabbitmq-test
-STANDALONE_TEST_COMMANDS:=eunit:test(rabbit_mqtt_util)
-WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/test.sh
-WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/test/ebin/test
-WITH_BROKER_SETUP_SCRIPTS:=$(PACKAGE_DIR)/test/setup-rabbit-test.sh
-
-define package_rules
-
-$(PACKAGE_DIR)+pre-test::
-       rm -rf $(PACKAGE_DIR)/test/certs
-       mkdir $(PACKAGE_DIR)/test/certs
-       mkdir -p $(PACKAGE_DIR)/test/ebin
-       sed -E -e "s|%%CERTS_DIR%%|$(abspath $(PACKAGE_DIR))/test/certs|g" < $(PACKAGE_DIR)/test/src/test.config > $(PACKAGE_DIR)/test/ebin/test.config
-       $(MAKE) -C $(PACKAGE_DIR)/../rabbitmq-test/certs all PASSWORD=bunnychow DIR=$(abspath $(PACKAGE_DIR))/test/certs
-       cp $(PACKAGE_DIR)/test/src/rabbitmq_mqtt_standalone.app.src $(PACKAGE_DIR)/test/ebin/rabbitmq_mqtt.app
-
-$(PACKAGE_DIR)+clean::
-       rm -rf $(PACKAGE_DIR)/test/certs
-
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbitmq_mqtt.app.src b/rabbitmq-server/plugins-src/rabbitmq-mqtt/src/rabbitmq_mqtt.app.src
deleted file mode 100644 (file)
index 6b60610..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-{application, rabbitmq_mqtt,
- [{description, "RabbitMQ MQTT Adapter"},
-  {vsn, "%%VSN%%"},
-  {modules, []},
-  {registered, []},
-  {mod, {rabbit_mqtt, []}},
-  {env, [{default_user, <<"guest">>},
-         {default_pass, <<"guest">>},
-         {ssl_cert_login,false},
-         {allow_anonymous, true},
-         {vhost, <<"/">>},
-         {exchange, <<"amq.topic">>},
-         {subscription_ttl, 1800000}, % 30 min
-         {prefetch, 10},
-         {ssl_listeners, []},
-         {tcp_listeners, [1883]},
-         {tcp_listen_options, [binary,
-                               {packet,    raw},
-                               {reuseaddr, true},
-                               {backlog,   128},
-                               {nodelay,   true}]}]},
-  {applications, [kernel, stdlib, rabbit, amqp_client]}]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/Makefile b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/Makefile
deleted file mode 100644 (file)
index 190e740..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-UPSTREAM_GIT=https://git.eclipse.org/gitroot/paho/org.eclipse.paho.mqtt.java.git
-REVISION=00b5b2f99ae8410b7d96d106e080a092c5f92546
-
-JC=javac
-
-TEST_SRC=src
-CHECKOUT_DIR=test_client
-PAHO_JAR_NAME=org.eclipse.paho.client.mqttv3.jar
-PAHO_JAR=$(CHECKOUT_DIR)/org.eclipse.paho.client.mqttv3/$(PAHO_JAR_NAME)
-JUNIT_JAR=../lib/junit.jar
-JAVA_AMQP_DIR=../../rabbitmq-java-client/
-JAVA_AMQP_CLASSES=$(JAVA_AMQP_DIR)build/classes/
-
-ALL_CLASSES:=$(foreach f,$(shell find src -name '*.class'),'$(f)')
-CP:=$(PAHO_JAR):$(JUNIT_JAR):$(TEST_SRC):$(JAVA_AMQP_CLASSES)
-
-HOSTNAME:=$(shell hostname)
-
-define class_from_path
-$(subst .class,,$(subst src.,,$(subst /,.,$(1))))
-endef
-
-.PHONY: test
-test: build_java_amqp
-       ant test -Dhostname=$(HOSTNAME)
-
-clean:
-       ant clean
-       rm -rf test_client
-
-
-distclean: clean
-       rm -rf $(CHECKOUT_DIR)
-
-$(CHECKOUT_DIR):
-       git clone $(UPSTREAM_GIT) $@
-       (cd $@ && git checkout $(REVISION)) || rm -rf $@
-
-
-.PHONY: build_java_amqp
-build_java_amqp: $(CHECKOUT_DIR)
-       $(MAKE) -C $(JAVA_AMQP_DIR) jar
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/build.properties b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/build.properties
deleted file mode 100644 (file)
index 25da28c..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-build.out=build
-test.resources=${build.out}/test/resources
-javac.debug=true
-test.javac.out=${build.out}/test/classes
-test.resources=${build.out}/test/resources
-test.src.home=src
-certs.dir=certs
-certs.password=test
-server.keystore=${test.resources}/server.jks
-server.cert=${certs.dir}/server/cert.pem
-ca.cert=${certs.dir}/testca/cacert.pem
-server.keystore.phrase=bunnyhop
-
-client.keystore=${test.resources}/client.jks
-client.keystore.phrase=bunnychow
-client.srckeystore=${certs.dir}/client/keycert.p12
-client.srckeystore.password=bunnychow
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/build.xml b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/build.xml
deleted file mode 100644 (file)
index 1f80b16..0000000
+++ /dev/null
@@ -1,150 +0,0 @@
-<?xml version="1.0"?>
-<project name="MQTT Java Test client" default="build">
-
-  <property name="output.folder" value="./target/work" />
-  <property name="ship.folder" value="./" />
-
-  <property file="build.properties"/>
-
-  <property name="java-amqp-client-path" location="../../rabbitmq-java-client" />
-
-  <path id="test.javac.classpath">
-    <!-- cf dist target, infra -->
-    <fileset dir="lib">
-      <include name="**/*.jar"/>
-    </fileset>
-    <fileset dir="test_client">
-      <include name="**/*.jar"/>
-    </fileset>
-    <fileset dir="${java-amqp-client-path}">
-      <include name="**/rabbitmq-client.jar" />
-    </fileset>
-  </path>
-
-  <target name="clean-paho" description="Clean compiled Eclipe Paho Test Client jars" >
-    <ant antfile="test_client/org.eclipse.paho.client.mqttv3/build.xml" useNativeBasedir="true" target="clean"/>
-  </target>
-
-  <target name="clean"  >
-    <delete dir="${build.out}"/>
-  </target>
-
-  <target name="build-paho" depends="clean-paho" description="Build the Eclipse Paho Test Client">
-    <ant antfile="test_client/org.eclipse.paho.client.mqttv3/build.xml" useNativeBasedir="true" />
-  </target>
-
-  <target name="detect-ssl">
-    <available property="SSL_AVAILABLE" file="${certs.dir}/client"/>
-    <property name="CLIENT_KEYSTORE_PHRASE" value="bunnies"/>
-    <property name="SSL_P12_PASSWORD" value="${certs.password}"/>
-  </target>
-
-  <target name="detect-tmpdir">
-    <property environment="env"/>
-    <condition property="TMPDIR" value="${env.TMPDIR}" else="/tmp">
-      <available file="${env.TMPDIR}" type="dir"/>
-    </condition>
-  </target>
-
-  <target name="make-server-keystore" if="SSL_AVAILABLE" depends="detect-ssl, detect-tmpdir">
-    <mkdir dir="${test.resources}"/>
-    <exec executable="keytool" failonerror="true" osfamily="unix">
-      <arg line="-import"/>
-      <arg value="-alias"/>
-      <arg value="server1"/>
-      <arg value="-file"/>
-      <arg value="${server.cert}"/>
-      <arg value="-keystore"/>
-      <arg value="${server.keystore}"/>
-      <arg value="-noprompt"/>
-      <arg value="-storepass"/>
-      <arg value="${server.keystore.phrase}"/>
-    </exec>
-    <exec executable="keytool" failonerror="true" osfamily="unix">
-          <arg line="-import"/>
-          <arg value="-alias"/>
-          <arg value="testca"/>
-          <arg value="-trustcacerts"/>
-          <arg value="-file"/>
-          <arg value="${ca.cert}"/>
-          <arg value="-keystore"/>
-          <arg value="${server.keystore}"/>
-          <arg value="-noprompt"/>
-          <arg value="-storepass"/>
-          <arg value="${server.keystore.phrase}"/>
-        </exec>
-  </target>
-
- <target name="make-client-keystore" if="SSL_AVAILABLE" depends="detect-ssl, detect-tmpdir">
-   <mkdir dir="${test.resources}"/>
-     <exec executable="keytool" failonerror="true" osfamily="unix">
-       <arg line="-importkeystore"/>
-       <arg line="-srckeystore" />
-       <arg line="${client.srckeystore}" />
-       <arg value="-srcstoretype"/>
-       <arg value="PKCS12"/>
-       <arg value="-srcstorepass"/>
-       <arg value="${client.srckeystore.password}"/>
-       <arg value="-destkeystore"/>
-       <arg value="${client.keystore}"/>
-       <arg value="-deststoretype"/>
-       <arg value="JKS"/>
-       <arg value="-noprompt"/>
-       <arg value="-storepass"/>
-      <arg value="${client.keystore.phrase}"/>
-    </exec>
-  </target>
-
-  <target name="test-build" depends="clean,build-paho">
-    <mkdir dir="${test.javac.out}"/>
-
-    <javac srcdir="./src"
-      destdir="${test.javac.out}"
-      debug="on"
-      includeantruntime="false" >
-      <classpath>
-        <path refid="test.javac.classpath"/>
-      </classpath>
-    </javac>
-  </target>
-
-  <target name="test-ssl" depends="test-build, make-server-keystore, make-client-keystore" if="SSL_AVAILABLE">
-    <junit printSummary="withOutAndErr"
-      haltOnFailure="true"
-      failureproperty="test.failure"
-      fork="yes">
-      <classpath>
-        <path refid="test.javac.classpath"/>
-        <pathelement path="${test.javac.out}"/>
-        <pathelement path="${test.resources}"/>
-      </classpath>
-      <jvmarg value="-Dhostname=${hostname}"/>
-      <jvmarg value="-Dserver.keystore.passwd=${server.keystore.phrase}"/>
-      <jvmarg value="-Dclient.keystore.passwd=${client.keystore.phrase}"/>
-      <formatter type="plain"/>
-      <formatter type="xml"/>
-      <test todir="${build.out}" name="com.rabbitmq.mqtt.test.tls.MqttSSLTest"/>
-    </junit>
-  </target>
-
-  <target name="test-server" depends="test-build">
-    <junit printSummary="withOutAndErr"
-        haltOnFailure="true"
-        failureproperty="test.failure"
-        fork="yes">
-        <classpath>
-          <path refid="test.javac.classpath"/>
-          <pathelement path="${test.javac.out}"/>
-        </classpath>
-
-      <formatter type="plain"/>
-      <formatter type="xml"/>
-      <test todir="${build.out}" name="com.rabbitmq.mqtt.test.MqttTest"/>
-    </junit>
-  </target>
-
-  <target name="test" depends="test-server, test-ssl" description="Build the test mqtt client libraries.">
-
-  </target>
-
-</project>
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/lib/junit.jar b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/lib/junit.jar
deleted file mode 100644 (file)
index 674d71e..0000000
Binary files a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/lib/junit.jar and /dev/null differ
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/rabbit-test.sh
deleted file mode 100755 (executable)
index b0c6585..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-CTL=$1
-USER="O=client,CN=$(hostname)"
-
-$CTL add_user "$USER" ''
-$CTL set_permissions -p / "$USER" ".*" ".*" ".*"
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/setup-rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/setup-rabbit-test.sh
deleted file mode 100755 (executable)
index 9b2708a..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh -e
-sh -e `dirname $0`/rabbit-test.sh "`dirname $0`/../../rabbitmq-server/scripts/rabbitmqctl -n rabbit-test"
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/MqttTest.java b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/MqttTest.java
deleted file mode 100644 (file)
index ca31b5c..0000000
+++ /dev/null
@@ -1,431 +0,0 @@
-//  The contents of this file are subject to the Mozilla Public License
-//  Version 1.1 (the "License"); you may not use this file except in
-//  compliance with the License. You may obtain a copy of the License
-//  at http://www.mozilla.org/MPL/
-//
-//  Software distributed under the License is distributed on an "AS IS"
-//  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-//  the License for the specific language governing rights and
-//  limitations under the License.
-//
-//  The Original Code is RabbitMQ.
-//
-//  The Initial Developer of the Original Code is GoPivotal, Inc.
-//  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-//
-
-package com.rabbitmq.mqtt.test;
-
-import com.rabbitmq.client.*;
-import junit.framework.Assert;
-import junit.framework.TestCase;
-import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
-import org.eclipse.paho.client.mqttv3.MqttCallback;
-import org.eclipse.paho.client.mqttv3.MqttClient;
-import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
-import org.eclipse.paho.client.mqttv3.MqttDeliveryToken;
-import org.eclipse.paho.client.mqttv3.MqttException;
-import org.eclipse.paho.client.mqttv3.MqttMessage;
-import org.eclipse.paho.client.mqttv3.MqttTopic;
-import org.eclipse.paho.client.mqttv3.internal.NetworkModule;
-import org.eclipse.paho.client.mqttv3.internal.TCPNetworkModule;
-import org.eclipse.paho.client.mqttv3.internal.wire.MqttInputStream;
-import org.eclipse.paho.client.mqttv3.internal.wire.MqttOutputStream;
-import org.eclipse.paho.client.mqttv3.internal.wire.MqttPingReq;
-
-import javax.net.SocketFactory;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.Socket;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.TimeoutException;
-
-/***
- *  MQTT v3.1 tests
- *  TODO: synchronise access to variables
- */
-
-public class MqttTest extends TestCase implements MqttCallback {
-
-    private final String host = "localhost";
-    private final int port = 1883;
-       private final String brokerUrl = "tcp://" + host + ":" + port;
-    private String clientId;
-    private String clientId2;
-    private MqttClient client;
-    private MqttClient client2;
-       private MqttConnectOptions conOpt;
-    private ArrayList<MqttMessage> receivedMessages;
-
-    private final byte[] payload = "payload".getBytes();
-    private final String topic = "test-topic";
-    private int testDelay = 2000;
-    private long lastReceipt;
-    private boolean expectConnectionFailure;
-
-    private ConnectionFactory connectionFactory;
-    private Connection conn;
-    private Channel ch;
-
-    // override 10s limit
-    private class MyConnOpts extends MqttConnectOptions {
-        private int keepAliveInterval = 60;
-        @Override
-        public void setKeepAliveInterval(int keepAliveInterval) {
-            this.keepAliveInterval = keepAliveInterval;
-        }
-        @Override
-        public int getKeepAliveInterval() {
-            return keepAliveInterval;
-        }
-    }
-
-    @Override
-    public void setUp() throws MqttException {
-        clientId = getClass().getSimpleName() + ((int) (10000*Math.random()));
-        clientId2 = clientId + "-2";
-        client = new MqttClient(brokerUrl, clientId, null);
-        client2 = new MqttClient(brokerUrl, clientId2, null);
-        conOpt = new MyConnOpts();
-        setConOpts(conOpt);
-        receivedMessages = new ArrayList();
-        expectConnectionFailure = false;
-    }
-
-    @Override
-    public  void tearDown() throws MqttException {
-        // clean any sticky sessions
-        setConOpts(conOpt);
-        client = new MqttClient(brokerUrl, clientId, null);
-        try {
-            client.connect(conOpt);
-            client.disconnect();
-        } catch (Exception ignored) {}
-
-        client2 = new MqttClient(brokerUrl, clientId2, null);
-        try {
-            client2.connect(conOpt);
-            client2.disconnect();
-        } catch (Exception ignored) {}
-    }
-
-    private void setUpAmqp() throws IOException, TimeoutException {
-        connectionFactory = new ConnectionFactory();
-        connectionFactory.setHost(host);
-        conn = connectionFactory.newConnection();
-        ch = conn.createChannel();
-    }
-
-    private void tearDownAmqp() throws IOException {
-        conn.close();
-    }
-
-    private void setConOpts(MqttConnectOptions conOpts) {
-        // provide authentication if the broker needs it
-        // conOpts.setUserName("guest");
-        // conOpts.setPassword("guest".toCharArray());
-        conOpts.setCleanSession(true);
-        conOpts.setKeepAliveInterval(60);
-    }
-
-    public void testConnectFirst() throws MqttException, IOException, InterruptedException {
-        NetworkModule networkModule = new TCPNetworkModule(SocketFactory.getDefault(), host, port, "");
-        networkModule.start();
-        MqttInputStream  mqttIn  = new MqttInputStream (networkModule.getInputStream());
-        MqttOutputStream mqttOut = new MqttOutputStream(networkModule.getOutputStream());
-        try {
-            mqttOut.write(new MqttPingReq());
-            mqttOut.flush();
-            mqttIn.readMqttWireMessage();
-            fail("Error expected if CONNECT is not first packet");
-        } catch (IOException ignored) {}
-    }
-
-    public void testInvalidUser() throws MqttException {
-        conOpt.setUserName("invalid-user");
-        try {
-            client.connect(conOpt);
-            fail("Authentication failure expected");
-        } catch (MqttException ex) {
-            Assert.assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode());
-        }
-    }
-
-    public void testInvalidPassword() throws MqttException {
-        conOpt.setUserName("invalid-user");
-        conOpt.setPassword("invalid-password".toCharArray());
-        try {
-            client.connect(conOpt);
-            fail("Authentication failure expected");
-        } catch (MqttException ex) {
-            Assert.assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode());
-        }
-    }
-
-
-    public void testSubscribeQos0() throws MqttException, InterruptedException {
-        client.connect(conOpt);
-        client.setCallback(this);
-        client.subscribe(topic, 0);
-
-        publish(client, topic, 0, payload);
-        Thread.sleep(testDelay);
-        Assert.assertEquals(1, receivedMessages.size());
-        Assert.assertEquals(true, Arrays.equals(receivedMessages.get(0).getPayload(), payload));
-        Assert.assertEquals(0, receivedMessages.get(0).getQos());
-        client.disconnect();
-    }
-
-    public void testSubscribeUnsubscribe() throws MqttException, InterruptedException {
-        client.connect(conOpt);
-        client.setCallback(this);
-        client.subscribe(topic, 0);
-
-        publish(client, topic, 1, payload);
-        Thread.sleep(testDelay);
-        Assert.assertEquals(1, receivedMessages.size());
-        Assert.assertEquals(true, Arrays.equals(receivedMessages.get(0).getPayload(), payload));
-        Assert.assertEquals(0, receivedMessages.get(0).getQos());
-
-        client.unsubscribe(topic);
-        publish(client, topic, 0, payload);
-        Thread.sleep(testDelay);
-        Assert.assertEquals(1, receivedMessages.size());
-        client.disconnect();
-    }
-
-    public void testSubscribeQos1() throws MqttException, InterruptedException {
-        client.connect(conOpt);
-        client.setCallback(this);
-        client.subscribe(topic, 1);
-
-        publish(client, topic, 0, payload);
-        publish(client, topic, 1, payload);
-        Thread.sleep(testDelay);
-
-        Assert.assertEquals(2, receivedMessages.size());
-        MqttMessage msg1 = receivedMessages.get(0);
-        MqttMessage msg2 = receivedMessages.get(1);
-
-        Assert.assertEquals(true, Arrays.equals(msg1.getPayload(), payload));
-        Assert.assertEquals(0, msg1.getQos());
-
-        Assert.assertEquals(true, Arrays.equals(msg2.getPayload(), payload));
-        Assert.assertEquals(1, msg2.getQos());
-
-        client.disconnect();
-    }
-
-    public void testTopics() throws MqttException, InterruptedException {
-        client.connect(conOpt);
-        client.setCallback(this);
-        client.subscribe("/+/mid/#");
-        String cases[] = {"/pre/mid2", "/mid", "/a/mid/b/c/d", "/frob/mid"};
-        List<String> expected = Arrays.asList("/a/mid/b/c/d", "/frob/mid");
-        for(String example : cases){
-            publish(client, example, 0, example.getBytes());
-        }
-        Thread.sleep(testDelay);
-        Assert.assertEquals(expected.size(), receivedMessages.size());
-        for (MqttMessage m : receivedMessages){
-            expected.contains(new String(m.getPayload()));
-        }
-        client.disconnect();
-    }
-
-    public void testNonCleanSession() throws MqttException, InterruptedException {
-        conOpt.setCleanSession(false);
-        client.connect(conOpt);
-        client.subscribe(topic, 1);
-        client.disconnect();
-
-        client2.connect(conOpt);
-        publish(client2, topic, 1, payload);
-        client2.disconnect();
-
-        client.setCallback(this);
-        client.connect(conOpt);
-
-        Thread.sleep(testDelay);
-        Assert.assertEquals(1, receivedMessages.size());
-        Assert.assertEquals(true, Arrays.equals(receivedMessages.get(0).getPayload(), payload));
-        client.disconnect();
-    }
-
-    public void testCleanSession() throws MqttException, InterruptedException {
-        conOpt.setCleanSession(false);
-        client.connect(conOpt);
-        client.subscribe(topic, 1);
-        client.disconnect();
-
-        client2.connect(conOpt);
-        publish(client2, topic, 1, payload);
-        client2.disconnect();
-
-        conOpt.setCleanSession(true);
-        client.connect(conOpt);
-        client.setCallback(this);
-        client.subscribe(topic, 1);
-
-        Thread.sleep(testDelay);
-        Assert.assertEquals(0, receivedMessages.size());
-        client.unsubscribe(topic);
-        client.disconnect();
-    }
-
-    public void testMultipleClientIds() throws MqttException, InterruptedException {
-        client.connect(conOpt);
-        client2 = new MqttClient(brokerUrl, clientId, null);
-        client2.connect(conOpt);
-        Thread.sleep(testDelay);
-        Assert.assertFalse(client.isConnected());
-        client2.disconnect();
-    }
-
-    public void testPing() throws MqttException, InterruptedException {
-        conOpt.setKeepAliveInterval(1);
-        client.connect(conOpt);
-        Thread.sleep(3000);
-        Assert.assertEquals(true, client.isConnected());
-        client.disconnect();
-    }
-
-    public void testWill() throws MqttException, InterruptedException, IOException {
-        client2.connect(conOpt);
-        client2.subscribe(topic);
-        client2.setCallback(this);
-
-        final SocketFactory factory = SocketFactory.getDefault();
-        final ArrayList<Socket> sockets = new ArrayList<Socket>();
-        SocketFactory testFactory = new SocketFactory() {
-            public Socket createSocket(String s, int i) throws IOException {
-                Socket sock = factory.createSocket(s, i);
-                sockets.add(sock);
-                return sock;
-            }
-            public Socket createSocket(String s, int i, InetAddress a, int i1) throws IOException {
-                return null;
-            }
-            public Socket createSocket(InetAddress a, int i) throws IOException {
-                return null;
-            }
-            public Socket createSocket(InetAddress a, int i, InetAddress a1, int i1) throws IOException {
-                return null;
-            }
-            @Override
-            public Socket createSocket() throws IOException {
-                Socket sock = new Socket();
-                sockets.add(sock);
-                return sock;
-            }
-        };
-        conOpt.setSocketFactory(testFactory);
-        MqttTopic willTopic = client.getTopic(topic);
-        conOpt.setWill(willTopic, payload, 0, false);
-        conOpt.setCleanSession(false);
-        client.connect(conOpt);
-
-        Assert.assertEquals(1, sockets.size());
-        expectConnectionFailure = true;
-        sockets.get(0).close();
-        Thread.sleep(testDelay);
-
-        Assert.assertEquals(1, receivedMessages.size());
-        Assert.assertEquals(true, Arrays.equals(receivedMessages.get(0).getPayload(), payload));
-        client2.disconnect();
-    }
-
-    public void testSubscribeMultiple() throws MqttException {
-        client.connect(conOpt);
-        publish(client, "/topic/1", 1, "msq1-qos1".getBytes());
-
-        client2.connect(conOpt);
-        client2.setCallback(this);
-        client2.subscribe("/topic/#");
-        client2.subscribe("/topic/#");
-
-        publish(client, "/topic/2", 0, "msq2-qos0".getBytes());
-        publish(client, "/topic/3", 1, "msq3-qos1".getBytes());
-        publish(client, topic, 0, "msq4-qos0".getBytes());
-        publish(client, topic, 1, "msq4-qos1".getBytes());
-
-        Assert.assertEquals(2, receivedMessages.size());
-        client.disconnect();
-        client2.disconnect();
-    }
-
-    public void testPublishMultiple() throws MqttException, InterruptedException {
-        int pubCount = 50;
-        for (int subQos=0; subQos < 2; subQos++){
-            for (int pubQos=0; pubQos < 2; pubQos++){
-                client.connect(conOpt);
-                client.subscribe(topic, subQos);
-                client.setCallback(this);
-                long start = System.currentTimeMillis();
-                for (int i=0; i<pubCount; i++){
-                    publish(client, topic, pubQos, payload);
-                }
-                Thread.sleep(testDelay);
-                Assert.assertEquals(pubCount, receivedMessages.size());
-                System.out.println("publish QOS" + pubQos + " subscribe QOS" + subQos +
-                                   ", " + pubCount + " msgs took " +
-                                   (lastReceipt - start)/1000.0 + "sec");
-                client.disconnect();
-                receivedMessages.clear();
-            }
-        }
-    }
-
-    public void testInteropM2A() throws MqttException, IOException, InterruptedException, TimeoutException {
-        setUpAmqp();
-        String queue = ch.queueDeclare().getQueue();
-        ch.queueBind(queue, "amq.topic", topic);
-
-        client.connect(conOpt);
-        publish(client, topic, 1, payload);
-        client.disconnect();
-        Thread.sleep(testDelay);
-
-        GetResponse response = ch.basicGet(queue, true);
-        assertTrue(Arrays.equals(payload, response.getBody()));
-        assertNull(ch.basicGet(queue, true));
-        tearDownAmqp();
-    }
-
-    public void testInteropA2M() throws MqttException, IOException, InterruptedException, TimeoutException {
-        client.connect(conOpt);
-        client.setCallback(this);
-        client.subscribe(topic, 1);
-
-        setUpAmqp();
-        ch.basicPublish("amq.topic", topic, MessageProperties.MINIMAL_BASIC, payload);
-        tearDownAmqp();
-        Thread.sleep(testDelay);
-
-        Assert.assertEquals(1, receivedMessages.size());
-        client.disconnect();
-    }
-
-    private void publish(MqttClient client, String topicName, int qos, byte[] payload) throws MqttException {
-       MqttTopic topic = client.getTopic(topicName);
-               MqttMessage message = new MqttMessage(payload);
-       message.setQos(qos);
-       MqttDeliveryToken token = topic.publish(message);
-       token.waitForCompletion();
-    }
-
-    public void connectionLost(Throwable cause) {
-        if (!expectConnectionFailure)
-            fail("Connection unexpectedly lost");
-    }
-
-    public void messageArrived(String topic, MqttMessage message) throws Exception {
-        lastReceipt = System.currentTimeMillis();
-        receivedMessages.add(message);
-    }
-
-    public void deliveryComplete(IMqttDeliveryToken token) {
-    }
-}
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/rabbit-test.sh
deleted file mode 100644 (file)
index 3601b4c..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-CTL=$1
-USER="O=client,CN=$(hostname)"
-
-# Test direct connections
-$CTL add_user "$USER" ''
-$CTL set_permissions -p / "$USER" ".*" ".*" ".*"
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh
deleted file mode 100644 (file)
index 9b2708a..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh -e
-sh -e `dirname $0`/rabbit-test.sh "`dirname $0`/../../rabbitmq-server/scripts/rabbitmqctl -n rabbit-test"
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/tls/MqttSSLTest.java b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/tls/MqttSSLTest.java
deleted file mode 100644 (file)
index 8bf9629..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-//  The contents of this file are subject to the Mozilla Public License
-//  Version 1.1 (the "License"); you may not use this file except in
-//  compliance with the License. You may obtain a copy of the License
-//  at http://www.mozilla.org/MPL/
-//
-//  Software distributed under the License is distributed on an "AS IS"
-//  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-//  the License for the specific language governing rights and
-//  limitations under the License.
-//
-//  The Original Code is RabbitMQ.
-//
-//  The Initial Developer of the Original Code is GoPivotal, Inc.
-//  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-//
-
-package com.rabbitmq.mqtt.test.tls;
-
-import junit.framework.Assert;
-import junit.framework.TestCase;
-import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
-import org.eclipse.paho.client.mqttv3.MqttCallback;
-import org.eclipse.paho.client.mqttv3.MqttClient;
-import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
-import org.eclipse.paho.client.mqttv3.MqttException;
-import org.eclipse.paho.client.mqttv3.MqttMessage;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-
-/**
- * MQTT v3.1 tests
- * TODO: synchronise access to variables
- */
-
-public class MqttSSLTest extends TestCase implements MqttCallback {
-
-    private final int port = 8883;
-    private final String brokerUrl = "ssl://" + getHost() + ":" + port;
-    private String clientId;
-    private String clientId2;
-    private MqttClient client;
-    private MqttClient client2;
-    private MqttConnectOptions conOpt;
-    private ArrayList<MqttMessage> receivedMessages;
-
-    private long lastReceipt;
-    private boolean expectConnectionFailure;
-
-
-    private static final String getHost() {
-        Object host = System.getProperty("hostname");
-        assertNotNull(host);
-        return host.toString();
-    }
-
-    // override 10s limit
-    private class MyConnOpts extends MqttConnectOptions {
-        private int keepAliveInterval = 60;
-
-        @Override
-        public void setKeepAliveInterval(int keepAliveInterval) {
-            this.keepAliveInterval = keepAliveInterval;
-        }
-
-        @Override
-        public int getKeepAliveInterval() {
-            return keepAliveInterval;
-        }
-    }
-
-
-    @Override
-    public void setUp() throws MqttException, IOException {
-        clientId = getClass().getSimpleName() + ((int) (10000 * Math.random()));
-        clientId2 = clientId + "-2";
-        client = new MqttClient(brokerUrl, clientId, null);
-        client2 = new MqttClient(brokerUrl, clientId2, null);
-        conOpt = new MyConnOpts();
-        conOpt.setSocketFactory(MutualAuth.getSSLContextWithoutCert().getSocketFactory());
-        setConOpts(conOpt);
-        receivedMessages = new ArrayList<MqttMessage>();
-        expectConnectionFailure = false;
-    }
-
-    @Override
-    public void tearDown() throws MqttException {
-        // clean any sticky sessions
-        setConOpts(conOpt);
-        client = new MqttClient(brokerUrl, clientId, null);
-        try {
-            client.connect(conOpt);
-            client.disconnect();
-        } catch (Exception ignored) {
-        }
-
-        client2 = new MqttClient(brokerUrl, clientId2, null);
-        try {
-            client2.connect(conOpt);
-            client2.disconnect();
-        } catch (Exception ignored) {
-        }
-    }
-
-
-    private void setConOpts(MqttConnectOptions conOpts) {
-        // provide authentication if the broker needs it
-        // conOpts.setUserName("guest");
-        // conOpts.setPassword("guest".toCharArray());
-        conOpts.setCleanSession(true);
-        conOpts.setKeepAliveInterval(60);
-    }
-
-    public void testCertLogin() throws MqttException {
-        try {
-            conOpt.setSocketFactory(MutualAuth.getSSLContextWithClientCert().getSocketFactory());
-            client.connect(conOpt);
-        } catch (Exception e) {
-            e.printStackTrace();
-            fail("Exception: " + e.getMessage());
-        }
-    }
-
-
-    public void testInvalidUser() throws MqttException {
-        conOpt.setUserName("invalid-user");
-        try {
-            client.connect(conOpt);
-            fail("Authentication failure expected");
-        } catch (MqttException ex) {
-            Assert.assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode());
-        } catch (Exception e) {
-            e.printStackTrace();
-            fail("Exception: " + e.getMessage());
-        }
-    }
-
-    public void testInvalidPassword() throws MqttException {
-        conOpt.setUserName("invalid-user");
-        conOpt.setPassword("invalid-password".toCharArray());
-        try {
-            client.connect(conOpt);
-            fail("Authentication failure expected");
-        } catch (MqttException ex) {
-            Assert.assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode());
-        } catch (Exception e) {
-            e.printStackTrace();
-            fail("Exception: " + e.getMessage());
-        }
-    }
-
-
-    public void connectionLost(Throwable cause) {
-        if (!expectConnectionFailure)
-            fail("Connection unexpectedly lost");
-    }
-
-    public void messageArrived(String topic, MqttMessage message) throws Exception {
-        lastReceipt = System.currentTimeMillis();
-        receivedMessages.add(message);
-    }
-
-    public void deliveryComplete(IMqttDeliveryToken token) {
-    }
-}
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/tls/MutualAuth.java b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/tls/MutualAuth.java
deleted file mode 100644 (file)
index a2d5d25..0000000
+++ /dev/null
@@ -1,84 +0,0 @@
-package com.rabbitmq.mqtt.test.tls;
-
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.TrustManagerFactory;
-import java.io.IOException;
-import java.security.KeyStore;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-import java.security.cert.CertificateException;
-import java.util.Arrays;
-import java.util.List;
-
-public class MutualAuth {
-
-    private MutualAuth() {
-
-    }
-
-    private static String getStringProperty(String propertyName) throws IllegalArgumentException {
-        Object value = System.getProperty(propertyName);
-        if (value == null) throw new IllegalArgumentException("Property: " + propertyName + " not found");
-        return value.toString();
-    }
-
-    private static TrustManagerFactory getServerTrustManagerFactory() throws NoSuchAlgorithmException, CertificateException, IOException, KeyStoreException {
-        char[] trustPhrase = getStringProperty("server.keystore.passwd").toCharArray();
-        MutualAuth dummy = new MutualAuth();
-
-        // Server TrustStore
-        KeyStore tks = KeyStore.getInstance("JKS");
-        tks.load(dummy.getClass().getResourceAsStream("/server.jks"), trustPhrase);
-
-        TrustManagerFactory tmf = TrustManagerFactory.getInstance("X509");
-        tmf.init(tks);
-
-        return tmf;
-    }
-
-    public static SSLContext getSSLContextWithClientCert() throws IOException {
-
-        char[] clientPhrase = getStringProperty("client.keystore.passwd").toCharArray();
-
-        MutualAuth dummy = new MutualAuth();
-        try {
-            SSLContext sslContext = getVanillaSSLContext();
-            // Client Keystore
-            KeyStore ks = KeyStore.getInstance("JKS");
-            ks.load(dummy.getClass().getResourceAsStream("/client.jks"), clientPhrase);
-            KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
-            kmf.init(ks, clientPhrase);
-
-            sslContext.init(kmf.getKeyManagers(), getServerTrustManagerFactory().getTrustManagers(), null);
-            return sslContext;
-        } catch (Exception e) {
-            throw new IOException(e);
-        }
-
-    }
-
-    private static SSLContext getVanillaSSLContext() throws NoSuchAlgorithmException {
-        SSLContext result = null;
-        List<String> xs = Arrays.asList("TLSv1.2", "TLSv1.1", "TLSv1");
-        for(String x : xs) {
-            try {
-                return SSLContext.getInstance(x);
-            } catch (NoSuchAlgorithmException nae) {
-                // keep trying
-            }
-        }
-        throw new NoSuchAlgorithmException("Could not obtain an SSLContext for TLS 1.0-1.2");
-    }
-
-    public static SSLContext getSSLContextWithoutCert() throws IOException {
-        try {
-            SSLContext sslContext = getVanillaSSLContext();
-            sslContext.init(null, getServerTrustManagerFactory().getTrustManagers(), null);
-            return sslContext;
-        } catch (Exception e) {
-            throw new IOException(e);
-        }
-    }
-
-}
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/rabbit_mqtt_util_tests.erl b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/rabbit_mqtt_util_tests.erl
deleted file mode 100644 (file)
index be307bf..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_mqtt_util_tests).
-
--include_lib("eunit/include/eunit.hrl").
-
-all_test_() ->
-    {setup,
-     fun setup/0,
-     [fun coerce_exchange/0,
-      fun coerce_vhost/0,
-      fun coerce_default_user/0,
-      fun coerce_default_pass/0]}.
-
-setup() ->
-    application:load(rabbitmq_mqtt).
-
-coerce_exchange() ->
-    ?assertEqual(<<"amq.topic">>, rabbit_mqtt_util:env(exchange)).
-
-coerce_vhost() ->
-    ?assertEqual(<<"/">>, rabbit_mqtt_util:env(vhost)).
-
-coerce_default_user() ->
-    ?assertEqual(<<"guest_user">>, rabbit_mqtt_util:env(default_user)).
-
-coerce_default_pass() ->
-    ?assertEqual(<<"guest_pass">>, rabbit_mqtt_util:env(default_pass)).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/rabbitmq_mqtt_standalone.app.src b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/rabbitmq_mqtt_standalone.app.src
deleted file mode 100644 (file)
index 1c9fcb4..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-{application, rabbitmq_mqtt,
- [{description, "RabbitMQ MQTT Adapter"},
-  {vsn, "%%VSN%%"},
-  {modules, []},
-  {registered, []},
-  {mod, {rabbit_mqtt, []}},
-  {env, [{default_user, "guest_user"},
-         {default_pass, "guest_pass"},
-         {ssl_cert_login,false},
-         {allow_anonymous, true},
-         {vhost, "/"},
-         {exchange, "amq.topic"},
-         {subscription_ttl, 1800000}, % 30 min
-         {prefetch, 10},
-         {ssl_listeners, []},
-         {tcp_listeners, [1883]},
-         {tcp_listen_options, [binary,
-                               {packet,    raw},
-                               {reuseaddr, true},
-                               {backlog,   128},
-                               {nodelay,   true}]}]},
-  {applications, [kernel, stdlib, rabbit, amqp_client]}]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/test.config b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/test.config
deleted file mode 100644 (file)
index 3d6baff..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-[{rabbitmq_mqtt, [
-   {ssl_cert_login,   true},
-   {allow_anonymous,  true},
-   {tcp_listeners,    [1883]},
-   {ssl_listeners,    [8883]}
-   ]},
- {rabbit, [{ssl_options, [{cacertfile,"%%CERTS_DIR%%/testca/cacert.pem"},
-                          {certfile,"%%CERTS_DIR%%/server/cert.pem"},
-                          {keyfile,"%%CERTS_DIR%%/server/key.pem"},
-                          {verify,verify_peer},
-                          {fail_if_no_peer_cert,false}
-                         ]}
-          ]}
-].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/test.sh b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/test.sh
deleted file mode 100755 (executable)
index ae60a49..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-$MAKE -C `dirname $0` build_java_amqp
-$MAKE -C `dirname $0` test
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/Makefile b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/README b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/README
deleted file mode 100644 (file)
index 8d6d3cc..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-Adds information on shovel status to the management plugin. Build it
-like any other plugin.
-
-If you have a heterogenous cluster (where the nodes have different
-plugins installed), this should be installed on the same nodes as the
-management plugin.
-
-Strictly speaking the shovel does not need to be installed, but then
-it won't tell you much.
-
-The HTTP API is very simple: GET /api/shovels.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/etc/rabbit-test.config b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/etc/rabbit-test.config
deleted file mode 100644 (file)
index a0546a2..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-%% We test sample retention separately in rabbit_mgmt_test_db_unit,
-%% but for rabbit_mgmt_test_db we want to make sure samples never
-%% expire.
-[
- {rabbitmq_shovel,
-  [{shovels,
-     [{'my-static',
-       [{sources, [{broker, "amqp://"},
-                   {declarations, [{'queue.declare', [{queue, <<"static">>}]}]}
-                  ]},
-         {destinations, [{broker, "amqp://"}]},
-         {queue, <<"static">>},
-         {publish_fields, [ {exchange, <<"">>},
-                            {routing_key, <<"static2">>}
-                          ]}
-       ]}
-      ]}
-    ]}
-].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/package.mk b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/package.mk
deleted file mode 100644 (file)
index 6c3bac2..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-management rabbitmq-shovel
-WITH_BROKER_TEST_COMMANDS:=rabbit_shovel_mgmt_test_all:all_tests()
-WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/etc/rabbit-test
-
-CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f)
-define construct_app_commands
-       cp -r $(PACKAGE_DIR)/priv $(APP_DIR)
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/test/src/rabbit_shovel_mgmt_test_all.erl b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/test/src/rabbit_shovel_mgmt_test_all.erl
deleted file mode 100644 (file)
index b82c4e1..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_shovel_mgmt_test_all).
-
--export([all_tests/0]).
-
-all_tests() ->
-    ok = eunit:test(tests(rabbit_shovel_mgmt_test_http, 60), [verbose]).
-
-tests(Module, Timeout) ->
-    {foreach, fun() -> ok end,
-     [{timeout, Timeout, fun () -> Module:F() end} ||
-         {F, _Arity} <- proplists:get_value(exports, Module:module_info()),
-         string:right(atom_to_list(F), 5) =:= "_test"]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/test/src/rabbit_shovel_mgmt_test_http.erl b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/test/src/rabbit_shovel_mgmt_test_http.erl
deleted file mode 100644 (file)
index b3407ce..0000000
+++ /dev/null
@@ -1,217 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_shovel_mgmt_test_http).
-
--include_lib("rabbitmq_management/include/rabbit_mgmt_test.hrl").
-
--import(rabbit_misc, [pget/2]).
-
-shovels_test() ->
-    http_put("/users/admin",  [{password, <<"admin">>},
-                               {tags, <<"administrator">>}], ?NO_CONTENT),
-    http_put("/users/mon",    [{password, <<"mon">>},
-                               {tags, <<"monitoring">>}], ?NO_CONTENT),
-    http_put("/vhosts/v", none, ?NO_CONTENT),
-    Perms = [{configure, <<".*">>},
-             {write,     <<".*">>},
-             {read,      <<".*">>}],
-    http_put("/permissions/v/guest",  Perms, ?NO_CONTENT),
-    http_put("/permissions/v/admin",  Perms, ?NO_CONTENT),
-    http_put("/permissions/v/mon",    Perms, ?NO_CONTENT),
-
-    [http_put("/parameters/shovel/" ++ V ++ "/my-dynamic",
-              [{value, [{'src-uri', <<"amqp://">>},
-                        {'dest-uri', <<"amqp://">>},
-                        {'src-queue', <<"test">>},
-                        {'dest-queue', <<"test2">>}]}], ?NO_CONTENT)
-     || V <- ["%2f", "v"]],
-    Static = [{name,  <<"my-static">>},
-              {type,  <<"static">>}],
-    Dynamic1 = [{name,  <<"my-dynamic">>},
-                {vhost, <<"/">>},
-                {type,  <<"dynamic">>}],
-    Dynamic2 = [{name,  <<"my-dynamic">>},
-                {vhost, <<"v">>},
-                {type,  <<"dynamic">>}],
-    Assert = fun (Req, User, Res) ->
-                     assert_list(Res, http_get(Req, User, User, ?OK))
-             end,
-    Assert("/shovels",     "guest", [Static, Dynamic1, Dynamic2]),
-    Assert("/shovels/%2f", "guest", [Dynamic1]),
-    Assert("/shovels/v",   "guest", [Dynamic2]),
-    Assert("/shovels",     "admin", [Static, Dynamic2]),
-    Assert("/shovels/%2f", "admin", []),
-    Assert("/shovels/v",   "admin", [Dynamic2]),
-    Assert("/shovels",     "mon", [Dynamic2]),
-    Assert("/shovels/%2f", "mon", []),
-    Assert("/shovels/v",   "mon", [Dynamic2]),
-
-    http_delete("/vhosts/v", ?NO_CONTENT),
-    http_delete("/users/admin", ?NO_CONTENT),
-    http_delete("/users/mon", ?NO_CONTENT),
-    ok.
-
-%% It's a bit arbitrary to be testing this here, but we want to be
-%% able to test that mgmt extensions can be started and stopped
-%% *somewhere*, and here is as good a place as any.
-dynamic_plugin_enable_disable_test() ->
-    http_get("/shovels", ?OK),
-    disable_plugin("rabbitmq_shovel_management"),
-    http_get("/shovels", ?NOT_FOUND),
-    http_get("/overview", ?OK),
-    disable_plugin("rabbitmq_management"),
-    http_fail("/shovels"),
-    http_fail("/overview"),
-    enable_plugin("rabbitmq_management"),
-    http_get("/shovels", ?NOT_FOUND),
-    http_get("/overview", ?OK),
-    enable_plugin("rabbitmq_shovel_management"),
-    http_get("/shovels", ?OK),
-    http_get("/overview", ?OK),
-    passed.
-
-%%---------------------------------------------------------------------------
-%% TODO this is mostly copypasta from the mgmt tests
-
-http_get(Path) ->
-    http_get(Path, ?OK).
-
-http_get(Path, CodeExp) ->
-    http_get(Path, "guest", "guest", CodeExp).
-
-http_get(Path, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(get, Path, [auth_header(User, Pass)]),
-    assert_code(CodeExp, CodeAct, "GET", Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-http_fail(Path) ->
-    {error, {failed_connect, _}} = req(get, Path, []).
-
-http_put(Path, List, CodeExp) ->
-    http_put_raw(Path, format_for_upload(List), CodeExp).
-
-http_put(Path, List, User, Pass, CodeExp) ->
-    http_put_raw(Path, format_for_upload(List), User, Pass, CodeExp).
-
-http_post(Path, List, CodeExp) ->
-    http_post_raw(Path, format_for_upload(List), CodeExp).
-
-http_post(Path, List, User, Pass, CodeExp) ->
-    http_post_raw(Path, format_for_upload(List), User, Pass, CodeExp).
-
-format_for_upload(none) ->
-    <<"">>;
-format_for_upload(List) ->
-    iolist_to_binary(mochijson2:encode({struct, List})).
-
-http_put_raw(Path, Body, CodeExp) ->
-    http_upload_raw(put, Path, Body, "guest", "guest", CodeExp).
-
-http_put_raw(Path, Body, User, Pass, CodeExp) ->
-    http_upload_raw(put, Path, Body, User, Pass, CodeExp).
-
-http_post_raw(Path, Body, CodeExp) ->
-    http_upload_raw(post, Path, Body, "guest", "guest", CodeExp).
-
-http_post_raw(Path, Body, User, Pass, CodeExp) ->
-    http_upload_raw(post, Path, Body, User, Pass, CodeExp).
-
-http_upload_raw(Type, Path, Body, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(Type, Path, [auth_header(User, Pass)], Body),
-    assert_code(CodeExp, CodeAct, Type, Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-http_delete(Path, CodeExp) ->
-    http_delete(Path, "guest", "guest", CodeExp).
-
-http_delete(Path, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(delete, Path, [auth_header(User, Pass)]),
-    assert_code(CodeExp, CodeAct, "DELETE", Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-assert_code(CodeExp, CodeAct, Type, Path, Body) ->
-    case CodeExp of
-        CodeAct -> ok;
-        _       -> throw({expected, CodeExp, got, CodeAct, type, Type,
-                          path, Path, body, Body})
-    end.
-
-req(Type, Path, Headers) ->
-    httpc:request(Type, {?PREFIX ++ Path, Headers}, ?HTTPC_OPTS, []).
-
-req(Type, Path, Headers, Body) ->
-    httpc:request(Type, {?PREFIX ++ Path, Headers, "application/json", Body},
-                  ?HTTPC_OPTS, []).
-
-decode(?OK, _Headers,  ResBody) -> cleanup(mochijson2:decode(ResBody));
-decode(_,    Headers, _ResBody) -> Headers.
-
-cleanup(L) when is_list(L) ->
-    [cleanup(I) || I <- L];
-cleanup({struct, I}) ->
-    cleanup(I);
-cleanup({K, V}) when is_binary(K) ->
-    {list_to_atom(binary_to_list(K)), cleanup(V)};
-cleanup(I) ->
-    I.
-
-auth_header(Username, Password) ->
-    {"Authorization",
-     "Basic " ++ binary_to_list(base64:encode(Username ++ ":" ++ Password))}.
-
-assert_list(Exp, Act) ->
-    case length(Exp) == length(Act) of
-        true  -> ok;
-        false -> throw({expected, Exp, actual, Act})
-    end,
-    [case length(lists:filter(fun(ActI) -> test_item(ExpI, ActI) end, Act)) of
-         1 -> ok;
-         N -> throw({found, N, ExpI, in, Act})
-     end || ExpI <- Exp].
-
-assert_item(Exp, Act) ->
-    case test_item0(Exp, Act) of
-        [] -> ok;
-        Or -> throw(Or)
-    end.
-
-test_item(Exp, Act) ->
-    case test_item0(Exp, Act) of
-        [] -> true;
-        _  -> false
-    end.
-
-test_item0(Exp, Act) ->
-    [{did_not_find, ExpI, in, Act} || ExpI <- Exp,
-                                      not lists:member(ExpI, Act)].
-%%---------------------------------------------------------------------------
-
-enable_plugin(Plugin) ->
-    plugins_action(enable, [Plugin], []).
-
-disable_plugin(Plugin) ->
-    plugins_action(disable, [Plugin], []).
-
-plugins_action(Command, Args, Opts) ->
-    PluginsFile = os:getenv("RABBITMQ_ENABLED_PLUGINS_FILE"),
-    PluginsDir = os:getenv("RABBITMQ_PLUGINS_DIR"),
-    Node = node(),
-    rpc:call(Node, rabbit_plugins_main, action,
-             [Command, Node, Args, Opts, PluginsFile, PluginsDir]).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-shovel/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-shovel/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/Makefile b/rabbitmq-server/plugins-src/rabbitmq-shovel/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/README b/rabbitmq-server/plugins-src/rabbitmq-shovel/README
deleted file mode 100644 (file)
index 1d7d1b0..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-Generic build instructions are at:
-        http://www.rabbitmq.com/plugin-development.html
-
-See the http://www.rabbitmq.com/shovel.html page for full instructions.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/generate_deps b/rabbitmq-server/plugins-src/rabbitmq-shovel/generate_deps
deleted file mode 100644 (file)
index 29587b5..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
--mode(compile).
-
-main([IncludeDir, ErlDir, EbinDir, TargetFile]) ->
-    ErlDirContents = filelib:wildcard("*.erl", ErlDir),
-    ErlFiles = [filename:join(ErlDir, FileName) || FileName <- ErlDirContents],
-    Modules = sets:from_list(
-                [list_to_atom(filename:basename(FileName, ".erl")) ||
-                    FileName <- ErlDirContents]),
-    Headers = sets:from_list(
-                [filename:join(IncludeDir, FileName) ||
-                    FileName <- filelib:wildcard("*.hrl", IncludeDir)]),
-    Deps = lists:foldl(
-             fun (Path, Deps1) ->
-                     dict:store(Path, detect_deps(IncludeDir, EbinDir,
-                                                  Modules, Headers, Path),
-                                Deps1)
-             end, dict:new(), ErlFiles),
-    {ok, Hdl} = file:open(TargetFile, [write, delayed_write]),
-    dict:fold(
-      fun (_Path, [], ok) ->
-              ok;
-          (Path, Dep, ok) ->
-              Module = filename:basename(Path, ".erl"),
-              ok = file:write(Hdl, [EbinDir, "/", Module, ".beam: ",
-                                   Path]),
-              ok = sets:fold(fun (E, ok) -> file:write(Hdl, [" ", E]) end,
-                             ok, Dep),
-              file:write(Hdl, ["\n"])
-      end, ok, Deps),
-    ok = file:write(Hdl, [TargetFile, ": ", escript:script_name(), "\n"]),
-    ok = file:sync(Hdl),
-    ok = file:close(Hdl).
-
-detect_deps(IncludeDir, EbinDir, Modules, Headers, Path) ->
-    {ok, Forms} = epp:parse_file(Path, [IncludeDir], [{use_specs, true}]),
-    lists:foldl(
-      fun ({attribute, _LineNumber, Attribute, Behaviour}, Deps)
-          when Attribute =:= behaviour orelse Attribute =:= behavior ->
-              case sets:is_element(Behaviour, Modules) of
-                  true  -> sets:add_element(
-                             [EbinDir, "/", atom_to_list(Behaviour), ".beam"],
-                             Deps);
-                  false -> Deps
-              end;
-          ({attribute, _LineNumber, file, {FileName, _LineNumber1}}, Deps) ->
-              case sets:is_element(FileName, Headers) of
-                  true  -> sets:add_element(FileName, Deps);
-                  false -> Deps
-              end;
-          (_Form, Deps) ->
-              Deps
-      end, sets:new(), Forms).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/package.mk b/rabbitmq-server/plugins-src/rabbitmq-shovel/package.mk
deleted file mode 100644 (file)
index 6cf8254..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-erlang-client
-WITH_BROKER_TEST_COMMANDS:=rabbit_shovel_test_all:all_tests()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test.erl b/rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test.erl
deleted file mode 100644 (file)
index 6619112..0000000
+++ /dev/null
@@ -1,250 +0,0 @@
-%%  The contents of this file are subject to the Mozilla Public License
-%%  Version 1.1 (the "License"); you may not use this file except in
-%%  compliance with the License. You may obtain a copy of the License
-%%  at http://www.mozilla.org/MPL/
-%%
-%%  Software distributed under the License is distributed on an "AS IS"
-%%  basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%%  the License for the specific language governing rights and
-%%  limitations under the License.
-%%
-%%  The Original Code is RabbitMQ.
-%%
-%%  The Initial Developer of the Original Code is GoPivotal, Inc.
-%%  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_shovel_test).
--export([test/0]).
--include_lib("amqp_client/include/amqp_client.hrl").
--include_lib("eunit/include/eunit.hrl").
-
--define(EXCHANGE,    <<"test_exchange">>).
--define(TO_SHOVEL,   <<"to_the_shovel">>).
--define(FROM_SHOVEL, <<"from_the_shovel">>).
--define(UNSHOVELLED, <<"unshovelled">>).
--define(SHOVELLED,   <<"shovelled">>).
--define(TIMEOUT,     1000).
-
-main_test() ->
-    %% it may already be running. Stop if possible
-    application:stop(rabbitmq_shovel),
-
-    %% shovel can be started with zero shovels configured
-    ok = application:start(rabbitmq_shovel),
-    ok = application:stop(rabbitmq_shovel),
-
-    %% various ways of breaking the config
-    require_list_of_shovel_configurations =
-        test_broken_shovel_configs(invalid_config),
-
-    require_list_of_shovel_configurations =
-        test_broken_shovel_configs([{test_shovel, invalid_shovel_config}]),
-
-    Config = [{sources, [{broker, "amqp://"}]},
-              {destinations, [{broker, "amqp://"}]},
-              {queue, <<"">>}],
-
-    {duplicate_shovel_definition, test_shovel} =
-        test_broken_shovel_configs(
-          [{test_shovel, Config}, {test_shovel, Config}]),
-
-    {invalid_parameters, [{invalid, invalid, invalid}]} =
-        test_broken_shovel_config([{invalid, invalid, invalid} | Config]),
-
-    {duplicate_parameters, [queue]} =
-        test_broken_shovel_config([{queue, <<"">>} | Config]),
-
-    {missing_parameters, Missing} =
-        test_broken_shovel_config([]),
-    [destinations, queue, sources] = lists:sort(Missing),
-
-    {unrecognised_parameters, [invalid]} =
-        test_broken_shovel_config([{invalid, invalid} | Config]),
-
-    {require_list, invalid} =
-        test_broken_shovel_sources(invalid),
-
-    {missing_endpoint_parameter, broker_or_brokers} =
-        test_broken_shovel_sources([]),
-
-    {expected_list, brokers, invalid} =
-        test_broken_shovel_sources([{brokers, invalid}]),
-
-    {expected_string_uri, 42} =
-        test_broken_shovel_sources([{brokers, [42]}]),
-
-    {{unexpected_uri_scheme, "invalid"}, "invalid://"} =
-        test_broken_shovel_sources([{broker, "invalid://"}]),
-
-    {{unable_to_parse_uri, no_scheme}, "invalid"} =
-        test_broken_shovel_sources([{broker, "invalid"}]),
-
-    {expected_list,declarations, invalid} =
-        test_broken_shovel_sources([{broker, "amqp://"},
-                                    {declarations, invalid}]),
-    {unknown_method_name, 42} =
-        test_broken_shovel_sources([{broker, "amqp://"},
-                                    {declarations, [42]}]),
-
-    {expected_method_field_list, 'queue.declare', 42} =
-        test_broken_shovel_sources([{broker, "amqp://"},
-                                    {declarations, [{'queue.declare', 42}]}]),
-
-    {unknown_fields, 'queue.declare', [invalid]} =
-        test_broken_shovel_sources(
-          [{broker, "amqp://"},
-           {declarations, [{'queue.declare', [invalid]}]}]),
-
-    {{invalid_amqp_params_parameter, heartbeat, "text",
-      [{"heartbeat", "text"}], {not_an_integer, "text"}}, _} =
-        test_broken_shovel_sources(
-          [{broker, "amqp://localhost/?heartbeat=text"}]),
-
-    {{invalid_amqp_params_parameter, username, "text",
-      [{"username", "text"}],
-      {parameter_unconfigurable_in_query, username, "text"}}, _} =
-        test_broken_shovel_sources([{broker, "amqp://?username=text"}]),
-
-    {invalid_parameter_value, prefetch_count,
-     {require_non_negative_integer, invalid}} =
-        test_broken_shovel_config([{prefetch_count, invalid} | Config]),
-
-    {invalid_parameter_value, ack_mode,
-     {ack_mode_value_requires_one_of,
-      {no_ack, on_publish, on_confirm}, invalid}} =
-        test_broken_shovel_config([{ack_mode, invalid} | Config]),
-
-    {invalid_parameter_value, queue,
-     {require_binary, invalid}} =
-        test_broken_shovel_config([{sources, [{broker, "amqp://"}]},
-                                   {destinations, [{broker, "amqp://"}]},
-                                   {queue, invalid}]),
-
-    {invalid_parameter_value, publish_properties,
-     {require_list, invalid}} =
-        test_broken_shovel_config([{publish_properties, invalid} | Config]),
-
-    {invalid_parameter_value, publish_properties,
-     {unexpected_fields, [invalid], _}} =
-        test_broken_shovel_config([{publish_properties, [invalid]} | Config]),
-
-    {{invalid_ssl_parameter, fail_if_no_peer_cert, "42", _,
-      {require_boolean, '42'}}, _} =
-        test_broken_shovel_sources([{broker, "amqps://username:password@host:5673/vhost?cacertfile=/path/to/cacert.pem&certfile=/path/to/certfile.pem&keyfile=/path/to/keyfile.pem&verify=verify_peer&fail_if_no_peer_cert=42"}]),
-
-    %% a working config
-    application:set_env(
-      rabbitmq_shovel,
-      shovels,
-      [{test_shovel,
-        [{sources,
-          [{broker, "amqp:///%2f?heartbeat=5"},
-           {declarations,
-            [{'queue.declare',    [exclusive, auto_delete]},
-             {'exchange.declare', [{exchange, ?EXCHANGE}, auto_delete]},
-             {'queue.bind',       [{queue, <<>>}, {exchange, ?EXCHANGE},
-                                   {routing_key, ?TO_SHOVEL}]}
-            ]}]},
-         {destinations,
-          [{broker, "amqp:///%2f"}]},
-         {queue, <<>>},
-         {ack_mode, on_confirm},
-         {publish_fields, [{exchange, ?EXCHANGE}, {routing_key, ?FROM_SHOVEL}]},
-         {publish_properties, [{delivery_mode, 2},
-                               {cluster_id,    <<"my-cluster">>},
-                               {content_type,  ?SHOVELLED}]},
-         {add_forward_headers, true}
-        ]}],
-      infinity),
-
-    ok = application:start(rabbitmq_shovel),
-
-    await_running_shovel(test_shovel),
-
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Chan} = amqp_connection:open_channel(Conn),
-
-    #'queue.declare_ok'{ queue = Q } =
-        amqp_channel:call(Chan, #'queue.declare' { exclusive = true }),
-    #'queue.bind_ok'{} =
-        amqp_channel:call(Chan, #'queue.bind' { queue = Q, exchange = ?EXCHANGE,
-                                                routing_key = ?FROM_SHOVEL }),
-    #'queue.bind_ok'{} =
-        amqp_channel:call(Chan, #'queue.bind' { queue = Q, exchange = ?EXCHANGE,
-                                                routing_key = ?TO_SHOVEL }),
-
-    #'basic.consume_ok'{ consumer_tag = CTag } =
-        amqp_channel:subscribe(Chan,
-                               #'basic.consume' { queue = Q, exclusive = true },
-                               self()),
-    receive
-        #'basic.consume_ok'{ consumer_tag = CTag } -> ok
-    after ?TIMEOUT -> throw(timeout_waiting_for_consume_ok)
-    end,
-
-    ok = amqp_channel:call(Chan,
-                           #'basic.publish' { exchange    = ?EXCHANGE,
-                                              routing_key = ?TO_SHOVEL },
-                           #amqp_msg { payload = <<42>>,
-                                       props   = #'P_basic' {
-                                         delivery_mode = 2,
-                                         content_type  = ?UNSHOVELLED }
-                                     }),
-
-    receive
-        {#'basic.deliver' { consumer_tag = CTag, delivery_tag = AckTag,
-                            routing_key = ?FROM_SHOVEL },
-         #amqp_msg { payload = <<42>>,
-                     props   = #'P_basic' { delivery_mode = 2,
-                                            content_type  = ?SHOVELLED,
-                                            headers       = [{<<"x-shovelled">>,
-                                                              _, _}]}
-                   }} ->
-            ok = amqp_channel:call(Chan, #'basic.ack'{ delivery_tag = AckTag })
-    after ?TIMEOUT -> throw(timeout_waiting_for_deliver1)
-    end,
-
-    [{test_shovel, static, {running, _Info}, _Time}] =
-        rabbit_shovel_status:status(),
-
-    receive
-        {#'basic.deliver' { consumer_tag = CTag, delivery_tag = AckTag1,
-                            routing_key = ?TO_SHOVEL },
-         #amqp_msg { payload = <<42>>,
-                     props   = #'P_basic' { delivery_mode = 2,
-                                            content_type  = ?UNSHOVELLED }
-                   }} ->
-            ok = amqp_channel:call(Chan, #'basic.ack'{ delivery_tag = AckTag1 })
-    after ?TIMEOUT -> throw(timeout_waiting_for_deliver2)
-    end,
-
-    amqp_channel:close(Chan),
-    amqp_connection:close(Conn),
-
-    ok.
-
-test_broken_shovel_configs(Configs) ->
-    application:set_env(rabbitmq_shovel, shovels, Configs),
-    {error, {Error, _}} = application:start(rabbitmq_shovel),
-    Error.
-
-test_broken_shovel_config(Config) ->
-    {invalid_shovel_configuration, test_shovel, Error} =
-        test_broken_shovel_configs([{test_shovel, Config}]),
-    Error.
-
-test_broken_shovel_sources(Sources) ->
-    {invalid_parameter_value, sources, Error} =
-        test_broken_shovel_config([{sources, Sources},
-                                   {destinations, [{broker, "amqp://"}]},
-                                   {queue, <<"">>}]),
-    Error.
-
-await_running_shovel(Name) ->
-    case [Name || {Name, _, {running, _}, _}
-                      <- rabbit_shovel_status:status()] of
-        [_] -> ok;
-        _   -> timer:sleep(100),
-               await_running_shovel(Name)
-    end.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test_all.erl b/rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test_all.erl
deleted file mode 100644 (file)
index 2269ea8..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_shovel_test_all).
-
--export([all_tests/0]).
-
-all_tests() ->
-    ok = eunit:test(tests(rabbit_shovel_test, 60), [verbose]),
-    ok = eunit:test(tests(rabbit_shovel_test_dyn, 60), [verbose]).
-
-tests(Module, Timeout) ->
-    {foreach, fun() -> ok end,
-     [{timeout, Timeout, fun () -> Module:F() end} || F <- funs(Module, "_test")] ++
-         [{timeout, Timeout, Fun} || Gen <- funs(Module, "_test_"),
-                                     Fun <- Module:Gen()]}.
-
-funs(Module, Suffix) ->
-    [F || {F, _Arity} <- proplists:get_value(exports, Module:module_info()),
-          string:right(atom_to_list(F), length(Suffix)) =:= Suffix].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test_dyn.erl b/rabbitmq-server/plugins-src/rabbitmq-shovel/test/src/rabbit_shovel_test_dyn.erl
deleted file mode 100644 (file)
index b3c74d8..0000000
+++ /dev/null
@@ -1,293 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ Federation.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_shovel_test_dyn).
-
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_misc, [pget/2]).
-
-simple_test() ->
-    with_ch(
-      fun (Ch) ->
-              set_param(<<"test">>, [{<<"src-queue">>,  <<"src">>},
-                                     {<<"dest-queue">>, <<"dest">>}]),
-              publish_expect(Ch, <<>>, <<"src">>, <<"dest">>, <<"hello">>)
-      end).
-
-set_properties_test() ->
-    with_ch(
-      fun (Ch) ->
-              Ps = [{<<"src-queue">>,      <<"src">>},
-                    {<<"dest-queue">>,     <<"dest">>},
-                    {<<"publish-properties">>, [{<<"cluster_id">>, <<"x">>}]}],
-              set_param(<<"test">>, Ps),
-              #amqp_msg{props = #'P_basic'{cluster_id = Cluster}} =
-                  publish_expect(Ch, <<>>, <<"src">>, <<"dest">>, <<"hi">>),
-              ?assertEqual(<<"x">>, Cluster)
-      end).
-
-exchange_test() ->
-    with_ch(
-      fun (Ch) ->
-              amqp_channel:call(Ch, #'queue.declare'{queue   = <<"queue">>,
-                                                     durable = true}),
-              amqp_channel:call(
-                Ch, #'queue.bind'{queue       = <<"queue">>,
-                                  exchange    = <<"amq.topic">>,
-                                  routing_key = <<"test-key">>}),
-              set_param(<<"test">>, [{<<"src-exchange">>,    <<"amq.direct">>},
-                                     {<<"src-exchange-key">>,<<"test-key">>},
-                                     {<<"dest-exchange">>,   <<"amq.topic">>}]),
-              publish_expect(Ch, <<"amq.direct">>, <<"test-key">>,
-                             <<"queue">>, <<"hello">>),
-              set_param(<<"test">>, [{<<"src-exchange">>,     <<"amq.direct">>},
-                                     {<<"src-exchange-key">>, <<"test-key">>},
-                                     {<<"dest-exchange">>,    <<"amq.topic">>},
-                                     {<<"dest-exchange-key">>,<<"new-key">>}]),
-              publish(Ch, <<"amq.direct">>, <<"test-key">>, <<"hello">>),
-              expect_empty(Ch, <<"queue">>),
-              amqp_channel:call(
-                Ch, #'queue.bind'{queue       = <<"queue">>,
-                                  exchange    = <<"amq.topic">>,
-                                  routing_key = <<"new-key">>}),
-              publish_expect(Ch, <<"amq.direct">>, <<"test-key">>,
-                             <<"queue">>, <<"hello">>)
-      end).
-
-restart_test() ->
-    with_ch(
-      fun (Ch) ->
-              set_param(<<"test">>, [{<<"src-queue">>,  <<"src">>},
-                                     {<<"dest-queue">>, <<"dest">>}]),
-              %% The catch is because connections link to the shovel,
-              %% so one connection will die, kill the shovel, kill
-              %% the other connection, then we can't close it
-              [catch amqp_connection:close(C) || C <- rabbit_direct:list()],
-              publish_expect(Ch, <<>>, <<"src">>, <<"dest">>, <<"hello">>)
-      end).
-
-change_definition_test() ->
-    with_ch(
-      fun (Ch) ->
-              set_param(<<"test">>, [{<<"src-queue">>,  <<"src">>},
-                                     {<<"dest-queue">>, <<"dest">>}]),
-              publish_expect(Ch, <<>>, <<"src">>, <<"dest">>, <<"hello">>),
-              set_param(<<"test">>, [{<<"src-queue">>,  <<"src">>},
-                                     {<<"dest-queue">>, <<"dest2">>}]),
-              publish_expect(Ch, <<>>, <<"src">>, <<"dest2">>, <<"hello">>),
-              expect_empty(Ch, <<"dest">>),
-              clear_param(<<"test">>),
-              publish_expect(Ch, <<>>, <<"src">>, <<"src">>, <<"hello">>),
-              expect_empty(Ch, <<"dest">>),
-              expect_empty(Ch, <<"dest2">>)
-      end).
-
-autodelete_test_() ->
-    [autodelete_case({<<"on-confirm">>, <<"queue-length">>,  0, 100}),
-     autodelete_case({<<"on-confirm">>, 50,                 50,  50}),
-     autodelete_case({<<"on-publish">>, <<"queue-length">>,  0, 100}),
-     autodelete_case({<<"on-publish">>, 50,                 50,  50}),
-     %% no-ack is not compatible with explicit count
-     autodelete_case({<<"no-ack">>,     <<"queue-length">>,  0, 100})].
-
-autodelete_case(Args) ->
-    fun () -> with_ch(autodelete_do(Args)) end.
-
-autodelete_do({AckMode, After, ExpSrc, ExpDest}) ->
-    fun (Ch) ->
-            amqp_channel:call(Ch, #'confirm.select'{}),
-            amqp_channel:call(Ch, #'queue.declare'{queue = <<"src">>}),
-            publish_count(Ch, <<>>, <<"src">>, <<"hello">>, 100),
-            amqp_channel:wait_for_confirms(Ch),
-            set_param_nowait(<<"test">>, [{<<"src-queue">>,    <<"src">>},
-                                          {<<"dest-queue">>,   <<"dest">>},
-                                          {<<"ack-mode">>,     AckMode},
-                                          {<<"delete-after">>, After}]),
-            await_autodelete(<<"test">>),
-            expect_count(Ch, <<"src">>, <<"hello">>, ExpSrc),
-            expect_count(Ch, <<"dest">>, <<"hello">>, ExpDest)
-    end.
-
-validation_test() ->
-    URIs = [{<<"src-uri">>,  <<"amqp://">>},
-            {<<"dest-uri">>, <<"amqp://">>}],
-
-    %% Need valid src and dest URIs
-    invalid_param([]),
-    invalid_param([{<<"src-queue">>, <<"test">>},
-                   {<<"src-uri">>,   <<"derp">>},
-                   {<<"dest-uri">>,  <<"amqp://">>}]),
-    invalid_param([{<<"src-queue">>, <<"test">>},
-                   {<<"src-uri">>,   [<<"derp">>]},
-                   {<<"dest-uri">>,  <<"amqp://">>}]),
-    invalid_param([{<<"src-queue">>, <<"test">>},
-                   {<<"dest-uri">>,  <<"amqp://">>}]),
-
-    %% Also need src exchange or queue
-    invalid_param(URIs),
-    valid_param([{<<"src-exchange">>, <<"test">>} | URIs]),
-    QURIs =     [{<<"src-queue">>,    <<"test">>} | URIs],
-    valid_param(QURIs),
-
-    %% But not both
-    invalid_param([{<<"src-exchange">>, <<"test">>} | QURIs]),
-
-    %% Check these are of right type
-    invalid_param([{<<"prefetch-count">>,  <<"three">>} | QURIs]),
-    invalid_param([{<<"reconnect-delay">>, <<"three">>} | QURIs]),
-    invalid_param([{<<"ack-mode">>,        <<"whenever">>} | QURIs]),
-    invalid_param([{<<"delete-after">>,    <<"whenever">>} | QURIs]),
-
-    %% Check properties have to look property-ish
-    invalid_param([{<<"publish-properties">>, [{<<"nonexistent">>, <<>>}]}]),
-    invalid_param([{<<"publish-properties">>, [{<<"cluster_id">>, 2}]}]),
-    invalid_param([{<<"publish-properties">>, <<"something">>}]),
-
-    %% Can't use explicit message count and no-ack together
-    invalid_param([{<<"delete-after">>,    1},
-                   {<<"ack-mode">>,        <<"no-ack">>} | QURIs]),
-    ok.
-
-security_validation_test() ->
-    [begin
-         rabbit_vhost:add(U),
-         rabbit_auth_backend_internal:add_user(U, <<>>),
-         rabbit_auth_backend_internal:set_permissions(
-           U, U, <<".*">>, <<".*">>, <<".*">>)
-     end || U <- [<<"a">>, <<"b">>]],
-
-    Qs = [{<<"src-queue">>, <<"test">>},
-          {<<"dest-queue">>, <<"test2">>}],
-
-    A = lookup_user(<<"a">>),
-    valid_param([{<<"src-uri">>,  <<"amqp:///a">>},
-                 {<<"dest-uri">>, <<"amqp:///a">>} | Qs], A),
-    invalid_param([{<<"src-uri">>,  <<"amqp:///a">>},
-                   {<<"dest-uri">>, <<"amqp:///b">>} | Qs], A),
-    invalid_param([{<<"src-uri">>,  <<"amqp:///b">>},
-                   {<<"dest-uri">>, <<"amqp:///a">>} | Qs], A),
-    [begin
-         rabbit_vhost:delete(U),
-         rabbit_auth_backend_internal:delete_user(U)
-     end || U <- [<<"a">>, <<"b">>]],
-    ok.
-
-%%----------------------------------------------------------------------------
-
-with_ch(Fun) ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    Fun(Ch),
-    amqp_connection:close(Conn),
-    cleanup(),
-    ok.
-
-publish(Ch, X, Key, Payload) when is_binary(Payload) ->
-    publish(Ch, X, Key, #amqp_msg{payload = Payload});
-
-publish(Ch, X, Key, Msg = #amqp_msg{}) ->
-    amqp_channel:cast(Ch, #'basic.publish'{exchange    = X,
-                                           routing_key = Key}, Msg).
-
-publish_expect(Ch, X, Key, Q, Payload) ->
-    publish(Ch, X, Key, Payload),
-    expect(Ch, Q, Payload).
-
-expect(Ch, Q, Payload) ->
-    amqp_channel:subscribe(Ch, #'basic.consume'{queue  = Q,
-                                                no_ack = true}, self()),
-    receive
-        #'basic.consume_ok'{consumer_tag = CTag} -> ok
-    end,
-    Msg = receive
-              {#'basic.deliver'{}, #amqp_msg{payload = Payload} = M} ->
-                  M
-          after 1000 ->
-                  exit({not_received, Payload})
-          end,
-    amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = CTag}),
-    Msg.
-
-expect_empty(Ch, Q) ->
-    ?assertMatch(#'basic.get_empty'{},
-                 amqp_channel:call(Ch, #'basic.get'{ queue = Q })).
-
-publish_count(Ch, X, Key, M, Count) ->
-    [publish(Ch, X, Key, M) || _ <- lists:seq(1, Count)].
-
-expect_count(Ch, Q, M, Count) ->
-    [expect(Ch, Q, M) || _ <- lists:seq(1, Count)],
-    expect_empty(Ch, Q).
-
-set_param(Name, Value) ->
-    set_param_nowait(Name, Value),
-    await_shovel(Name).
-
-set_param_nowait(Name, Value) ->
-    ok = rabbit_runtime_parameters:set(
-           <<"/">>, <<"shovel">>, Name, [{<<"src-uri">>,  <<"amqp://">>},
-                                         {<<"dest-uri">>, [<<"amqp://">>]} |
-                                         Value], none).
-
-invalid_param(Value, User) ->
-    {error_string, _} = rabbit_runtime_parameters:set(
-                          <<"/">>, <<"shovel">>, <<"invalid">>, Value, User).
-
-valid_param(Value, User) ->
-    ok = rabbit_runtime_parameters:set(
-           <<"/">>, <<"shovel">>, <<"a">>, Value, User),
-    ok = rabbit_runtime_parameters:clear(<<"/">>, <<"shovel">>, <<"a">>).
-
-invalid_param(Value) -> invalid_param(Value, none).
-valid_param(Value) -> valid_param(Value, none).
-
-lookup_user(Name) ->
-    {ok, User} = rabbit_access_control:check_user_login(Name, []),
-    User.
-
-clear_param(Name) ->
-    rabbit_runtime_parameters:clear(<<"/">>, <<"shovel">>, Name).
-
-cleanup() ->
-    [rabbit_runtime_parameters:clear(pget(vhost, P),
-                                     pget(component, P),
-                                     pget(name, P)) ||
-        P <- rabbit_runtime_parameters:list()],
-    [rabbit_amqqueue:delete(Q, false, false) || Q <- rabbit_amqqueue:list()].
-
-await_shovel(Name) ->
-    await(fun () -> lists:member(Name, shovels_from_status()) end).
-
-await_autodelete(Name) ->
-    await(fun () -> not lists:member(Name, shovels_from_parameters()) end),
-    await(fun () -> not lists:member(Name, shovels_from_status()) end).
-
-await(Pred) ->
-    case Pred() of
-        true  -> ok;
-        false -> timer:sleep(100),
-                 await(Pred)
-    end.
-
-shovels_from_status() ->
-    S = rabbit_shovel_status:status(),
-    [N || {{<<"/">>, N}, dynamic, {running, _}, _} <- S].
-
-shovels_from_parameters() ->
-    L = rabbit_runtime_parameters:list(<<"/">>, <<"shovel">>),
-    [pget(name, Shovel) || Shovel <- L].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-stomp/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-stomp/.travis.yml
deleted file mode 100644 (file)
index 467bda0..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-sudo: false
-language: erlang
-notifications:
-  email:
-    - alerts@rabbitmq.com
-addons:
-  apt:
-    packages:
-      - xsltproc
-otp_release:
-  - R16B03-1
-  - "17.5"
-  - "18.0"
-install:
-  - if [ ! -d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi
-  - cd $HOME/rabbitmq-public-umbrella
-  - make co
-  - make up
-before_script:
-  - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG"
-  - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]}
-  - rm -rf ${TEST_DIR}
-  - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR}
-  - cd ${TEST_DIR}
-script: make test
-before_cache:
-  - rm -rf ${TEST_DIR}
-  - cd $HOME
-cache:
-  apt: true
-  directories:
-    - $HOME/rabbitmq-public-umbrella
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-stomp/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/Makefile b/rabbitmq-server/plugins-src/rabbitmq-stomp/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/pika/Makefile b/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/pika/Makefile
deleted file mode 100644 (file)
index b082bb5..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-UPSTREAM_GIT=https://github.com/pika/pika.git
-REVISION=0.9.14
-
-LIB_DIR=pika
-CHECKOUT_DIR=pika-git
-
-TARGETS=$(LIB_DIR)
-
-all: $(TARGETS)
-
-clean:
-       rm -rf $(LIB_DIR)
-
-distclean: clean
-       rm -rf $(CHECKOUT_DIR)
-
-$(LIB_DIR) : $(CHECKOUT_DIR)
-       rm -rf $@
-       cp -R $< $@
-
-$(CHECKOUT_DIR):
-       git clone $(UPSTREAM_GIT) $@
-       (cd $@ && git checkout $(REVISION)) || rm -rf $@
-
-echo-revision:
-       @echo $(REVISION)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/stomppy/Makefile b/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/stomppy/Makefile
deleted file mode 100644 (file)
index 40f5bd1..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-UPSTREAM_GIT=https://github.com/jasonrbriggs/stomp.py.git
-REVISION=v4.0.16
-
-LIB_DIR=stomppy
-CHECKOUT_DIR=stomppy-git
-
-TARGETS=$(LIB_DIR)
-
-all: $(TARGETS)
-
-clean:
-       rm -rf $(LIB_DIR)
-
-distclean: clean
-       rm -rf $(CHECKOUT_DIR)
-
-$(LIB_DIR) : $(CHECKOUT_DIR)
-       rm -rf $@
-       cp -R $< $@
-
-$(CHECKOUT_DIR):
-       git clone $(UPSTREAM_GIT) $@
-       (cd $@ && git checkout $(REVISION)) || rm -rf $@
-
-echo-revision:
-       @echo $(REVISION)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/package.mk b/rabbitmq-server/plugins-src/rabbitmq-stomp/package.mk
deleted file mode 100644 (file)
index daacc68..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-server rabbitmq-erlang-client rabbitmq-test
-STANDALONE_TEST_COMMANDS:=eunit:test([rabbit_stomp_test_util,rabbit_stomp_test_frame],[verbose])
-WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/src/test.py $(PACKAGE_DIR)/test/src/test_connect_options.py $(PACKAGE_DIR)/test/src/test_ssl.py
-WITH_BROKER_TEST_COMMANDS:=rabbit_stomp_test:all_tests() rabbit_stomp_amqqueue_test:all_tests()
-WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/test/ebin/test
-
-define package_rules
-
-$(PACKAGE_DIR)+pre-test::
-       rm -rf $(PACKAGE_DIR)/test/certs
-       mkdir $(PACKAGE_DIR)/test/certs
-       mkdir -p $(PACKAGE_DIR)/test/ebin
-       sed -e "s|%%CERTS_DIR%%|$(abspath $(PACKAGE_DIR))/test/certs|g" < $(PACKAGE_DIR)/test/src/test.config > $(PACKAGE_DIR)/test/ebin/test.config
-       $(MAKE) -C $(PACKAGE_DIR)/../rabbitmq-test/certs all PASSWORD=test DIR=$(abspath $(PACKAGE_DIR))/test/certs
-       $(MAKE) -C $(PACKAGE_DIR)/deps/stomppy
-       $(MAKE) -C $(PACKAGE_DIR)/deps/pika
-
-$(PACKAGE_DIR)+clean::
-       rm -rf $(PACKAGE_DIR)/test/certs
-
-$(PACKAGE_DIR)+clean-with-deps::
-       $(MAKE) -C $(PACKAGE_DIR)/deps/stomppy distclean
-       $(MAKE) -C $(PACKAGE_DIR)/deps/pika distclean
-
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_reader.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_reader.erl
deleted file mode 100644 (file)
index 673afee..0000000
+++ /dev/null
@@ -1,243 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_stomp_reader).
-
--export([start_link/3]).
--export([init/3, mainloop/2]).
--export([system_continue/3, system_terminate/4, system_code_change/4]).
--export([conserve_resources/3]).
-
--include("rabbit_stomp.hrl").
--include("rabbit_stomp_frame.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--record(reader_state, {socket, parse_state, processor, state,
-                       conserve_resources, recv_outstanding,
-                       parent}).
-
-%%----------------------------------------------------------------------------
-
-start_link(SupHelperPid, ProcessorPid, Configuration) ->
-        {ok, proc_lib:spawn_link(?MODULE, init,
-                                 [SupHelperPid, ProcessorPid, Configuration])}.
-
-log(Level, Fmt, Args) -> rabbit_log:log(connection, Level, Fmt, Args).
-
-init(SupHelperPid, ProcessorPid, Configuration) ->
-    Reply = go(SupHelperPid, ProcessorPid, Configuration),
-    rabbit_stomp_processor:flush_and_die(ProcessorPid),
-    Reply.
-
-go(SupHelperPid, ProcessorPid, Configuration) ->
-    process_flag(trap_exit, true),
-    receive
-        {go, Sock0, SockTransform} ->
-            case rabbit_net:connection_string(Sock0, inbound) of
-                {ok, ConnStr} ->
-                    case SockTransform(Sock0) of
-                        {ok, Sock} ->
-                            DebugOpts = sys:debug_options([]),
-                            ProcInitArgs = processor_args(SupHelperPid,
-                                                          Configuration,
-                                                          Sock),
-                            rabbit_stomp_processor:init_arg(ProcessorPid,
-                                                            ProcInitArgs),
-                            log(info, "accepting STOMP connection ~p (~s)~n",
-                                [self(), ConnStr]),
-
-                            ParseState = rabbit_stomp_frame:initial_state(),
-                            try
-                                mainloop(DebugOpts,
-                                  register_resource_alarm(
-                                    #reader_state{socket             = Sock,
-                                                  parse_state        = ParseState,
-                                                  processor          = ProcessorPid,
-                                                  state              = running,
-                                                  conserve_resources = false,
-                                                  recv_outstanding   = false})),
-                                log(info, "closing STOMP connection ~p (~s)~n",
-                                    [self(), ConnStr])
-                            catch _:Ex ->
-                                log_network_error(ConnStr, Ex),
-                                rabbit_net:fast_close(Sock),
-                                rabbit_stomp_processor:flush_and_die(ProcessorPid),
-                                exit(normal)
-                            end,
-                            done;
-                        {error, enotconn} ->
-                            rabbit_net:fast_close(Sock0),
-                            rabbit_stomp_processor:flush_and_die(ProcessorPid),
-                            exit(normal);
-                        {error, Reason} ->
-                            log_network_error(ConnStr, Reason),
-                            rabbit_net:fast_close(Sock0),
-                            rabbit_stomp_processor:flush_and_die(ProcessorPid),
-                            exit(normal)
-                        end
-            end
-    end.
-
-mainloop(DebugOpts, State0 = #reader_state{socket = Sock}) ->
-    State = run_socket(control_throttle(State0)),
-    receive
-        {inet_async, Sock, _Ref, {ok, Data}} ->
-            mainloop(DebugOpts, process_received_bytes(
-                       Data, State#reader_state{recv_outstanding = false}));
-        {inet_async, _Sock, _Ref, {error, closed}} ->
-            ok;
-        {inet_async, _Sock, _Ref, {error, Reason}} ->
-            throw({inet_error, Reason});
-        {inet_reply, _Sock, {error, closed}} ->
-            ok;
-        {conserve_resources, Conserve} ->
-            mainloop(DebugOpts, State#reader_state{conserve_resources = Conserve});
-        {bump_credit, Msg} ->
-            credit_flow:handle_bump_msg(Msg),
-            mainloop(DebugOpts, State);
-        {system, From, Request} ->
-            sys:handle_system_msg(Request, From, State#reader_state.parent,
-                                 ?MODULE, DebugOpts, State);
-        {'EXIT', _From, shutdown} ->
-            ok;
-        Other ->
-            log(warning, "STOMP connection ~p received "
-                "an unexpected message ~p~n", [Other]),
-            ok
-    end.
-
-process_received_bytes([], State) ->
-    State;
-process_received_bytes(Bytes,
-                       State = #reader_state{
-                         processor   = Processor,
-                         parse_state = ParseState,
-                         state       = S}) ->
-    case rabbit_stomp_frame:parse(Bytes, ParseState) of
-        {more, ParseState1} ->
-            State#reader_state{parse_state = ParseState1};
-        {ok, Frame, Rest} ->
-            rabbit_stomp_processor:process_frame(Processor, Frame),
-            PS = rabbit_stomp_frame:initial_state(),
-            process_received_bytes(Rest, State#reader_state{
-                                           parse_state = PS,
-                                           state       = next_state(S, Frame)})
-    end.
-
-conserve_resources(Pid, _Source, Conserve) ->
-    Pid ! {conserve_resources, Conserve},
-    ok.
-
-register_resource_alarm(State) ->
-    rabbit_alarm:register(self(), {?MODULE, conserve_resources, []}), State.
-
-control_throttle(State = #reader_state{state              = CS,
-                                       conserve_resources = Mem}) ->
-    case {CS, Mem orelse credit_flow:blocked()} of
-        {running,   true} -> State#reader_state{state = blocking};
-        {blocking, false} -> State#reader_state{state = running};
-        {blocked,  false} -> State#reader_state{state = running};
-        {_,            _} -> State
-    end.
-
-next_state(blocking, #stomp_frame{command = "SEND"}) ->
-    blocked;
-next_state(S, _) ->
-    S.
-
-run_socket(State = #reader_state{state = blocked}) ->
-    State;
-run_socket(State = #reader_state{recv_outstanding = true}) ->
-    State;
-run_socket(State = #reader_state{socket = Sock}) ->
-    rabbit_net:async_recv(Sock, 0, infinity),
-    State#reader_state{recv_outstanding = true}.
-
-%%----------------------------------------------------------------------------
-
-system_continue(Parent, DebugOpts, State) ->
-    mainloop(DebugOpts, State#reader_state{parent = Parent}).
-
-system_terminate(Reason, _Parent, _OldVsn, _Extra) ->
-    exit(Reason).
-
-system_code_change(Misc, _Module, _OldSvn, _Extra) ->
-    {ok, Misc}.
-
-%%----------------------------------------------------------------------------
-
-processor_args(SupPid, Configuration, Sock) ->
-    SendFun = fun (sync, IoData) ->
-                      %% no messages emitted
-                      catch rabbit_net:send(Sock, IoData);
-                  (async, IoData) ->
-                      %% {inet_reply, _, _} will appear soon
-                      %% We ignore certain errors here, as we will be
-                      %% receiving an asynchronous notification of the
-                      %% same (or a related) fault shortly anyway. See
-                      %% bug 21365.
-                      catch rabbit_net:port_command(Sock, IoData)
-              end,
-
-    StartHeartbeatFun =
-        fun (SendTimeout, SendFin, ReceiveTimeout, ReceiveFun) ->
-                rabbit_heartbeat:start(SupPid, Sock, SendTimeout,
-                                       SendFin, ReceiveTimeout, ReceiveFun)
-        end,
-    {ok, {PeerAddr, _PeerPort}} = rabbit_net:sockname(Sock),
-    [SendFun, adapter_info(Sock), StartHeartbeatFun,
-     ssl_login_name(Sock, Configuration), PeerAddr].
-
-adapter_info(Sock) ->
-    amqp_connection:socket_adapter_info(Sock, {'STOMP', 0}).
-
-ssl_login_name(_Sock, #stomp_configuration{ssl_cert_login = false}) ->
-    none;
-ssl_login_name(Sock, #stomp_configuration{ssl_cert_login = true}) ->
-    case rabbit_net:peercert(Sock) of
-        {ok, C}              -> case rabbit_ssl:peer_cert_auth_name(C) of
-                                    unsafe    -> none;
-                                    not_found -> none;
-                                    Name      -> Name
-                                end;
-        {error, no_peercert} -> none;
-        nossl                -> none
-    end.
-
-%%----------------------------------------------------------------------------
-
-log_network_error(ConnStr, {ssl_upgrade_error,
-                            {tls_alert, "handshake failure"}}) ->
-    log(error, "STOMP detected TLS upgrade error on "
-        "~p (~s): handshake failure~n", [self(), ConnStr]);
-
-log_network_error(ConnStr, {ssl_upgrade_error,
-                            {tls_alert, "unknown ca"}}) ->
-    log(error, "STOMP detected TLS certificate "
-        "verification error on "
-        "~p (~s): alert 'unknown CA'~n", [self(), ConnStr]);
-
-log_network_error(ConnStr, {ssl_upgrade_error, {tls_alert, Alert}}) ->
-    log(error, "STOMP detected TLS upgrade error on "
-        "~p (~s): alert ~s~n", [self(), ConnStr, Alert]);
-
-log_network_error(ConnStr, {ssl_upgrade_error, closed}) ->
-    log(error, "STOMP detected TLS upgrade error on "
-        "~p (~s): connection closed~n", [self(), ConnStr]);
-
-log_network_error(ConnStr, Ex) ->
-    log(error, "STOMP detected network error on "
-        "~p (~s):~n~p~n", [self(), ConnStr, Ex]).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbitmq_stomp.app.src b/rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbitmq_stomp.app.src
deleted file mode 100644 (file)
index 6757e25..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-{application, rabbitmq_stomp,
- [{description, "Embedded Rabbit Stomp Adapter"},
-  {vsn, "%%VSN%%"},
-  {modules, []},
-  {registered, []},
-  {mod, {rabbit_stomp, []}},
-  {env, [{default_user,
-          [{login, "guest"},
-           {passcode, "guest"}]},
-         {default_vhost, <<"/">>},
-         {ssl_cert_login, false},
-         {implicit_connect, false},
-         {tcp_listeners, [61613]},
-         {ssl_listeners, []},
-         {tcp_listen_options, [binary,
-                               {packet,    raw},
-                               {reuseaddr, true},
-                               {backlog,   128},
-                               {nodelay,   true}]},
-        %% see rabbitmq/rabbitmq-stomp#39
-        {trailing_lf, true}]},
-  {applications, [kernel, stdlib, rabbit, amqp_client]}]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/ack.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/ack.py
deleted file mode 100644 (file)
index e87bca3..0000000
+++ /dev/null
@@ -1,242 +0,0 @@
-import unittest
-import stomp
-import base
-import time
-
-class TestAck(base.BaseTest):
-
-    def test_ack_client(self):
-        destination = "/queue/ack-test"
-
-        # subscribe and send message
-        self.listener.reset(2) ## expecting 2 messages
-        self.subscribe_dest(self.conn, destination, None,
-                            ack='client',
-                            headers={'prefetch-count': '10'})
-        self.conn.send(destination, "test1")
-        self.conn.send(destination, "test2")
-        self.assertTrue(self.listener.await(4), "initial message not received")
-        self.assertEquals(2, len(self.listener.messages))
-
-        # disconnect with no ack
-        self.conn.disconnect()
-
-        # now reconnect
-        conn2 = self.create_connection()
-        try:
-            listener2 = base.WaitableListener()
-            listener2.reset(2)
-            conn2.set_listener('', listener2)
-            self.subscribe_dest(conn2, destination, None,
-                                ack='client',
-                                headers={'prefetch-count': '10'})
-            self.assertTrue(listener2.await(), "message not received again")
-            self.assertEquals(2, len(listener2.messages))
-
-            # now ack only the last message - expecting cumulative behaviour
-            mid = listener2.messages[1]['headers'][self.ack_id_source_header]
-            self.ack_message(conn2, mid, None)
-        finally:
-            conn2.disconnect()
-
-        # now reconnect again, shouldn't see the message
-        conn3 = self.create_connection()
-        try:
-            listener3 = base.WaitableListener()
-            conn3.set_listener('', listener3)
-            self.subscribe_dest(conn3, destination, None)
-            self.assertFalse(listener3.await(3),
-                             "unexpected message. ACK not working?")
-        finally:
-            conn3.disconnect()
-
-    def test_ack_client_individual(self):
-        destination = "/queue/ack-test-individual"
-
-        # subscribe and send message
-        self.listener.reset(2) ## expecting 2 messages
-        self.subscribe_dest(self.conn, destination, None,
-                            ack='client-individual',
-                            headers={'prefetch-count': '10'})
-        self.conn.send(destination, "test1")
-        self.conn.send(destination, "test2")
-        self.assertTrue(self.listener.await(4), "Both initial messages not received")
-        self.assertEquals(2, len(self.listener.messages))
-
-        # disconnect without acks
-        self.conn.disconnect()
-
-        # now reconnect
-        conn2 = self.create_connection()
-        try:
-            listener2 = base.WaitableListener()
-            listener2.reset(2) ## expect 2 messages
-            conn2.set_listener('', listener2)
-            self.subscribe_dest(conn2, destination, None,
-                                ack='client-individual',
-                                headers={'prefetch-count': '10'})
-            self.assertTrue(listener2.await(2.5), "Did not receive 2 messages")
-            self.assertEquals(2, len(listener2.messages), "Not exactly 2 messages received")
-
-            # now ack only the 'test2' message - expecting individual behaviour
-            nummsgs = len(listener2.messages)
-            mid = None
-            for ind in range(nummsgs):
-                if listener2.messages[ind]['message']=="test2":
-                    mid = listener2.messages[ind]['headers'][self.ack_id_source_header]
-                    self.assertEquals(1, ind, 'Expecting test2 to be second message')
-                    break
-            self.assertTrue(mid, "Did not find test2 message id.")
-            self.ack_message(conn2, mid, None)
-        finally:
-            conn2.disconnect()
-
-        # now reconnect again, shouldn't see the message
-        conn3 = self.create_connection()
-        try:
-            listener3 = base.WaitableListener()
-            listener3.reset(2) ## expecting a single message, but wait for two
-            conn3.set_listener('', listener3)
-            self.subscribe_dest(conn3, destination, None)
-            self.assertFalse(listener3.await(2.5),
-                             "Expected to see only one message. ACK not working?")
-            self.assertEquals(1, len(listener3.messages), "Expecting exactly one message")
-            self.assertEquals("test1", listener3.messages[0]['message'], "Unexpected message remains")
-        finally:
-            conn3.disconnect()
-
-    def test_ack_client_tx(self):
-        destination = "/queue/ack-test-tx"
-
-        # subscribe and send message
-        self.listener.reset()
-        self.subscribe_dest(self.conn, destination, None, ack='client')
-        self.conn.send(destination, "test")
-        self.assertTrue(self.listener.await(3), "initial message not received")
-        self.assertEquals(1, len(self.listener.messages))
-
-        # disconnect with no ack
-        self.conn.disconnect()
-
-        # now reconnect
-        conn2 = self.create_connection()
-        try:
-            tx = "abc"
-            listener2 = base.WaitableListener()
-            conn2.set_listener('', listener2)
-            conn2.begin(transaction=tx)
-            self.subscribe_dest(conn2, destination, None, ack='client')
-            self.assertTrue(listener2.await(), "message not received again")
-            self.assertEquals(1, len(listener2.messages))
-
-            # now ack
-            mid = listener2.messages[0]['headers'][self.ack_id_source_header]
-            self.ack_message(conn2, mid, None, transaction=tx)
-
-            #now commit
-            conn2.commit(transaction=tx)
-        finally:
-            conn2.disconnect()
-
-        # now reconnect again, shouldn't see the message
-        conn3 = self.create_connection()
-        try:
-            listener3 = base.WaitableListener()
-            conn3.set_listener('', listener3)
-            self.subscribe_dest(conn3, destination, None)
-            self.assertFalse(listener3.await(3),
-                             "unexpected message. TX ACK not working?")
-        finally:
-            conn3.disconnect()
-
-    def test_topic_prefetch(self):
-        destination = "/topic/prefetch-test"
-
-        # subscribe and send message
-        self.listener.reset(6) ## expect 6 messages
-        self.subscribe_dest(self.conn, destination, None,
-                            ack='client',
-                            headers={'prefetch-count': '5'})
-
-        for x in range(10):
-            self.conn.send(destination, "test" + str(x))
-
-        self.assertFalse(self.listener.await(3),
-                         "Should not have been able to see 6 messages")
-        self.assertEquals(5, len(self.listener.messages))
-
-    def test_nack(self):
-        destination = "/queue/nack-test"
-
-        #subscribe and send
-        self.subscribe_dest(self.conn, destination, None,
-                            ack='client-individual')
-        self.conn.send(destination, "nack-test")
-
-        self.assertTrue(self.listener.await(), "Not received message")
-        message_id = self.listener.messages[0]['headers'][self.ack_id_source_header]
-        self.listener.reset()
-
-        self.nack_message(self.conn, message_id, None)
-        self.assertTrue(self.listener.await(), "Not received message after NACK")
-        message_id = self.listener.messages[0]['headers'][self.ack_id_source_header]
-        self.ack_message(self.conn, message_id, None)
-
-    def test_nack_multi(self):
-        destination = "/queue/nack-multi"
-
-        self.listener.reset(2)
-
-        #subscribe and send
-        self.subscribe_dest(self.conn, destination, None,
-                            ack='client',
-                            headers = {'prefetch-count' : '10'})
-        self.conn.send(destination, "nack-test1")
-        self.conn.send(destination, "nack-test2")
-
-        self.assertTrue(self.listener.await(), "Not received messages")
-        message_id = self.listener.messages[1]['headers'][self.ack_id_source_header]
-        self.listener.reset(2)
-
-        self.nack_message(self.conn, message_id, None)
-        self.assertTrue(self.listener.await(), "Not received message again")
-        message_id = self.listener.messages[1]['headers'][self.ack_id_source_header]
-        self.ack_message(self.conn, message_id, None)
-
-    def test_nack_without_requeueing(self):
-        destination = "/queue/nack-test-no-requeue"
-
-        self.subscribe_dest(self.conn, destination, None,
-                            ack='client-individual')
-        self.conn.send(destination, "nack-test")
-
-        self.assertTrue(self.listener.await(), "Not received message")
-        message_id = self.listener.messages[0]['headers'][self.ack_id_source_header]
-        self.listener.reset()
-
-        self.conn.send_frame("NACK", {self.ack_id_header: message_id, "requeue": False})
-        self.assertFalse(self.listener.await(4), "Received message after NACK with requeue = False")
-
-class TestAck11(TestAck):
-
-   def create_connection_obj(self, version='1.1', vhost='/', heartbeats=(0, 0)):
-       conn = stomp.StompConnection11(vhost=vhost,
-                                      heartbeats=heartbeats)
-       self.ack_id_source_header = 'message-id'
-       self.ack_id_header = 'message-id'
-       return conn
-
-   def test_version(self):
-       self.assertEquals('1.1', self.conn.version)
-
-class TestAck12(TestAck):
-
-   def create_connection_obj(self, version='1.2', vhost='/', heartbeats=(0, 0)):
-       conn = stomp.StompConnection12(vhost=vhost,
-                                      heartbeats=heartbeats)
-       self.ack_id_source_header = 'ack'
-       self.ack_id_header = 'id'
-       return conn
-
-   def test_version(self):
-       self.assertEquals('1.2', self.conn.version)
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/base.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/base.py
deleted file mode 100644 (file)
index e3d5819..0000000
+++ /dev/null
@@ -1,242 +0,0 @@
-import unittest
-import stomp
-import sys
-import threading
-
-
-class BaseTest(unittest.TestCase):
-
-   def create_connection_obj(self, version='1.0', vhost='/', heartbeats=(0, 0)):
-       if version == '1.0':
-           conn = stomp.StompConnection10()
-           self.ack_id_source_header = 'message-id'
-           self.ack_id_header = 'message-id'
-       elif version == '1.1':
-           conn = stomp.StompConnection11(vhost=vhost,
-                                          heartbeats=heartbeats)
-           self.ack_id_source_header = 'message-id'
-           self.ack_id_header = 'message-id'
-       elif version == '1.2':
-           conn = stomp.StompConnection12(vhost=vhost,
-                                          heartbeats=heartbeats)
-           self.ack_id_source_header = 'ack'
-           self.ack_id_header = 'id'
-       else:
-           conn = stomp.StompConnection12(vhost=vhost,
-                                          heartbeats=heartbeats)
-           conn.version = version
-       return conn
-
-   def create_connection(self, user='guest', passcode='guest', wait=True, **kwargs):
-       conn = self.create_connection_obj(**kwargs)
-       conn.start()
-       conn.connect(user, passcode, wait=wait)
-       return conn
-
-   def subscribe_dest(self, conn, destination, sub_id, **kwargs):
-       if type(conn) is stomp.StompConnection10:
-           # 'id' is optional in STOMP 1.0.
-           if sub_id != None:
-               kwargs['id'] = sub_id
-           conn.subscribe(destination, **kwargs)
-       else:
-           # 'id' is required in STOMP 1.1+.
-           if sub_id == None:
-               sub_id = 'ctag'
-           conn.subscribe(destination, sub_id, **kwargs)
-
-   def unsubscribe_dest(self, conn, destination, sub_id, **kwargs):
-       if type(conn) is stomp.StompConnection10:
-           # 'id' is optional in STOMP 1.0.
-           if sub_id != None:
-               conn.unsubscribe(id=sub_id, **kwargs)
-           else:
-               conn.unsubscribe(destination=destination, **kwargs)
-       else:
-           # 'id' is required in STOMP 1.1+.
-           if sub_id == None:
-               sub_id = 'ctag'
-           conn.unsubscribe(sub_id, **kwargs)
-
-   def ack_message(self, conn, msg_id, sub_id, **kwargs):
-       if type(conn) is stomp.StompConnection10:
-           conn.ack(msg_id, **kwargs)
-       elif type(conn) is stomp.StompConnection11:
-           if sub_id == None:
-               sub_id = 'ctag'
-           conn.ack(msg_id, sub_id, **kwargs)
-       elif type(conn) is stomp.StompConnection12:
-           conn.ack(msg_id, **kwargs)
-
-   def nack_message(self, conn, msg_id, sub_id, **kwargs):
-       if type(conn) is stomp.StompConnection10:
-           # Normally unsupported by STOMP 1.0.
-           conn.send_frame("NACK", {"message-id": msg_id})
-       elif type(conn) is stomp.StompConnection11:
-           if sub_id == None:
-               sub_id = 'ctag'
-           conn.nack(msg_id, sub_id, **kwargs)
-       elif type(conn) is stomp.StompConnection12:
-           conn.nack(msg_id, **kwargs)
-
-   def create_subscriber_connection(self, dest):
-       conn = self.create_connection()
-       listener = WaitableListener()
-       conn.set_listener('', listener)
-       self.subscribe_dest(conn, dest, None, receipt="sub.receipt")
-       listener.await()
-       self.assertEquals(1, len(listener.receipts))
-       listener.reset()
-       return conn, listener
-
-   def setUp(self):
-        self.conn = self.create_connection()
-        self.listener = WaitableListener()
-        self.conn.set_listener('', self.listener)
-
-   def tearDown(self):
-        if self.conn.is_connected():
-            self.conn.disconnect()
-            self.conn.stop()
-
-   def simple_test_send_rec(self, dest, route = None):
-        self.listener.reset()
-
-        self.subscribe_dest(self.conn, dest, None)
-        self.conn.send(dest, "foo")
-
-        self.assertTrue(self.listener.await(), "Timeout, no message received")
-
-        # assert no errors
-        if len(self.listener.errors) > 0:
-            self.fail(self.listener.errors[0]['message'])
-
-        # check header content
-        msg = self.listener.messages[0]
-        self.assertEquals("foo", msg['message'])
-        self.assertEquals(dest, msg['headers']['destination'])
-
-   def assertListener(self, errMsg, numMsgs=0, numErrs=0, numRcts=0, timeout=10):
-        if numMsgs + numErrs + numRcts > 0:
-            self._assertTrue(self.listener.await(timeout), errMsg + " (#awaiting)")
-        else:
-            self._assertFalse(self.listener.await(timeout), errMsg + " (#awaiting)")
-        self._assertEquals(numMsgs, len(self.listener.messages), errMsg + " (#messages)")
-        self._assertEquals(numErrs, len(self.listener.errors), errMsg + " (#errors)")
-        self._assertEquals(numRcts, len(self.listener.receipts), errMsg + " (#receipts)")
-
-   def _assertTrue(self, bool, msg):
-       if not bool:
-           self.listener.print_state(msg, True)
-           self.assertTrue(bool, msg)
-
-   def _assertFalse(self, bool, msg):
-       if bool:
-           self.listener.print_state(msg, True)
-           self.assertFalse(bool, msg)
-
-   def _assertEquals(self, expected, actual, msg):
-       if expected != actual:
-           self.listener.print_state(msg, True)
-           self.assertEquals(expected, actual, msg)
-
-   def assertListenerAfter(self, verb, errMsg="", numMsgs=0, numErrs=0, numRcts=0, timeout=5):
-        num = numMsgs + numErrs + numRcts
-        self.listener.reset(num if num>0 else 1)
-        verb()
-        self.assertListener(errMsg=errMsg, numMsgs=numMsgs, numErrs=numErrs, numRcts=numRcts, timeout=timeout)
-
-class WaitableListener(object):
-
-    def __init__(self):
-        self.debug = False
-        if self.debug:
-            print '(listener) init'
-        self.messages = []
-        self.errors = []
-        self.receipts = []
-        self.latch = Latch(1)
-        self.msg_no = 0
-
-    def _next_msg_no(self):
-        self.msg_no += 1
-        return self.msg_no
-
-    def _append(self, array, msg, hdrs):
-        mno = self._next_msg_no()
-        array.append({'message' : msg, 'headers' : hdrs, 'msg_no' : mno})
-        self.latch.countdown()
-
-    def on_receipt(self, headers, message):
-        if self.debug:
-            print '(on_receipt) message:', message, 'headers:', headers
-        self._append(self.receipts, message, headers)
-
-    def on_error(self, headers, message):
-        if self.debug:
-            print '(on_error) message:', message, 'headers:', headers
-        self._append(self.errors, message, headers)
-
-    def on_message(self, headers, message):
-        if self.debug:
-            print '(on_message) message:', message, 'headers:', headers
-        self._append(self.messages, message, headers)
-
-    def reset(self, count=1):
-        if self.debug:
-            self.print_state('(reset listener--old state)')
-        self.messages = []
-        self.errors = []
-        self.receipts = []
-        self.latch = Latch(count)
-        self.msg_no = 0
-        if self.debug:
-            self.print_state('(reset listener--new state)')
-
-    def await(self, timeout=10):
-        return self.latch.await(timeout)
-
-    def print_state(self, hdr="", full=False):
-        print hdr,
-        print '#messages:', len(self.messages),
-        print '#errors:', len(self.errors),
-        print '#receipts:', len(self.receipts),
-        print 'Remaining count:', self.latch.get_count()
-        if full:
-            if len(self.messages) != 0: print 'Messages:', self.messages
-            if len(self.errors) != 0: print 'Messages:', self.errors
-            if len(self.receipts) != 0: print 'Messages:', self.receipts
-
-class Latch(object):
-
-   def __init__(self, count=1):
-      self.cond = threading.Condition()
-      self.cond.acquire()
-      self.count = count
-      self.cond.release()
-
-   def countdown(self):
-      self.cond.acquire()
-      if self.count > 0:
-         self.count -= 1
-      if self.count == 0:
-         self.cond.notify_all()
-      self.cond.release()
-
-   def await(self, timeout=None):
-      try:
-         self.cond.acquire()
-         if self.count == 0:
-            return True
-         else:
-            self.cond.wait(timeout)
-            return self.count == 0
-      finally:
-         self.cond.release()
-
-   def get_count(self):
-      try:
-         self.cond.acquire()
-         return self.count
-      finally:
-         self.cond.release()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/connect_options.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/connect_options.py
deleted file mode 100644 (file)
index d802bc6..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-import unittest
-import stomp
-import base
-import test_util
-
-class TestConnectOptions(base.BaseTest):
-
-    def test_implicit_connect(self):
-        ''' Implicit connect with receipt on first command '''
-        self.conn.disconnect()
-        test_util.enable_implicit_connect()
-        listener = base.WaitableListener()
-        new_conn = stomp.Connection()
-        new_conn.set_listener('', listener)
-
-        new_conn.start() # not going to issue connect
-        self.subscribe_dest(new_conn, "/topic/implicit", 'sub_implicit',
-                            receipt='implicit')
-
-        try:
-            self.assertTrue(listener.await(5))
-            self.assertEquals(1, len(listener.receipts),
-                              'Missing receipt. Likely not connected')
-            self.assertEquals('implicit', listener.receipts[0]['headers']['receipt-id'])
-        finally:
-            new_conn.disconnect()
-            test_util.disable_implicit_connect()
-
-    def test_default_user(self):
-        ''' Default user connection '''
-        self.conn.disconnect()
-        test_util.enable_default_user()
-        listener = base.WaitableListener()
-        new_conn = stomp.Connection()
-        new_conn.set_listener('', listener)
-        new_conn.start()
-        new_conn.connect()
-        try:
-            self.assertFalse(listener.await(3)) # no error back
-            self.assertTrue(new_conn.is_connected())
-        finally:
-            new_conn.disconnect()
-            test_util.disable_default_user()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/destinations.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/destinations.py
deleted file mode 100644 (file)
index 760bb9f..0000000
+++ /dev/null
@@ -1,516 +0,0 @@
-import unittest
-import stomp
-import base
-import time
-
-class TestExchange(base.BaseTest):
-
-
-    def test_amq_direct(self):
-        ''' Test basic send/receive for /exchange/amq.direct '''
-        self.__test_exchange_send_rec("amq.direct", "route")
-
-    def test_amq_topic(self):
-        ''' Test basic send/receive for /exchange/amq.topic '''
-        self.__test_exchange_send_rec("amq.topic", "route")
-
-    def test_amq_fanout(self):
-        ''' Test basic send/receive for /exchange/amq.fanout '''
-        self.__test_exchange_send_rec("amq.fanout", "route")
-
-    def test_amq_fanout_no_route(self):
-        ''' Test basic send/receive, /exchange/amq.direct, no routing key'''
-        self.__test_exchange_send_rec("amq.fanout")
-
-    def test_invalid_exchange(self):
-        ''' Test invalid exchange error '''
-        self.listener.reset(1)
-        self.subscribe_dest(self.conn, "/exchange/does.not.exist", None,
-                            ack="auto")
-        self.assertListener("Expecting an error", numErrs=1)
-        err = self.listener.errors[0]
-        self.assertEquals("not_found", err['headers']['message'])
-        self.assertEquals(
-            "NOT_FOUND - no exchange 'does.not.exist' in vhost '/'\n",
-            err['message'])
-        time.sleep(1)
-        self.assertFalse(self.conn.is_connected())
-
-    def __test_exchange_send_rec(self, exchange, route = None):
-        if exchange != "amq.topic":
-            dest = "/exchange/" + exchange
-        else:
-            dest = "/topic"
-        if route != None:
-            dest += "/" + route
-
-        self.simple_test_send_rec(dest)
-
-class TestQueue(base.BaseTest):
-
-    def test_send_receive(self):
-        ''' Test basic send/receive for /queue '''
-        destination = '/queue/test'
-        self.simple_test_send_rec(destination)
-
-    def test_send_receive_in_other_conn(self):
-        ''' Test send in one connection, receive in another '''
-        destination = '/queue/test2'
-
-        # send
-        self.conn.send(destination, "hello")
-
-        # now receive
-        conn2 = self.create_connection()
-        try:
-            listener2 = base.WaitableListener()
-            conn2.set_listener('', listener2)
-
-            self.subscribe_dest(conn2, destination, None, ack="auto")
-            self.assertTrue(listener2.await(10), "no receive")
-        finally:
-            conn2.disconnect()
-
-    def test_send_receive_in_other_conn_with_disconnect(self):
-        ''' Test send, disconnect, receive '''
-        destination = '/queue/test3'
-
-        # send
-        self.conn.send(destination, "hello thar", receipt="foo")
-        self.listener.await(3)
-        self.conn.disconnect()
-
-        # now receive
-        conn2 = self.create_connection()
-        try:
-            listener2 = base.WaitableListener()
-            conn2.set_listener('', listener2)
-
-            self.subscribe_dest(conn2, destination, None, ack="auto")
-            self.assertTrue(listener2.await(10), "no receive")
-        finally:
-            conn2.disconnect()
-
-
-    def test_multi_subscribers(self):
-        ''' Test multiple subscribers against a single /queue destination '''
-        destination = '/queue/test-multi'
-
-        ## set up two subscribers
-        conn1, listener1 = self.create_subscriber_connection(destination)
-        conn2, listener2 = self.create_subscriber_connection(destination)
-
-        try:
-            ## now send
-            self.conn.send(destination, "test1")
-            self.conn.send(destination, "test2")
-
-            ## expect both consumers to get a message?
-            self.assertTrue(listener1.await(2))
-            self.assertEquals(1, len(listener1.messages),
-                              "unexpected message count")
-            self.assertTrue(listener2.await(2))
-            self.assertEquals(1, len(listener2.messages),
-                              "unexpected message count")
-        finally:
-            conn1.disconnect()
-            conn2.disconnect()
-
-    def test_send_with_receipt(self):
-        destination = '/queue/test-receipt'
-        def noop(): pass
-        self.__test_send_receipt(destination, noop, noop)
-
-    def test_send_with_receipt_tx(self):
-        destination = '/queue/test-receipt-tx'
-        tx = 'receipt.tx'
-
-        def before():
-            self.conn.begin(transaction=tx)
-
-        def after():
-            self.assertFalse(self.listener.await(1))
-            self.conn.commit(transaction=tx)
-
-        self.__test_send_receipt(destination, before, after, {'transaction': tx})
-
-    def test_interleaved_receipt_no_receipt(self):
-        ''' Test i-leaved receipt/no receipt, no-r bracketed by rs '''
-
-        destination = '/queue/ir'
-
-        self.listener.reset(5)
-
-        self.subscribe_dest(self.conn, destination, None, ack="auto")
-        self.conn.send(destination, 'first', receipt='a')
-        self.conn.send(destination, 'second')
-        self.conn.send(destination, 'third', receipt='b')
-
-        self.assertListener("Missing messages/receipts", numMsgs=3, numRcts=2, timeout=3)
-
-        self.assertEquals(set(['a','b']), self.__gather_receipts())
-
-    def test_interleaved_receipt_no_receipt_tx(self):
-        ''' Test i-leaved receipt/no receipt, no-r bracketed by r+xactions '''
-
-        destination = '/queue/ir'
-        tx = 'tx.ir'
-
-        # three messages and two receipts
-        self.listener.reset(5)
-
-        self.subscribe_dest(self.conn, destination, None, ack="auto")
-        self.conn.begin(transaction=tx)
-
-        self.conn.send(destination, 'first', receipt='a', transaction=tx)
-        self.conn.send(destination, 'second', transaction=tx)
-        self.conn.send(destination, 'third', receipt='b', transaction=tx)
-        self.conn.commit(transaction=tx)
-
-        self.assertListener("Missing messages/receipts", numMsgs=3, numRcts=2, timeout=40)
-
-        expected = set(['a', 'b'])
-        missing = expected.difference(self.__gather_receipts())
-
-        self.assertEquals(set(), missing, "Missing receipts: " + str(missing))
-
-    def test_interleaved_receipt_no_receipt_inverse(self):
-        ''' Test i-leaved receipt/no receipt, r bracketed by no-rs '''
-
-        destination = '/queue/ir'
-
-        self.listener.reset(4)
-
-        self.subscribe_dest(self.conn, destination, None, ack="auto")
-        self.conn.send(destination, 'first')
-        self.conn.send(destination, 'second', receipt='a')
-        self.conn.send(destination, 'third')
-
-        self.assertListener("Missing messages/receipt", numMsgs=3, numRcts=1, timeout=3)
-
-        self.assertEquals(set(['a']), self.__gather_receipts())
-
-    def __test_send_receipt(self, destination, before, after, headers = {}):
-        count = 50
-        self.listener.reset(count)
-
-        before()
-        expected_receipts = set()
-
-        for x in range(0, count):
-            receipt = "test" + str(x)
-            expected_receipts.add(receipt)
-            self.conn.send(destination, "test receipt",
-                           receipt=receipt, headers=headers)
-        after()
-
-        self.assertTrue(self.listener.await(5))
-
-        missing_receipts = expected_receipts.difference(
-                    self.__gather_receipts())
-
-        self.assertEquals(set(), missing_receipts,
-                          "missing receipts: " + str(missing_receipts))
-
-    def __gather_receipts(self):
-        result = set()
-        for r in self.listener.receipts:
-            result.add(r['headers']['receipt-id'])
-        return result
-
-class TestTopic(base.BaseTest):
-
-      def test_send_receive(self):
-        ''' Test basic send/receive for /topic '''
-        destination = '/topic/test'
-        self.simple_test_send_rec(destination)
-
-      def test_send_multiple(self):
-          ''' Test /topic with multiple consumers '''
-          destination = '/topic/multiple'
-
-          ## set up two subscribers
-          conn1, listener1 = self.create_subscriber_connection(destination)
-          conn2, listener2 = self.create_subscriber_connection(destination)
-
-          try:
-              ## listeners are expecting 2 messages
-              listener1.reset(2)
-              listener2.reset(2)
-
-              ## now send
-              self.conn.send(destination, "test1")
-              self.conn.send(destination, "test2")
-
-              ## expect both consumers to get both messages
-              self.assertTrue(listener1.await(5))
-              self.assertEquals(2, len(listener1.messages),
-                                "unexpected message count")
-              self.assertTrue(listener2.await(5))
-              self.assertEquals(2, len(listener2.messages),
-                                "unexpected message count")
-          finally:
-              conn1.disconnect()
-              conn2.disconnect()
-
-      def test_send_multiple_with_a_large_message(self):
-          ''' Test /topic with multiple consumers '''
-          destination = '/topic/16mb'
-          # payload size
-          s = 1024 * 1024 * 16
-          message = 'x' * s
-
-          conn1, listener1 = self.create_subscriber_connection(destination)
-          conn2, listener2 = self.create_subscriber_connection(destination)
-
-          try:
-              listener1.reset(2)
-              listener2.reset(2)
-
-              self.conn.send(destination, message)
-              self.conn.send(destination, message)
-
-              self.assertTrue(listener1.await(10))
-              self.assertEquals(2, len(listener1.messages),
-                                "unexpected message count")
-              self.assertTrue(len(listener2.messages[0]['message']) == s,
-                              "unexpected message size")
-
-              self.assertTrue(listener2.await(10))
-              self.assertEquals(2, len(listener2.messages),
-                                "unexpected message count")
-          finally:
-              conn1.disconnect()
-              conn2.disconnect()
-
-class TestReplyQueue(base.BaseTest):
-
-    def test_reply_queue(self):
-        ''' Test with two separate clients. Client 1 sends
-        message to a known destination with a defined reply
-        queue. Client 2 receives on known destination and replies
-        on the reply destination. Client 1 gets the reply message'''
-
-        known = '/queue/known'
-        reply = '/temp-queue/0'
-
-        ## Client 1 uses pre-supplied connection and listener
-        ## Set up client 2
-        conn2, listener2 = self.create_subscriber_connection(known)
-
-        try:
-            self.conn.send(known, "test",
-                           headers = {"reply-to": reply})
-
-            self.assertTrue(listener2.await(5))
-            self.assertEquals(1, len(listener2.messages))
-
-            reply_to = listener2.messages[0]['headers']['reply-to']
-            self.assertTrue(reply_to.startswith('/reply-queue/'))
-
-            conn2.send(reply_to, "reply")
-            self.assertTrue(self.listener.await(5))
-            self.assertEquals("reply", self.listener.messages[0]['message'])
-        finally:
-            conn2.disconnect()
-
-    def test_reuse_reply_queue(self):
-        ''' Test re-use of reply-to queue '''
-
-        known2 = '/queue/known2'
-        known3 = '/queue/known3'
-        reply = '/temp-queue/foo'
-
-        def respond(cntn, listna):
-            self.assertTrue(listna.await(5))
-            self.assertEquals(1, len(listna.messages))
-            reply_to = listna.messages[0]['headers']['reply-to']
-            self.assertTrue(reply_to.startswith('/reply-queue/'))
-            cntn.send(reply_to, "reply")
-
-        ## Client 1 uses pre-supplied connection and listener
-        ## Set up clients 2 and 3
-        conn2, listener2 = self.create_subscriber_connection(known2)
-        conn3, listener3 = self.create_subscriber_connection(known3)
-        try:
-            self.listener.reset(2)
-            self.conn.send(known2, "test2",
-                           headers = {"reply-to": reply})
-            self.conn.send(known3, "test3",
-                           headers = {"reply-to": reply})
-            respond(conn2, listener2)
-            respond(conn3, listener3)
-
-            self.assertTrue(self.listener.await(5))
-            self.assertEquals(2, len(self.listener.messages))
-            self.assertEquals("reply", self.listener.messages[0]['message'])
-            self.assertEquals("reply", self.listener.messages[1]['message'])
-        finally:
-            conn2.disconnect()
-            conn3.disconnect()
-
-    def test_perm_reply_queue(self):
-        '''As test_reply_queue, but with a non-temp reply queue'''
-
-        known = '/queue/known'
-        reply = '/queue/reply'
-
-        ## Client 1 uses pre-supplied connection and listener
-        ## Set up client 2
-        conn1, listener1 = self.create_subscriber_connection(reply)
-        conn2, listener2 = self.create_subscriber_connection(known)
-
-        try:
-            conn1.send(known, "test",
-                       headers = {"reply-to": reply})
-
-            self.assertTrue(listener2.await(5))
-            self.assertEquals(1, len(listener2.messages))
-
-            reply_to = listener2.messages[0]['headers']['reply-to']
-            self.assertTrue(reply_to == reply)
-
-            conn2.send(reply_to, "reply")
-            self.assertTrue(listener1.await(5))
-            self.assertEquals("reply", listener1.messages[0]['message'])
-        finally:
-            conn1.disconnect()
-            conn2.disconnect()
-
-class TestDurableSubscription(base.BaseTest):
-
-    ID = 'test.subscription'
-
-    def __subscribe(self, dest, conn=None, id=None):
-        if not conn:
-            conn = self.conn
-        if not id:
-            id = TestDurableSubscription.ID
-
-        self.subscribe_dest(conn, dest, id, ack="auto",
-                            headers = {'persistent': 'true',
-                                       'receipt': 1})
-
-    def __assert_receipt(self, listener=None, pos=None):
-        if not listener:
-            listener = self.listener
-
-        self.assertTrue(listener.await(5))
-        self.assertEquals(1, len(self.listener.receipts))
-        if pos is not None:
-            self.assertEquals(pos, self.listener.receipts[0]['msg_no'])
-
-    def __assert_message(self, msg, listener=None, pos=None):
-        if not listener:
-            listener = self.listener
-
-        self.assertTrue(listener.await(5))
-        self.assertEquals(1, len(listener.messages))
-        self.assertEquals(msg, listener.messages[0]['message'])
-        if pos is not None:
-            self.assertEquals(pos, self.listener.messages[0]['msg_no'])
-
-    def test_durable_subscription(self):
-        destination = '/topic/durable'
-
-        self.__subscribe(destination)
-        self.__assert_receipt()
-
-        # send first message without unsubscribing
-        self.listener.reset(1)
-        self.conn.send(destination, "first")
-        self.__assert_message("first")
-
-        # now unsubscribe (disconnect only)
-        self.unsubscribe_dest(self.conn, destination, TestDurableSubscription.ID)
-
-        # send again
-        self.listener.reset(2)
-        self.conn.send(destination, "second")
-
-        # resubscribe and expect receipt
-        self.__subscribe(destination)
-        self.__assert_receipt(pos=1)
-        # and message
-        self.__assert_message("second", pos=2)
-
-        # now unsubscribe (cancel)
-        self.unsubscribe_dest(self.conn, destination, TestDurableSubscription.ID,
-                              headers={'persistent': 'true'})
-
-        # send again
-        self.listener.reset(1)
-        self.conn.send(destination, "third")
-
-        # resubscribe and expect no message
-        self.__subscribe(destination)
-        self.assertTrue(self.listener.await(3))
-        self.assertEquals(0, len(self.listener.messages))
-        self.assertEquals(1, len(self.listener.receipts))
-
-    def test_share_subscription(self):
-        destination = '/topic/durable-shared'
-
-        conn2 = self.create_connection()
-        conn2.set_listener('', self.listener)
-
-        try:
-            self.__subscribe(destination)
-            self.__assert_receipt()
-            self.listener.reset(1)
-            self.__subscribe(destination, conn2)
-            self.__assert_receipt()
-
-            self.listener.reset(100)
-
-            # send 100 messages
-            for x in xrange(0, 100):
-                self.conn.send(destination, "msg" + str(x))
-
-            self.assertTrue(self.listener.await(5))
-            self.assertEquals(100, len(self.listener.messages))
-        finally:
-            conn2.disconnect()
-
-    def test_separate_ids(self):
-        destination = '/topic/durable-separate'
-
-        conn2 = self.create_connection()
-        listener2 = base.WaitableListener()
-        conn2.set_listener('', listener2)
-
-        try:
-            # ensure durable subscription exists for each ID
-            self.__subscribe(destination)
-            self.__assert_receipt()
-            self.__subscribe(destination, conn2, "other.id")
-            self.__assert_receipt(listener2)
-            self.unsubscribe_dest(self.conn, destination, TestDurableSubscription.ID)
-            self.unsubscribe_dest(conn2, destination, "other.id")
-
-            self.listener.reset(101)
-            listener2.reset(101) ## 100 messages and 1 receipt
-
-            # send 100 messages
-            for x in xrange(0, 100):
-                self.conn.send(destination, "msg" + str(x))
-
-            self.__subscribe(destination)
-            self.__subscribe(destination, conn2, "other.id")
-
-            for l in [self.listener, listener2]:
-                self.assertTrue(l.await(15))
-                self.assertTrue(len(l.messages) >= 90)
-                self.assertTrue(len(l.messages) <= 100)
-
-        finally:
-            conn2.disconnect()
-
-    def test_durable_subscribe_no_id(self):
-        destination = '/topic/durable-invalid'
-
-        self.conn.send_frame('SUBSCRIBE',
-            {'destination': destination, 'ack': 'auto', 'persistent': 'true'})
-        self.listener.await(3)
-        self.assertEquals(1, len(self.listener.errors))
-        self.assertEquals("Missing Header", self.listener.errors[0]['headers']['message'])
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/errors.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/errors.py
deleted file mode 100644 (file)
index d3fa60a..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-import unittest
-import stomp
-import base
-import time
-
-class TestErrorsAndCloseConnection(base.BaseTest):
-    def __test_duplicate_consumer_tag_with_headers(self, destination, headers):
-        self.subscribe_dest(self.conn, destination, None,
-                            headers = headers)
-
-        self.subscribe_dest(self.conn, destination, None,
-                            headers = headers)
-
-        self.assertTrue(self.listener.await())
-
-        self.assertEquals(1, len(self.listener.errors))
-        errorReceived = self.listener.errors[0]
-        self.assertEquals("Duplicated subscription identifier", errorReceived['headers']['message'])
-        self.assertEquals("A subscription identified by 'T_1' alredy exists.", errorReceived['message'])
-        time.sleep(2)
-        self.assertFalse(self.conn.is_connected())
-
-
-    def test_duplicate_consumer_tag_with_transient_destination(self):
-        destination = "/exchange/amq.direct/duplicate-consumer-tag-test1"
-        self.__test_duplicate_consumer_tag_with_headers(destination, {'id': 1})
-
-    def test_duplicate_consumer_tag_with_durable_destination(self):
-        destination = "/queue/duplicate-consumer-tag-test2"
-        self.__test_duplicate_consumer_tag_with_headers(destination, {'id': 1,
-                                                                      'persistent': True})
-
-
-class TestErrors(base.BaseTest):
-
-    def test_invalid_queue_destination(self):
-        self.__test_invalid_destination("queue", "/bah/baz")
-
-    def test_invalid_empty_queue_destination(self):
-        self.__test_invalid_destination("queue", "")
-
-    def test_invalid_topic_destination(self):
-        self.__test_invalid_destination("topic", "/bah/baz")
-
-    def test_invalid_empty_topic_destination(self):
-        self.__test_invalid_destination("topic", "")
-
-    def test_invalid_exchange_destination(self):
-        self.__test_invalid_destination("exchange", "/bah/baz/boo")
-
-    def test_invalid_empty_exchange_destination(self):
-        self.__test_invalid_destination("exchange", "")
-
-    def test_invalid_default_exchange_destination(self):
-        self.__test_invalid_destination("exchange", "//foo")
-
-    def test_unknown_destination(self):
-        self.listener.reset()
-        self.conn.send("/something/interesting", 'test_unknown_destination')
-
-        self.assertTrue(self.listener.await())
-        self.assertEquals(1, len(self.listener.errors))
-
-        err = self.listener.errors[0]
-        self.assertEquals("Unknown destination", err['headers']['message'])
-
-    def test_send_missing_destination(self):
-        self.__test_missing_destination("SEND")
-
-    def test_send_missing_destination(self):
-        self.__test_missing_destination("SUBSCRIBE")
-
-    def __test_missing_destination(self, command):
-        self.listener.reset()
-        self.conn.send_frame(command)
-
-        self.assertTrue(self.listener.await())
-        self.assertEquals(1, len(self.listener.errors))
-
-        err = self.listener.errors[0]
-        self.assertEquals("Missing destination", err['headers']['message'])
-
-    def __test_invalid_destination(self, dtype, content):
-        self.listener.reset()
-        self.conn.send("/" + dtype + content, '__test_invalid_destination:' + dtype + content)
-
-        self.assertTrue(self.listener.await())
-        self.assertEquals(1, len(self.listener.errors))
-
-        err = self.listener.errors[0]
-        self.assertEquals("Invalid destination", err['headers']['message'])
-        self.assertEquals("'" + content + "' is not a valid " +
-                              dtype + " destination\n",
-                          err['message'])
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/lifecycle.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/lifecycle.py
deleted file mode 100644 (file)
index ff9b119..0000000
+++ /dev/null
@@ -1,168 +0,0 @@
-import unittest
-import stomp
-import base
-import time
-
-class TestLifecycle(base.BaseTest):
-
-    def test_unsubscribe_exchange_destination(self):
-        ''' Test UNSUBSCRIBE command with exchange'''
-        d = "/exchange/amq.fanout"
-        self.unsub_test(d, self.sub_and_send(d))
-
-    def test_unsubscribe_exchange_destination_with_receipt(self):
-        ''' Test receipted UNSUBSCRIBE command with exchange'''
-        d = "/exchange/amq.fanout"
-        self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
-
-    def test_unsubscribe_queue_destination(self):
-        ''' Test UNSUBSCRIBE command with queue'''
-        d = "/queue/unsub01"
-        self.unsub_test(d, self.sub_and_send(d))
-
-    def test_unsubscribe_queue_destination_with_receipt(self):
-        ''' Test receipted UNSUBSCRIBE command with queue'''
-        d = "/queue/unsub02"
-        self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
-
-    def test_unsubscribe_exchange_id(self):
-        ''' Test UNSUBSCRIBE command with exchange by id'''
-        d = "/exchange/amq.fanout"
-        self.unsub_test(d, self.sub_and_send(d, subid="exchid"))
-
-    def test_unsubscribe_exchange_id_with_receipt(self):
-        ''' Test receipted UNSUBSCRIBE command with exchange by id'''
-        d = "/exchange/amq.fanout"
-        self.unsub_test(d, self.sub_and_send(d, subid="exchid", receipt="unsub.rct"), numRcts=1)
-
-    def test_unsubscribe_queue_id(self):
-        ''' Test UNSUBSCRIBE command with queue by id'''
-        d = "/queue/unsub03"
-        self.unsub_test(d, self.sub_and_send(d, subid="queid"))
-
-    def test_unsubscribe_queue_id_with_receipt(self):
-        ''' Test receipted UNSUBSCRIBE command with queue by id'''
-        d = "/queue/unsub04"
-        self.unsub_test(d, self.sub_and_send(d, subid="queid", receipt="unsub.rct"), numRcts=1)
-
-    def test_connect_version_1_0(self):
-        ''' Test CONNECT with version 1.0'''
-        self.conn.disconnect()
-        new_conn = self.create_connection(version="1.0")
-        try:
-            self.assertTrue(new_conn.is_connected())
-        finally:
-            new_conn.disconnect()
-            self.assertFalse(new_conn.is_connected())
-
-    def test_connect_version_1_1(self):
-        ''' Test CONNECT with version 1.1'''
-        self.conn.disconnect()
-        new_conn = self.create_connection(version="1.1")
-        try:
-            self.assertTrue(new_conn.is_connected())
-        finally:
-            new_conn.disconnect()
-            self.assertFalse(new_conn.is_connected())
-
-    def test_connect_version_1_2(self):
-        ''' Test CONNECT with version 1.2'''
-        self.conn.disconnect()
-        new_conn = self.create_connection(version="1.2")
-        try:
-            self.assertTrue(new_conn.is_connected())
-        finally:
-            new_conn.disconnect()
-            self.assertFalse(new_conn.is_connected())
-
-    def test_heartbeat_disconnects_client(self):
-        ''' Test heart-beat disconnection'''
-        self.conn.disconnect()
-        new_conn = self.create_connection(version='1.1', heartbeats=(1500, 0))
-        try:
-            self.assertTrue(new_conn.is_connected())
-            time.sleep(1)
-            self.assertTrue(new_conn.is_connected())
-            time.sleep(3)
-            self.assertFalse(new_conn.is_connected())
-        finally:
-            if new_conn.is_connected():
-                new_conn.disconnect()
-
-    def test_unsupported_version(self):
-        ''' Test unsupported version on CONNECT command'''
-        self.bad_connect("Supported versions are 1.0,1.1,1.2\n", version='100.1')
-
-    def test_bad_username(self):
-        ''' Test bad username'''
-        self.bad_connect("Access refused for user 'gust'\n", user='gust')
-
-    def test_bad_password(self):
-        ''' Test bad password'''
-        self.bad_connect("Access refused for user 'guest'\n", passcode='gust')
-
-    def test_bad_vhost(self):
-        ''' Test bad virtual host'''
-        self.bad_connect("Virtual host '//' access denied", version='1.1', vhost='//')
-
-    def bad_connect(self, expected, user='guest', passcode='guest', **kwargs):
-        self.conn.disconnect()
-        new_conn = self.create_connection_obj(**kwargs)
-        listener = base.WaitableListener()
-        new_conn.set_listener('', listener)
-        try:
-            new_conn.start()
-            new_conn.connect(user, passcode)
-            self.assertTrue(listener.await())
-            self.assertEquals(expected, listener.errors[0]['message'])
-        finally:
-            if new_conn.is_connected():
-                new_conn.disconnect()
-
-    def test_bad_header_on_send(self):
-        ''' Test disallowed header on SEND '''
-        self.listener.reset(1)
-        self.conn.send_frame("SEND", {"destination":"a", "message-id":"1"})
-        self.assertTrue(self.listener.await())
-        self.assertEquals(1, len(self.listener.errors))
-        errorReceived = self.listener.errors[0]
-        self.assertEquals("Invalid header", errorReceived['headers']['message'])
-        self.assertEquals("'message-id' is not allowed on 'SEND'.\n", errorReceived['message'])
-
-    def test_disconnect(self):
-        ''' Test DISCONNECT command'''
-        self.conn.disconnect()
-        self.assertFalse(self.conn.is_connected())
-
-    def test_disconnect_with_receipt(self):
-        ''' Test the DISCONNECT command with receipts '''
-        time.sleep(3)
-        self.listener.reset(1)
-        self.conn.send_frame("DISCONNECT", {"receipt": "test"})
-        self.assertTrue(self.listener.await())
-        self.assertEquals(1, len(self.listener.receipts))
-        receiptReceived = self.listener.receipts[0]['headers']['receipt-id']
-        self.assertEquals("test", receiptReceived
-                         , "Wrong receipt received: '" + receiptReceived + "'")
-
-    def unsub_test(self, dest, verbs, numRcts=0):
-        def afterfun():
-            self.conn.send(dest, "after-test")
-        subverb, unsubverb = verbs
-        self.assertListenerAfter(subverb, numMsgs=1,
-                           errMsg="FAILED to subscribe and send")
-        self.assertListenerAfter(unsubverb, numRcts=numRcts,
-                           errMsg="Incorrect responses from UNSUBSCRIBE")
-        self.assertListenerAfter(afterfun,
-                           errMsg="Still receiving messages")
-
-    def sub_and_send(self, dest, subid=None, receipt=None):
-        def subfun():
-            self.subscribe_dest(self.conn, dest, subid)
-            self.conn.send(dest, "test")
-        def unsubfun():
-            headers = {}
-            if receipt != None:
-                headers['receipt'] = receipt
-            self.unsubscribe_dest(self.conn, dest, subid, **headers)
-        return subfun, unsubfun
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/parsing.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/parsing.py
deleted file mode 100644 (file)
index 27326b6..0000000
+++ /dev/null
@@ -1,317 +0,0 @@
-'''
-Few tests for a rabbitmq-stomp adaptor. They intend to increase code coverage
-of the erlang stomp code.
-'''
-import unittest
-import re
-import socket
-import functools
-import time
-import sys
-
-def connect(cnames):
-    ''' Decorator that creates stomp connections and issues CONNECT '''
-    cmd=('CONNECT\n'
-        'login:guest\n'
-        'passcode:guest\n'
-        '\n'
-        '\n\0')
-    resp = ('CONNECTED\n'
-            'session:(.*)\n'
-            'heart-beat:0,0\n'
-            'server:RabbitMQ/(.*)\n'
-            'version:1.0\n'
-            '\n\x00')
-    def w(m):
-        @functools.wraps(m)
-        def wrapper(self, *args, **kwargs):
-            for cname in cnames:
-                sd = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-                sd.settimeout(30000)
-                sd.connect((self.host, self.port))
-                sd.sendall(cmd)
-                self.match(resp, sd.recv(4096))
-                setattr(self, cname, sd)
-            try:
-                r = m(self, *args, **kwargs)
-            finally:
-                for cname in cnames:
-                    try:
-                        getattr(self, cname).close()
-                    except IOError:
-                        pass
-            return r
-        return wrapper
-    return w
-
-
-class TestParsing(unittest.TestCase):
-    host='127.0.0.1'
-    port=61613
-
-
-    def match(self, pattern, data):
-        ''' helper: try to match a regexp with a string.
-            Fail test if they do not match.
-        '''
-        matched = re.match(pattern, data)
-        if matched:
-            return matched.groups()
-        self.assertTrue(False, 'No match:\n%r\n%r' % (pattern, data) )
-
-    def recv_atleast(self, bufsize):
-        recvhead = []
-        rl = bufsize
-        while rl > 0:
-            buf = self.cd.recv(rl)
-            bl = len(buf)
-            if bl==0: break
-            recvhead.append( buf )
-            rl -= bl
-        return ''.join(recvhead)
-
-
-    @connect(['cd'])
-    def test_newline_after_nul(self):
-        self.cd.sendall('\n'
-                        'SUBSCRIBE\n'
-                        'destination:/exchange/amq.fanout\n'
-                        '\n\x00\n'
-                        'SEND\n'
-                        'content-type:text/plain\n'
-                        'destination:/exchange/amq.fanout\n\n'
-                        'hello\n\x00\n')
-        resp = ('MESSAGE\n'
-                'destination:/exchange/amq.fanout\n'
-                'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
-                'redelivered:false\n'
-                'content-type:text/plain\n'
-                'content-length:6\n'
-                '\n'
-                'hello\n\0')
-        self.match(resp, self.cd.recv(4096))
-
-    @connect(['cd'])
-    def test_send_without_content_type(self):
-        self.cd.sendall('\n'
-                        'SUBSCRIBE\n'
-                        'destination:/exchange/amq.fanout\n'
-                        '\n\x00\n'
-                        'SEND\n'
-                        'destination:/exchange/amq.fanout\n\n'
-                        'hello\n\x00')
-        resp = ('MESSAGE\n'
-                'destination:/exchange/amq.fanout\n'
-                'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
-                'redelivered:false\n'
-                'content-length:6\n'
-                '\n'
-                'hello\n\0')
-        self.match(resp, self.cd.recv(4096))
-
-    @connect(['cd'])
-    def test_send_without_content_type_binary(self):
-        msg = u'\u0ca0\ufffd\x00\n\x01hello\x00'.encode('utf-8')
-        self.cd.sendall('\n'
-                        'SUBSCRIBE\n'
-                        'destination:/exchange/amq.fanout\n'
-                        '\n\x00\n'
-                        'SEND\n'
-                        'destination:/exchange/amq.fanout\n'
-                        'content-length:'+str(len(msg))+'\n\n'
-                        + msg + '\x00')
-        resp = ('MESSAGE\n'
-                'destination:/exchange/amq.fanout\n'
-                'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
-                'redelivered:false\n'
-                'content-length:'+str(len(msg))+'\n'
-                '\n'
-                + msg + '\0')
-        self.match(resp, self.cd.recv(4096))
-
-    @connect(['cd'])
-    def test_newline_after_nul_and_leading_nul(self):
-        self.cd.sendall('\n'
-                        '\x00SUBSCRIBE\n'
-                        'destination:/exchange/amq.fanout\n'
-                        '\n\x00\n'
-                        '\x00SEND\n'
-                        'destination:/exchange/amq.fanout\n'
-                        'content-type:text/plain\n'
-                        '\nhello\n\x00\n')
-        resp = ('MESSAGE\n'
-                'destination:/exchange/amq.fanout\n'
-                'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
-                'redelivered:false\n'
-                'content-type:text/plain\n'
-                'content-length:6\n'
-                '\n'
-                'hello\n\0')
-        self.match(resp, self.cd.recv(4096))
-
-    @connect(['cd'])
-    def test_bad_command(self):
-        ''' Trigger an error message. '''
-        self.cd.sendall('WRONGCOMMAND\n'
-                        'destination:a\n'
-                        'exchange:amq.fanout\n'
-                        '\n\0')
-        resp = ('ERROR\n'
-                'message:Bad command\n'
-                'content-type:text/plain\n'
-                'version:1.0,1.1,1.2\n'
-                'content-length:43\n'
-                '\n'
-                'Could not interpret command "WRONGCOMMAND"\n'
-                '\0')
-        self.match(resp, self.cd.recv(4096))
-
-    @connect(['sd', 'cd1', 'cd2'])
-    def test_broadcast(self):
-        ''' Single message should be delivered to two consumers:
-            amq.topic --routing_key--> first_queue --> first_connection
-                     \--routing_key--> second_queue--> second_connection
-        '''
-        subscribe=( 'SUBSCRIBE\n'
-                    'id: XsKNhAf\n'
-                    'destination:/exchange/amq.topic/da9d4779\n'
-                    '\n\0')
-        for cd in [self.cd1, self.cd2]:
-            cd.sendall(subscribe)
-
-        time.sleep(0.1)
-
-        self.sd.sendall('SEND\n'
-                        'content-type:text/plain\n'
-                        'destination:/exchange/amq.topic/da9d4779\n'
-                        '\n'
-                        'message'
-                        '\n\0')
-
-        resp=('MESSAGE\n'
-            'subscription:(.*)\n'
-            'destination:/topic/da9d4779\n'
-            'message-id:(.*)\n'
-            'redelivered:false\n'
-            'content-type:text/plain\n'
-            'content-length:8\n'
-            '\n'
-            'message'
-            '\n\x00')
-        for cd in [self.cd1, self.cd2]:
-            self.match(resp, cd.recv(4096))
-
-    @connect(['cd'])
-    def test_message_with_embedded_nulls(self):
-        ''' Test sending/receiving message with embedded nulls. '''
-        dest='destination:/exchange/amq.topic/test_embed_nulls_message\n'
-        resp_dest='destination:/topic/test_embed_nulls_message\n'
-        subscribe=( 'SUBSCRIBE\n'
-                    'id:xxx\n'
-                    +dest+
-                    '\n\0')
-        self.cd.sendall(subscribe)
-
-        boilerplate = '0123456789'*1024 # large enough boilerplate
-        message = '01'
-        oldi = 2
-        for i in [5, 90, 256-1, 384-1, 512, 1024, 1024+256+64+32]:
-            message = message + '\0' + boilerplate[oldi+1:i]
-            oldi = i
-        msg_len = len(message)
-
-        self.cd.sendall('SEND\n'
-                        +dest+
-                        'content-type:text/plain\n'
-                        'content-length:%i\n'
-                        '\n'
-                        '%s'
-                        '\0' % (len(message), message) )
-
-        headresp=('MESSAGE\n'            # 8
-            'subscription:(.*)\n'        # 14 + subscription
-            +resp_dest+                  # 44
-            'message-id:(.*)\n'          # 12 + message-id
-            'redelivered:false\n'        # 18
-            'content-type:text/plain\n'  # 24
-            'content-length:%i\n'        # 16 + 4==len('1024')
-            '\n'                         # 1
-            '(.*)$'                      # prefix of body+null (potentially)
-             % len(message) )
-        headlen = 8 + 24 + 14 + (3) + 44 + 12 + 18 + (48) + 16 + (4) + 1 + (1)
-
-        headbuf = self.recv_atleast(headlen)
-        self.assertFalse(len(headbuf) == 0)
-
-        (sub, msg_id, bodyprefix) = self.match(headresp, headbuf)
-        bodyresp=( '%s\0' % message )
-        bodylen = len(bodyresp);
-
-        bodybuf = ''.join([bodyprefix,
-                           self.recv_atleast(bodylen - len(bodyprefix))])
-
-        self.assertEqual(len(bodybuf), msg_len+1,
-            "body received not the same length as message sent")
-        self.assertEqual(bodybuf, bodyresp,
-            "   body (...'%s')\nincorrectly returned as (...'%s')"
-            % (bodyresp[-10:], bodybuf[-10:]))
-
-    @connect(['cd'])
-    def test_message_in_packets(self):
-        ''' Test sending/receiving message in packets. '''
-        base_dest='topic/test_embed_nulls_message\n'
-        dest='destination:/exchange/amq.' + base_dest
-        resp_dest='destination:/'+ base_dest
-        subscribe=( 'SUBSCRIBE\n'
-                    'id:xxx\n'
-                    +dest+
-                    '\n\0')
-        self.cd.sendall(subscribe)
-
-        boilerplate = '0123456789'*1024 # large enough boilerplate
-
-        message = boilerplate[:1024 + 512 + 256 + 32]
-        msg_len = len(message)
-
-        msg_to_send = ('SEND\n'
-                       +dest+
-                       'content-type:text/plain\n'
-                       '\n'
-                       '%s'
-                       '\0' % (message) )
-        packet_size = 191
-        part_index = 0
-        msg_to_send_len = len(msg_to_send)
-        while part_index < msg_to_send_len:
-            part = msg_to_send[part_index:part_index+packet_size]
-            time.sleep(0.1)
-            self.cd.sendall(part)
-            part_index += packet_size
-
-        headresp=('MESSAGE\n'           # 8
-            'subscription:(.*)\n'       # 14 + subscription
-            +resp_dest+                 # 44
-            'message-id:(.*)\n'         # 12 + message-id
-            'redelivered:false\n'       # 18
-            'content-type:text/plain\n' # 24
-            'content-length:%i\n'       # 16 + 4==len('1024')
-            '\n'                        # 1
-            '(.*)$'                     # prefix of body+null (potentially)
-             % len(message) )
-        headlen = 8 + 24 + 14 + (3) + 44 + 12 + 18 + (48) + 16 + (4) + 1 + (1)
-
-        headbuf = self.recv_atleast(headlen)
-        self.assertFalse(len(headbuf) == 0)
-
-        (sub, msg_id, bodyprefix) = self.match(headresp, headbuf)
-        bodyresp=( '%s\0' % message )
-        bodylen = len(bodyresp);
-
-        bodybuf = ''.join([bodyprefix,
-                           self.recv_atleast(bodylen - len(bodyprefix))])
-
-        self.assertEqual(len(bodybuf), msg_len+1,
-            "body received not the same length as message sent")
-        self.assertEqual(bodybuf, bodyresp,
-            "   body ('%s')\nincorrectly returned as ('%s')"
-            % (bodyresp, bodybuf))
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/queue_properties.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/queue_properties.py
deleted file mode 100644 (file)
index cc85487..0000000
+++ /dev/null
@@ -1,79 +0,0 @@
-import unittest
-import stomp
-import pika
-import base
-import time
-
-class TestQueueProperties(base.BaseTest):
-
-    def test_subscribe(self):
-        destination = "/queue/queue-properties-subscribe-test"
-
-        # subscribe
-        self.subscribe_dest(self.conn, destination, None,
-                            headers={
-                                'x-message-ttl': 60000,
-                                'x-expires': 70000,
-                                'x-max-length': 10,
-                                'x-max-length-bytes': 20000,
-                                'x-dead-letter-exchange': 'dead-letter-exchange',
-                                'x-dead-letter-routing-key': 'dead-letter-routing-key',
-                                'x-max-priority': 6,
-                                })
-
-        # now try to declare the queue using pika
-        # if the properties are the same we should
-        # not get any error
-        connection = pika.BlockingConnection(pika.ConnectionParameters(
-                    host='localhost'))
-        channel = connection.channel()
-        channel.queue_declare(queue='queue-properties-subscribe-test',
-                              durable=True,
-                              arguments={
-                                  'x-message-ttl': 60000,
-                                  'x-expires': 70000,
-                                  'x-max-length': 10,
-                                  'x-max-length-bytes': 20000,
-                                  'x-dead-letter-exchange': 'dead-letter-exchange',
-                                  'x-dead-letter-routing-key': 'dead-letter-routing-key',
-                                  'x-max-priority': 6,
-                                  })
-
-        self.conn.disconnect()
-        connection.close()
-
-    def test_send(self):
-        destination = "/queue/queue-properties-send-test"
-
-        # send
-        self.conn.send(destination, "test1",
-                       headers={
-                           'x-message-ttl': 60000,
-                           'x-expires': 70000,
-                           'x-max-length': 10,
-                           'x-max-length-bytes': 20000,
-                           'x-dead-letter-exchange': 'dead-letter-exchange',
-                           'x-dead-letter-routing-key': 'dead-letter-routing-key',
-                           'x-max-priority': 6,
-                           })
-
-        # now try to declare the queue using pika
-        # if the properties are the same we should
-        # not get any error
-        connection = pika.BlockingConnection(pika.ConnectionParameters(
-                    host='localhost'))
-        channel = connection.channel()
-        channel.queue_declare(queue='queue-properties-send-test',
-                              durable=True,
-                              arguments={
-                                  'x-message-ttl': 60000,
-                                  'x-expires': 70000,
-                                  'x-max-length': 10,
-                                  'x-max-length-bytes': 20000,
-                                  'x-dead-letter-exchange': 'dead-letter-exchange',
-                                  'x-dead-letter-routing-key': 'dead-letter-routing-key',
-                                  'x-max-priority': 6,
-                                  })
-
-        self.conn.disconnect()
-        connection.close()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_amqqueue_test.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_amqqueue_test.erl
deleted file mode 100644 (file)
index 42c18ed..0000000
+++ /dev/null
@@ -1,225 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_stomp_amqqueue_test).
--export([all_tests/0]).
--compile(export_all).
-
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
--include("rabbit_stomp.hrl").
--include("rabbit_stomp_frame.hrl").
--include("rabbit_stomp_headers.hrl").
-
--define(QUEUE, <<"TestQueue">>).
--define(DESTINATION, "/amq/queue/TestQueue").
-
-all_tests() ->
-    [[ok = run_test(TestFun, Version)
-      || TestFun <- [fun test_subscribe_error/3,
-                     fun test_subscribe/3,
-                     fun test_unsubscribe_ack/3,
-                     fun test_subscribe_ack/3,
-                     fun test_send/3,
-                     fun test_delete_queue_subscribe/3,
-                     fun test_temp_destination_queue/3,
-                     fun test_temp_destination_in_send/3,
-                     fun test_blank_destination_in_send/3]]
-     || Version <- ?SUPPORTED_VERSIONS],
-    ok.
-
-run_test(TestFun, Version) ->
-    {ok, Connection} = amqp_connection:start(#amqp_params_direct{}),
-    {ok, Channel} = amqp_connection:open_channel(Connection),
-    {ok, Client} = rabbit_stomp_client:connect(Version),
-
-    Result = (catch TestFun(Channel, Client, Version)),
-
-    rabbit_stomp_client:disconnect(Client),
-    amqp_channel:close(Channel),
-    amqp_connection:close(Connection),
-    Result.
-
-test_subscribe_error(_Channel, Client, _Version) ->
-    %% SUBSCRIBE to missing queue
-    rabbit_stomp_client:send(
-      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}]),
-    {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
-    "not_found" = proplists:get_value("message", Hdrs),
-    ok.
-
-test_subscribe(Channel, Client, _Version) ->
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = ?QUEUE,
-                                                    auto_delete = true}),
-
-    %% subscribe and wait for receipt
-    rabbit_stomp_client:send(
-      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
-    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
-
-    %% send from amqp
-    Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
-
-    amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
-                                                 payload = <<"hello">>}),
-
-    {ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
-    ok.
-
-test_unsubscribe_ack(Channel, Client, Version) ->
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = ?QUEUE,
-                                                    auto_delete = true}),
-    %% subscribe and wait for receipt
-    rabbit_stomp_client:send(
-      Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
-                            {"receipt", "rcpt1"},
-                            {"ack", "client"},
-                            {"id", "subscription-id"}]),
-    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
-
-    %% send from amqp
-    Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
-
-    amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
-                                                 payload = <<"hello">>}),
-
-    {ok, Client2, Hdrs1, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
-
-    rabbit_stomp_client:send(
-      Client2, "UNSUBSCRIBE", [{"destination", ?DESTINATION},
-                              {"id", "subscription-id"}]),
-
-    rabbit_stomp_client:send(
-      Client2, "ACK", [{rabbit_stomp_util:ack_header_name(Version),
-                        proplists:get_value(
-                          rabbit_stomp_util:msg_header_name(Version), Hdrs1)},
-                       {"receipt", "rcpt2"}]),
-
-    {ok, _Client3, Hdrs2, _Body2} = stomp_receive(Client2, "ERROR"),
-    ?assertEqual("Subscription not found",
-                 proplists:get_value("message", Hdrs2)),
-    ok.
-
-test_subscribe_ack(Channel, Client, Version) ->
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = ?QUEUE,
-                                                    auto_delete = true}),
-
-    %% subscribe and wait for receipt
-    rabbit_stomp_client:send(
-      Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
-                            {"receipt",     "foo"},
-                            {"ack",         "client"}]),
-    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
-
-    %% send from amqp
-    Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
-
-    amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
-                                                 payload = <<"hello">>}),
-
-    {ok, _Client2, Headers, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
-    false = (Version == "1.2") xor proplists:is_defined(?HEADER_ACK, Headers),
-
-    MsgHeader = rabbit_stomp_util:msg_header_name(Version),
-    AckValue  = proplists:get_value(MsgHeader, Headers),
-    AckHeader = rabbit_stomp_util:ack_header_name(Version),
-
-    rabbit_stomp_client:send(Client, "ACK", [{AckHeader, AckValue}]),
-    #'basic.get_empty'{} =
-        amqp_channel:call(Channel, #'basic.get'{queue = ?QUEUE}),
-    ok.
-
-test_send(Channel, Client, _Version) ->
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = ?QUEUE,
-                                                    auto_delete = true}),
-
-    %% subscribe and wait for receipt
-    rabbit_stomp_client:send(
-      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
-    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
-
-    %% send from stomp
-    rabbit_stomp_client:send(
-      Client1, "SEND", [{"destination", ?DESTINATION}], ["hello"]),
-
-    {ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
-    ok.
-
-test_delete_queue_subscribe(Channel, Client, _Version) ->
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = ?QUEUE,
-                                                    auto_delete = true}),
-
-    %% subscribe and wait for receipt
-    rabbit_stomp_client:send(
-      Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "bah"}]),
-    {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
-
-    %% delete queue while subscribed
-    #'queue.delete_ok'{} =
-        amqp_channel:call(Channel, #'queue.delete'{queue = ?QUEUE}),
-
-    {ok, _Client2, Headers, _} = stomp_receive(Client1, "ERROR"),
-
-    ?DESTINATION = proplists:get_value("subscription", Headers),
-
-    % server closes connection
-    ok.
-
-test_temp_destination_queue(Channel, Client, _Version) ->
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = ?QUEUE,
-                                                    auto_delete = true}),
-    rabbit_stomp_client:send( Client, "SEND", [{"destination", ?DESTINATION},
-                                               {"reply-to", "/temp-queue/foo"}],
-                                              ["ping"]),
-    amqp_channel:call(Channel,#'basic.consume'{queue  = ?QUEUE, no_ack = true}),
-    receive #'basic.consume_ok'{consumer_tag = _Tag} -> ok end,
-    receive {#'basic.deliver'{delivery_tag = _DTag},
-             #'amqp_msg'{payload = <<"ping">>,
-                         props   = #'P_basic'{reply_to = ReplyTo}}} -> ok
-    end,
-    ok = amqp_channel:call(Channel,
-                           #'basic.publish'{routing_key = ReplyTo},
-                           #amqp_msg{payload = <<"pong">>}),
-    {ok, _Client1, _, [<<"pong">>]} = stomp_receive(Client, "MESSAGE"),
-    ok.
-
-test_temp_destination_in_send(_Channel, Client, _Version) ->
-    rabbit_stomp_client:send( Client, "SEND", [{"destination", "/temp-queue/foo"}],
-                                              ["poing"]),
-    {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
-    "Invalid destination" = proplists:get_value("message", Hdrs),
-    ok.
-
-test_blank_destination_in_send(_Channel, Client, _Version) ->
-    rabbit_stomp_client:send( Client, "SEND", [{"destination", ""}],
-                                              ["poing"]),
-    {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
-    "Invalid destination" = proplists:get_value("message", Hdrs),
-    ok.
-
-stomp_receive(Client, Command) ->
-    {#stomp_frame{command     = Command,
-                  headers     = Hdrs,
-                  body_iolist = Body},   Client1} =
-    rabbit_stomp_client:recv(Client),
-    {ok, Client1, Hdrs, Body}.
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_client.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_client.erl
deleted file mode 100644 (file)
index 2ae0699..0000000
+++ /dev/null
@@ -1,83 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developers of the Original Code are Rabbit Technologies Ltd.
-%%
-%%   Copyright (C) 2011 Rabbit Technologies Ltd.
-%%
-%%   All Rights Reserved.
-%%
-%%   Contributor(s): ______________________________________.
-%%
-
-%% The stupidest client imaginable, just for testing.
-
--module(rabbit_stomp_client).
-
--export([connect/0, connect/1, disconnect/1, send/2, send/3, send/4, recv/1]).
-
--include("rabbit_stomp_frame.hrl").
-
--define(TIMEOUT, 1000). % milliseconds
-
-connect()  -> connect0([]).
-connect(V) -> connect0([{"accept-version", V}]).
-
-connect0(Version) ->
-    {ok, Sock} = gen_tcp:connect(localhost, 61613, [{active, false}, binary]),
-    Client0 = recv_state(Sock),
-    send(Client0, "CONNECT", [{"login", "guest"},
-                              {"passcode", "guest"} | Version]),
-    {#stomp_frame{command = "CONNECTED"}, Client1} = recv(Client0),
-    {ok, Client1}.
-
-disconnect(Client = {Sock, _}) ->
-    send(Client, "DISCONNECT"),
-    gen_tcp:close(Sock).
-
-send(Client, Command) ->
-    send(Client, Command, []).
-
-send(Client, Command, Headers) ->
-    send(Client, Command, Headers, []).
-
-send({Sock, _}, Command, Headers, Body) ->
-    Frame = rabbit_stomp_frame:serialize(
-              #stomp_frame{command     = list_to_binary(Command),
-                           headers     = Headers,
-                           body_iolist = Body}),
-    gen_tcp:send(Sock, Frame).
-
-recv_state(Sock) ->
-    {Sock, []}.
-
-recv({_Sock, []} = Client) ->
-    recv(Client, rabbit_stomp_frame:initial_state(), 0);
-recv({Sock, [Frame | Frames]}) ->
-    {Frame, {Sock, Frames}}.
-
-recv(Client = {Sock, _}, FrameState, Length) ->
-    {ok, Payload} = gen_tcp:recv(Sock, Length, ?TIMEOUT),
-    parse(Payload, Client, FrameState, Length).
-
-parse(Payload, Client = {Sock, FramesRev}, FrameState, Length) ->
-    case rabbit_stomp_frame:parse(Payload, FrameState) of
-        {ok, Frame, <<>>} ->
-            recv({Sock, lists:reverse([Frame | FramesRev])});
-        {ok, Frame, <<"\n">>} ->
-            recv({Sock, lists:reverse([Frame | FramesRev])});
-        {ok, Frame, Rest} ->
-            parse(Rest, {Sock, [Frame | FramesRev]},
-                  rabbit_stomp_frame:initial_state(), Length);
-        {more, NewState} ->
-            recv(Client, NewState, 0)
-    end.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_publish_test.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_publish_test.erl
deleted file mode 100644 (file)
index a67e9aa..0000000
+++ /dev/null
@@ -1,88 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developers of the Original Code are Rabbit Technologies Ltd.
-%%
-%%   Copyright (C) 2011 Rabbit Technologies Ltd.
-%%
-%%   All Rights Reserved.
-%%
-%%   Contributor(s): ______________________________________.
-%%
--module(rabbit_stomp_publish_test).
-
--export([run/0]).
-
--include("rabbit_stomp_frame.hrl").
-
--define(DESTINATION, "/queue/test").
-
--define(MICROS_PER_UPDATE,     5000000).
--define(MICROS_PER_UPDATE_MSG, 100000).
--define(MICROS_PER_SECOND,     1000000).
-
-%% A very simple publish-and-consume-as-fast-as-you-can test.
-
-run() ->
-    [put(K, 0) || K <- [sent, recd, last_sent, last_recd]],
-    put(last_ts, os:timestamp()),
-    {ok, Pub} = rabbit_stomp_client:connect(),
-    {ok, Recv} = rabbit_stomp_client:connect(),
-    Self = self(),
-    spawn(fun() -> publish(Self, Pub, 0, os:timestamp()) end),
-    rabbit_stomp_client:send(
-      Recv, "SUBSCRIBE", [{"destination", ?DESTINATION}]),
-    spawn(fun() -> recv(Self, Recv, 0, os:timestamp()) end),
-    report().
-
-report() ->
-    receive
-        {sent, C} -> put(sent, C);
-        {recd, C} -> put(recd, C)
-    end,
-    Diff = timer:now_diff(os:timestamp(), get(last_ts)),
-    case Diff > ?MICROS_PER_UPDATE of
-        true  -> S = get(sent) - get(last_sent),
-                 R = get(recd) - get(last_recd),
-                 put(last_sent, get(sent)),
-                 put(last_recd, get(recd)),
-                 put(last_ts, os:timestamp()),
-                 io:format("Send ~p msg/s | Recv ~p msg/s~n",
-                           [trunc(S * ?MICROS_PER_SECOND / Diff),
-                            trunc(R * ?MICROS_PER_SECOND / Diff)]);
-        false -> ok
-    end,
-    report().
-
-publish(Owner, Client, Count, TS) ->
-    rabbit_stomp_client:send(
-      Client, "SEND", [{"destination", ?DESTINATION}],
-      [integer_to_list(Count)]),
-    Diff = timer:now_diff(os:timestamp(), TS),
-    case Diff > ?MICROS_PER_UPDATE_MSG of
-        true  -> Owner ! {sent, Count + 1},
-                 publish(Owner, Client, Count + 1, os:timestamp());
-        false -> publish(Owner, Client, Count + 1, TS)
-    end.
-
-recv(Owner, Client0, Count, TS) ->
-    {#stomp_frame{body_iolist = Body}, Client1} =
-        rabbit_stomp_client:recv(Client0),
-    BodyInt = list_to_integer(binary_to_list(iolist_to_binary(Body))),
-    Count = BodyInt,
-    Diff = timer:now_diff(os:timestamp(), TS),
-    case Diff > ?MICROS_PER_UPDATE_MSG of
-        true  -> Owner ! {recd, Count + 1},
-                 recv(Owner, Client1, Count + 1, os:timestamp());
-        false -> recv(Owner, Client1, Count + 1, TS)
-    end.
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test.erl
deleted file mode 100644 (file)
index 2f5b580..0000000
+++ /dev/null
@@ -1,65 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_stomp_test).
--export([all_tests/0]).
--import(rabbit_misc, [pget/2]).
-
--include_lib("amqp_client/include/amqp_client.hrl").
--include("rabbit_stomp_frame.hrl").
--define(DESTINATION, "/queue/bulk-test").
-
-all_tests() ->
-    test_messages_not_dropped_on_disconnect(),
-    test_direct_client_connections_are_not_leaked(),
-    ok.
-
--define(GARBAGE, <<"bdaf63dda9d78b075c748b740e7c3510ad203b07\nbdaf63dd">>).
-
-count_connections() ->
-    length(supervisor2:which_children(rabbit_stomp_client_sup_sup)).
-
-test_direct_client_connections_are_not_leaked() ->
-    N = count_connections(),
-    lists:foreach(fun (_) ->
-                          {ok, Client = {Socket, _}} = rabbit_stomp_client:connect(),
-                          %% send garbage which trips up the parser
-                          gen_tcp:send(Socket, ?GARBAGE),
-                          rabbit_stomp_client:send(
-                           Client, "LOL", [{"", ""}])
-                  end,
-                  lists:seq(1, 100)),
-    timer:sleep(5000),
-    N = count_connections(),
-    ok.
-
-test_messages_not_dropped_on_disconnect() ->
-    N = count_connections(),
-    {ok, Client} = rabbit_stomp_client:connect(),
-    N1 = N + 1,
-    N1 = count_connections(),
-    [rabbit_stomp_client:send(
-       Client, "SEND", [{"destination", ?DESTINATION}],
-       [integer_to_list(Count)]) || Count <- lists:seq(1, 1000)],
-    rabbit_stomp_client:disconnect(Client),
-    QName = rabbit_misc:r(<<"/">>, queue, <<"bulk-test">>),
-    timer:sleep(3000),
-    N = count_connections(),
-    rabbit_amqqueue:with(
-      QName, fun(Q) ->
-                     1000 = pget(messages, rabbit_amqqueue:info(Q, [messages]))
-             end),
-    ok.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test_frame.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test_frame.erl
deleted file mode 100644 (file)
index c53fff7..0000000
+++ /dev/null
@@ -1,171 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_stomp_test_frame).
-
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
--include("rabbit_stomp_frame.hrl").
--include("rabbit_stomp_headers.hrl").
-
-parse_simple_frame_test() ->
-    parse_simple_frame_gen("\n").
-
-parse_simple_frame_crlf_test() ->
-    parse_simple_frame_gen("\r\n").
-
-parse_simple_frame_gen(Term) ->
-    Headers = [{"header1", "value1"}, {"header2", "value2"}],
-    Content = frame_string("COMMAND",
-                           Headers,
-                           "Body Content",
-                           Term),
-    {"COMMAND", Frame, _State} = parse_complete(Content),
-    [?assertEqual({ok, Value},
-                  rabbit_stomp_frame:header(Frame, Key)) ||
-        {Key, Value} <- Headers],
-    #stomp_frame{body_iolist = Body} = Frame,
-    ?assertEqual(<<"Body Content">>, iolist_to_binary(Body)).
-
-parse_command_only_test() ->
-    {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("COMMAND\n\n\0").
-
-parse_ignore_empty_frames_test() ->
-    {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("\0\0COMMAND\n\n\0").
-
-parse_heartbeat_interframe_test() ->
-    {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("\nCOMMAND\n\n\0").
-
-parse_crlf_interframe_test() ->
-    {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("\r\nCOMMAND\n\n\0").
-
-parse_carriage_return_not_ignored_interframe_test() ->
-    {error, {unexpected_chars_between_frames, "\rC"}} = parse("\rCOMMAND\n\n\0").
-
-parse_carriage_return_mid_command_test() ->
-    {error, {unexpected_chars_in_command, "\rA"}} = parse("COMM\rAND\n\n\0").
-
-parse_carriage_return_end_command_test() ->
-    {error, {unexpected_chars_in_command, "\r\r"}} = parse("COMMAND\r\r\n\n\0").
-
-parse_resume_mid_command_test() ->
-    First = "COMM",
-    Second = "AND\n\n\0",
-    {more, Resume} = parse(First),
-    {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse(Second, Resume).
-
-parse_resume_mid_header_key_test() ->
-    First = "COMMAND\nheade",
-    Second = "r1:value1\n\n\0",
-    {more, Resume} = parse(First),
-    {ok, Frame = #stomp_frame{command = "COMMAND"}, _Rest} =
-        parse(Second, Resume),
-    ?assertEqual({ok, "value1"},
-                 rabbit_stomp_frame:header(Frame, "header1")).
-
-parse_resume_mid_header_val_test() ->
-    First = "COMMAND\nheader1:val",
-    Second = "ue1\n\n\0",
-    {more, Resume} = parse(First),
-    {ok, Frame = #stomp_frame{command = "COMMAND"}, _Rest} =
-        parse(Second, Resume),
-    ?assertEqual({ok, "value1"},
-                 rabbit_stomp_frame:header(Frame, "header1")).
-
-parse_resume_mid_body_test() ->
-    First = "COMMAND\n\nABC",
-    Second = "DEF\0",
-    {more, Resume} = parse(First),
-    {ok, #stomp_frame{command = "COMMAND", body_iolist = Body}, _Rest} =
-         parse(Second, Resume),
-    ?assertEqual([<<"ABC">>, <<"DEF">>], Body).
-
-parse_no_header_stripping_test() ->
-    Content = "COMMAND\nheader: foo \n\n\0",
-    {ok, Frame, _} = parse(Content),
-    {ok, Val} = rabbit_stomp_frame:header(Frame, "header"),
-    ?assertEqual(" foo ", Val).
-
-parse_multiple_headers_test() ->
-    Content = "COMMAND\nheader:correct\nheader:incorrect\n\n\0",
-    {ok, Frame, _} = parse(Content),
-    {ok, Val} = rabbit_stomp_frame:header(Frame, "header"),
-    ?assertEqual("correct", Val).
-
-header_no_colon_test() ->
-    Content = "COMMAND\n"
-              "hdr1:val1\n"
-              "hdrerror\n"
-              "hdr2:val2\n"
-              "\n\0",
-    ?assertEqual(parse(Content), {error, {header_no_value, "hdrerror"}}).
-
-no_nested_escapes_test() ->
-    Content = "COM\\\\rAND\n"      % no escapes
-              "hdr\\\\rname:"      % one escape
-              "hdr\\\\rval\n\n\0", % one escape
-    {ok, Frame, _} = parse(Content),
-    ?assertEqual(Frame,
-                 #stomp_frame{command = "COM\\\\rAND",
-                              headers = [{"hdr\\rname", "hdr\\rval"}],
-                              body_iolist = []}).
-
-header_name_with_cr_test() ->
-    Content = "COMMAND\nhead\rer:val\n\n\0",
-    {error, {unexpected_chars_in_header, "\re"}} = parse(Content).
-
-header_value_with_cr_test() ->
-    Content = "COMMAND\nheader:val\rue\n\n\0",
-    {error, {unexpected_chars_in_header, "\ru"}} = parse(Content).
-
-header_value_with_colon_test() ->
-    Content = "COMMAND\nheader:val:ue\n\n\0",
-    {ok, Frame, _} = parse(Content),
-    ?assertEqual(Frame,
-                 #stomp_frame{ command     = "COMMAND",
-                               headers     = [{"header", "val:ue"}],
-                               body_iolist = []}).
-
-test_frame_serialization(Expected, TrailingLF) ->
-    {ok, Frame, _} = parse(Expected),
-    {ok, Val} = rabbit_stomp_frame:header(Frame, "head\r:\ner"),
-    ?assertEqual(":\n\r\\", Val),
-    Serialized = lists:flatten(rabbit_stomp_frame:serialize(Frame, TrailingLF)),
-    ?assertEqual(Expected, rabbit_misc:format("~s", [Serialized])).
-
-headers_escaping_roundtrip_test() ->
-    test_frame_serialization("COMMAND\nhead\\r\\c\\ner:\\c\\n\\r\\\\\n\n\0\n", true).
-
-headers_escaping_roundtrip_without_trailing_lf_test() ->
-    test_frame_serialization("COMMAND\nhead\\r\\c\\ner:\\c\\n\\r\\\\\n\n\0", false).
-
-parse(Content) ->
-    parse(Content, rabbit_stomp_frame:initial_state()).
-parse(Content, State) ->
-    rabbit_stomp_frame:parse(list_to_binary(Content), State).
-
-parse_complete(Content) ->
-    {ok, Frame = #stomp_frame{command = Command}, State} = parse(Content),
-    {Command, Frame, State}.
-
-frame_string(Command, Headers, BodyContent) ->
-    frame_string(Command, Headers, BodyContent, "\n").
-
-frame_string(Command, Headers, BodyContent, Term) ->
-    HeaderString =
-        lists:flatten([Key ++ ":" ++ Value ++ Term || {Key, Value} <- Headers]),
-    Command ++ Term ++ HeaderString ++ Term ++ BodyContent ++ "\0" ++ "\n".
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test_util.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/rabbit_stomp_test_util.erl
deleted file mode 100644 (file)
index a25e306..0000000
+++ /dev/null
@@ -1,226 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_stomp_test_util).
-
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
--include_lib("amqp_client/include/rabbit_routing_prefixes.hrl").
--include("rabbit_stomp_frame.hrl").
-
-%%--------------------------------------------------------------------
-%% Header Processing Tests
-%%--------------------------------------------------------------------
-
-longstr_field_test() ->
-    {<<"ABC">>, longstr, <<"DEF">>} =
-        rabbit_stomp_util:longstr_field("ABC", "DEF").
-
-message_properties_test() ->
-    Headers = [
-                {"content-type", "text/plain"},
-                {"content-encoding", "UTF-8"},
-                {"persistent", "true"},
-                {"priority", "1"},
-                {"correlation-id", "123"},
-                {"reply-to", "something"},
-                {"expiration", "my-expiration"},
-                {"amqp-message-id", "M123"},
-                {"timestamp", "123456"},
-                {"type", "freshly-squeezed"},
-                {"user-id", "joe"},
-                {"app-id", "joe's app"},
-                {"str", "foo"},
-                {"int", "123"}
-              ],
-
-    #'P_basic'{
-                content_type     = <<"text/plain">>,
-                content_encoding = <<"UTF-8">>,
-                delivery_mode    = 2,
-                priority         = 1,
-                correlation_id   = <<"123">>,
-                reply_to         = <<"something">>,
-                expiration       = <<"my-expiration">>,
-                message_id       = <<"M123">>,
-                timestamp        = 123456,
-                type             = <<"freshly-squeezed">>,
-                user_id          = <<"joe">>,
-                app_id           = <<"joe's app">>,
-                headers          = [{<<"str">>, longstr, <<"foo">>},
-                                    {<<"int">>, longstr, <<"123">>}]
-              } =
-        rabbit_stomp_util:message_properties(#stomp_frame{headers = Headers}).
-
-message_headers_test() ->
-    Properties = #'P_basic'{
-      headers          = [{<<"str">>, longstr, <<"foo">>},
-                          {<<"int">>, signedint, 123}],
-      content_type     = <<"text/plain">>,
-      content_encoding = <<"UTF-8">>,
-      delivery_mode    = 2,
-      priority         = 1,
-      correlation_id   = 123,
-      reply_to         = <<"something">>,
-      message_id       = <<"M123">>,
-      timestamp        = 123456,
-      type             = <<"freshly-squeezed">>,
-      user_id          = <<"joe">>,
-      app_id           = <<"joe's app">>},
-
-    Headers = rabbit_stomp_util:message_headers(Properties),
-
-    Expected = [
-                {"content-type", "text/plain"},
-                {"content-encoding", "UTF-8"},
-                {"persistent", "true"},
-                {"priority", "1"},
-                {"correlation-id", "123"},
-                {"reply-to", "something"},
-                {"expiration", "my-expiration"},
-                {"amqp-message-id", "M123"},
-                {"timestamp", "123456"},
-                {"type", "freshly-squeezed"},
-                {"user-id", "joe"},
-                {"app-id", "joe's app"},
-                {"str", "foo"},
-                {"int", "123"}
-               ],
-
-    [] = lists:subtract(Headers, Expected).
-
-minimal_message_headers_with_no_custom_test() ->
-    Delivery = #'basic.deliver'{
-      consumer_tag = <<"Q_123">>,
-      delivery_tag = 123,
-      exchange = <<"">>,
-      routing_key = <<"foo">>},
-
-    Properties = #'P_basic'{},
-
-    Headers = rabbit_stomp_util:message_headers(Properties),
-    Expected = [
-                {"content-type", "text/plain"},
-                {"content-encoding", "UTF-8"},
-                {"amqp-message-id", "M123"}
-               ],
-
-    [] = lists:subtract(Headers, Expected).
-
-headers_post_process_test() ->
-    Headers  = [{"header1", "1"},
-                {"header2", "12"},
-                {"reply-to", "something"}],
-    Expected = [{"header1", "1"},
-                {"header2", "12"},
-                {"reply-to", "/reply-queue/something"}],
-    [] = lists:subtract(
-           rabbit_stomp_util:headers_post_process(Headers), Expected).
-
-headers_post_process_noop_replyto_test() ->
-    [begin
-         Headers = [{"reply-to", Prefix ++ "/something"}],
-         Headers = rabbit_stomp_util:headers_post_process(Headers)
-     end || Prefix <- rabbit_routing_util:dest_prefixes()].
-
-headers_post_process_noop2_test() ->
-    Headers  = [{"header1", "1"},
-                {"header2", "12"}],
-    Expected = [{"header1", "1"},
-                {"header2", "12"}],
-    [] = lists:subtract(
-           rabbit_stomp_util:headers_post_process(Headers), Expected).
-
-negotiate_version_both_empty_test() ->
-    {error, no_common_version} = rabbit_stomp_util:negotiate_version([],[]).
-
-negotiate_version_no_common_test() ->
-    {error, no_common_version} =
-        rabbit_stomp_util:negotiate_version(["1.2"],["1.3"]).
-
-negotiate_version_simple_common_test() ->
-    {ok, "1.2"} =
-        rabbit_stomp_util:negotiate_version(["1.2"],["1.2"]).
-
-negotiate_version_two_choice_common_test() ->
-    {ok, "1.3"} =
-        rabbit_stomp_util:negotiate_version(["1.2", "1.3"],["1.2", "1.3"]).
-
-negotiate_version_two_choice_common_out_of_order_test() ->
-    {ok, "1.3"} =
-        rabbit_stomp_util:negotiate_version(["1.3", "1.2"],["1.2", "1.3"]).
-
-negotiate_version_two_choice_big_common_test() ->
-    {ok, "1.20.23"} =
-        rabbit_stomp_util:negotiate_version(["1.20.23", "1.30.456"],
-                                            ["1.20.23", "1.30.457"]).
-negotiate_version_choice_mismatched_length_test() ->
-    {ok, "1.2.3"} =
-        rabbit_stomp_util:negotiate_version(["1.2", "1.2.3"],
-                                            ["1.2.3", "1.2"]).
-negotiate_version_choice_duplicates_test() ->
-    {ok, "1.2"} =
-        rabbit_stomp_util:negotiate_version(["1.2", "1.2"],
-                                            ["1.2", "1.2"]).
-trim_headers_test() ->
-    #stomp_frame{headers = [{"one", "foo"}, {"two", "baz "}]} =
-        rabbit_stomp_util:trim_headers(
-          #stomp_frame{headers = [{"one", "  foo"}, {"two", " baz "}]}).
-
-%%--------------------------------------------------------------------
-%% Frame Parsing Tests
-%%--------------------------------------------------------------------
-
-ack_mode_auto_test() ->
-    Frame = #stomp_frame{headers = [{"ack", "auto"}]},
-    {auto, _} = rabbit_stomp_util:ack_mode(Frame).
-
-ack_mode_auto_default_test() ->
-    Frame = #stomp_frame{headers = []},
-    {auto, _} = rabbit_stomp_util:ack_mode(Frame).
-
-ack_mode_client_test() ->
-    Frame = #stomp_frame{headers = [{"ack", "client"}]},
-    {client, true} = rabbit_stomp_util:ack_mode(Frame).
-
-ack_mode_client_individual_test() ->
-    Frame = #stomp_frame{headers = [{"ack", "client-individual"}]},
-    {client, false} = rabbit_stomp_util:ack_mode(Frame).
-
-consumer_tag_id_test() ->
-    Frame = #stomp_frame{headers = [{"id", "foo"}]},
-    {ok, <<"T_foo">>, _} = rabbit_stomp_util:consumer_tag(Frame).
-
-consumer_tag_destination_test() ->
-    Frame = #stomp_frame{headers = [{"destination", "foo"}]},
-    {ok, <<"Q_foo">>, _} = rabbit_stomp_util:consumer_tag(Frame).
-
-consumer_tag_invalid_test() ->
-    Frame = #stomp_frame{headers = []},
-    {error, missing_destination_header} = rabbit_stomp_util:consumer_tag(Frame).
-
-%%--------------------------------------------------------------------
-%% Message ID Parsing Tests
-%%--------------------------------------------------------------------
-
-parse_valid_message_id_test() ->
-    {ok, {<<"bar">>, "abc", 123}} =
-        rabbit_stomp_util:parse_message_id("bar@@abc@@123").
-
-parse_invalid_message_id_test() ->
-    {error, invalid_message_id} =
-        rabbit_stomp_util:parse_message_id("blah").
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/redelivered.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/redelivered.py
deleted file mode 100644 (file)
index bbabd3f..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-import unittest
-import stomp
-import base
-import time
-
-class TestRedelivered(base.BaseTest):
-
-    def test_redelivered(self):
-        destination = "/queue/redelivered-test"
-
-        # subscribe and send message
-        self.subscribe_dest(self.conn, destination, None, ack='client')
-        self.conn.send(destination, "test1")
-        self.assertTrue(self.listener.await(4), "initial message not received")
-        self.assertEquals(1, len(self.listener.messages))
-        self.assertEquals('false', self.listener.messages[0]['headers']['redelivered'])
-
-        # disconnect with no ack
-        self.conn.disconnect()
-
-        # now reconnect
-        conn2 = self.create_connection()
-        try:
-            listener2 = base.WaitableListener()
-            listener2.reset(1)
-            conn2.set_listener('', listener2)
-            self.subscribe_dest(conn2, destination, None, ack='client')
-            self.assertTrue(listener2.await(), "message not received again")
-            self.assertEquals(1, len(listener2.messages))
-            self.assertEquals('true', listener2.messages[0]['headers']['redelivered'])
-        finally:
-            conn2.disconnect()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/reliability.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/reliability.py
deleted file mode 100644 (file)
index b8bb150..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-import base
-import stomp
-import unittest
-import time
-
-class TestReliability(base.BaseTest):
-
-    def test_send_and_disconnect(self):
-        ''' Test close socket after send does not lose messages '''
-        destination = "/queue/reliability"
-        pub_conn = self.create_connection()
-        try:
-            msg = "0" * (128)
-
-            count = 10000
-
-            listener = base.WaitableListener()
-            listener.reset(count)
-            self.conn.set_listener('', listener)
-            self.subscribe_dest(self.conn, destination, None)
-
-            for x in range(0, count):
-                pub_conn.send(destination, msg + str(x))
-            time.sleep(2.0)
-            pub_conn.disconnect()
-
-            if listener.await(30):
-                self.assertEquals(count, len(listener.messages))
-            else:
-                listener.print_state("Final state of listener:")
-                self.fail("Did not receive %s messages in time" % count)
-        finally:
-            if pub_conn.is_connected():
-                pub_conn.disconnect()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/ssl_lifecycle.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/ssl_lifecycle.py
deleted file mode 100644 (file)
index 53636df..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-import unittest
-import os
-import os.path
-import sys
-
-import stomp
-import base
-import ssl
-
-
-base_path = os.path.dirname(sys.argv[0])
-
-ssl_key_file = os.path.abspath(base_path + "/../certs/client/key.pem")
-ssl_cert_file = os.path.abspath(base_path + "/../certs/client/cert.pem")
-ssl_ca_certs = os.path.abspath(base_path  + "/../certs/testca/cacert.pem")
-
-class TestSslClient(unittest.TestCase):
-
-    def __ssl_connect(self):
-        conn = stomp.Connection(host_and_ports = [ ('localhost', 61614) ],
-                                use_ssl = True, ssl_key_file = ssl_key_file,
-                                ssl_cert_file = ssl_cert_file,
-                                ssl_ca_certs = ssl_ca_certs)
-        print "FILE: ", ssl_cert_file
-        conn.start()
-        conn.connect("guest", "guest")
-        return conn
-
-    def __ssl_auth_connect(self):
-        conn = stomp.Connection(host_and_ports = [ ('localhost', 61614) ],
-                                use_ssl = True, ssl_key_file = ssl_key_file,
-                                ssl_cert_file = ssl_cert_file,
-                                ssl_ca_certs = ssl_ca_certs)
-        conn.start()
-        conn.connect()
-        return conn
-
-    def test_ssl_connect(self):
-        conn = self.__ssl_connect()
-        conn.disconnect()
-
-    def test_ssl_auth_connect(self):
-        conn = self.__ssl_auth_connect()
-        conn.disconnect()
-
-    def test_ssl_send_receive(self):
-        conn = self.__ssl_connect()
-        self.__test_conn(conn)
-
-    def test_ssl_auth_send_receive(self):
-        conn = self.__ssl_auth_connect()
-        self.__test_conn(conn)
-
-    def __test_conn(self, conn):
-        try:
-            listener = base.WaitableListener()
-
-            conn.set_listener('', listener)
-
-            d = "/topic/ssl.test"
-            conn.subscribe(destination=d, ack="auto", id="ctag", receipt="sub")
-
-            self.assertTrue(listener.await(1))
-
-            self.assertEquals("sub",
-                              listener.receipts[0]['headers']['receipt-id'])
-
-            listener.reset(1)
-            conn.send(body="Hello SSL!", destination=d)
-
-            self.assertTrue(listener.await())
-
-            self.assertEquals("Hello SSL!", listener.messages[0]['message'])
-        finally:
-            conn.disconnect()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test.config b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test.config
deleted file mode 100644 (file)
index 4fd77fb..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-[{rabbitmq_stomp, [{default_user,     []},
-                   {ssl_cert_login,   true},
-                   {ssl_listeners,    [61614]}
-                  ]},
- {rabbit, [{ssl_options, [{cacertfile,"%%CERTS_DIR%%/testca/cacert.pem"},
-                          {certfile,"%%CERTS_DIR%%/server/cert.pem"},
-                          {keyfile,"%%CERTS_DIR%%/server/key.pem"},
-                          {verify,verify_peer},
-                          {fail_if_no_peer_cert,true}
-                         ]}
-          ]}
-].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test.py
deleted file mode 100755 (executable)
index 381c5b4..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-
-import test_runner
-
-if __name__ == '__main__':
-    modules = [
-        'ack',
-        'destinations',
-        'errors',
-        'lifecycle',
-        'parsing',
-        'queue_properties',
-        'redelivered',
-        'reliability',
-        'transactions',
-    ]
-    test_runner.run_unittests(modules)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_connect_options.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_connect_options.py
deleted file mode 100755 (executable)
index 6822f97..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-
-import test_runner
-
-if __name__ == '__main__':
-    modules = ['connect_options']
-    test_runner.run_unittests(modules)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_runner.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_runner.py
deleted file mode 100644 (file)
index 90a5456..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-import unittest
-import sys
-import os
-
-def add_deps_to_path():
-    deps_dir = os.path.realpath(os.path.join(__file__, "..", "..", "..", "deps"))
-    sys.path.append(os.path.join(deps_dir, "stomppy", "stomppy"))
-    sys.path.append(os.path.join(deps_dir, "pika", "pika"))
-
-def run_unittests(modules):
-    add_deps_to_path()
-
-    suite = unittest.TestSuite()
-    for m in modules:
-        mod = __import__(m)
-        for name in dir(mod):
-            obj = getattr(mod, name)
-            if name.startswith("Test") and issubclass(obj, unittest.TestCase):
-                suite.addTest(unittest.TestLoader().loadTestsFromTestCase(obj))
-
-    ts = unittest.TextTestRunner().run(unittest.TestSuite(suite))
-    if ts.errors or ts.failures:
-        sys.exit(1)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_ssl.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_ssl.py
deleted file mode 100755 (executable)
index e96be6a..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-import test_runner
-import test_util
-
-if __name__ == '__main__':
-    modules = ['ssl_lifecycle']
-    test_util.ensure_ssl_auth_user()
-    test_runner.run_unittests(modules)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_util.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/test_util.py
deleted file mode 100644 (file)
index f22fd66..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-import subprocess
-import socket
-import sys
-import os
-import os.path
-
-def ensure_ssl_auth_user():
-    user = 'O=client,CN=%s' % socket.gethostname()
-    rabbitmqctl(['stop_app'])
-    rabbitmqctl(['reset'])
-    rabbitmqctl(['start_app'])
-    rabbitmqctl(['add_user', user, 'foo'])
-    rabbitmqctl(['clear_password', user])
-    rabbitmqctl(['set_permissions', user, '.*', '.*', '.*'])
-
-def enable_implicit_connect():
-    switch_config(implicit_connect='true', default_user='[{login, "guest"}, {passcode, "guest"}]')
-
-def disable_implicit_connect():
-    switch_config(implicit_connect='false', default_user='[]')
-
-def enable_default_user():
-    switch_config(default_user='[{login, "guest"}, {passcode, "guest"}]')
-
-def disable_default_user():
-    switch_config(default_user='[]')
-
-def switch_config(implicit_connect='', default_user=''):
-    cmd = 'application:stop(rabbitmq_stomp),'
-    if implicit_connect:
-        cmd += 'application:set_env(rabbitmq_stomp,implicit_connect,' + implicit_connect + '),'
-    if default_user:
-        cmd += 'application:set_env(rabbitmq_stomp,default_user,' + default_user + '),'
-    cmd += 'application:start(rabbitmq_stomp).'
-    rabbitmqctl(['eval', cmd])
-
-def rabbitmqctl(args):
-    ctl = os.path.normpath(os.path.join(os.getcwd(), sys.argv[0], '../../../../rabbitmq-server/scripts/rabbitmqctl'))
-    cmdline = [ctl, '-n', 'rabbit-test']
-    cmdline.extend(args)
-    subprocess.check_call(cmdline)
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/transactions.py b/rabbitmq-server/plugins-src/rabbitmq-stomp/test/src/transactions.py
deleted file mode 100644 (file)
index d4f166b..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-import unittest
-import stomp
-import base
-import time
-
-class TestTransactions(base.BaseTest):
-
-    def test_tx_commit(self):
-        ''' Test TX with a COMMIT and ensure messages are delivered '''
-        destination = "/exchange/amq.fanout"
-        tx = "test.tx"
-
-        self.listener.reset()
-        self.subscribe_dest(self.conn, destination, None)
-        self.conn.begin(transaction=tx)
-        self.conn.send(destination, "hello!", transaction=tx)
-        self.conn.send(destination, "again!")
-
-        ## should see the second message
-        self.assertTrue(self.listener.await(3))
-        self.assertEquals(1, len(self.listener.messages))
-        self.assertEquals("again!", self.listener.messages[0]['message'])
-
-        ## now look for the first message
-        self.listener.reset()
-        self.conn.commit(transaction=tx)
-        self.assertTrue(self.listener.await(3))
-        self.assertEquals(1, len(self.listener.messages),
-                          "Missing committed message")
-        self.assertEquals("hello!", self.listener.messages[0]['message'])
-
-    def test_tx_abort(self):
-        ''' Test TX with an ABORT and ensure messages are discarded '''
-        destination = "/exchange/amq.fanout"
-        tx = "test.tx"
-
-        self.listener.reset()
-        self.subscribe_dest(self.conn, destination, None)
-        self.conn.begin(transaction=tx)
-        self.conn.send(destination, "hello!", transaction=tx)
-        self.conn.send(destination, "again!")
-
-        ## should see the second message
-        self.assertTrue(self.listener.await(3))
-        self.assertEquals(1, len(self.listener.messages))
-        self.assertEquals("again!", self.listener.messages[0]['message'])
-
-        ## now look for the first message to be discarded
-        self.listener.reset()
-        self.conn.abort(transaction=tx)
-        self.assertFalse(self.listener.await(3))
-        self.assertEquals(0, len(self.listener.messages),
-                          "Unexpected committed message")
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-test/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-test/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/Makefile b/rabbitmq-server/plugins-src/rabbitmq-test/Makefile
deleted file mode 100644 (file)
index 3f11414..0000000
+++ /dev/null
@@ -1,210 +0,0 @@
-.PHONY: all full lite conformance16 update-qpid-testsuite run-qpid-testsuite \
-       prepare restart-app stop-app start-app \
-       start-secondary-app stop-secondary-app \
-       restart-secondary-node cleanup force-snapshot \
-       enable-ha disable-ha
-
-include ../umbrella.mk
-
-BROKER_DIR=../rabbitmq-server
-TEST_DIR=../rabbitmq-java-client
-
-TEST_RABBIT_PORT=5672
-TEST_HARE_PORT=5673
-TEST_RABBIT_SSL_PORT=5671
-TEST_HARE_SSL_PORT=5670
-
-COVER=true
-
-ifeq ($(COVER), true)
-COVER_START=start-cover
-COVER_STOP=stop-cover
-else
-COVER_START=
-COVER_STOP=
-endif
-
-# we actually want to test for ssl above 3.9 (eg >= 3.10), but this
-# comparison is buggy because it doesn't believe 10 > 9, so it doesn't
-# believe 3.10 > 3.9. As a result, we cheat, and use the erts version
-# instead. SSL 3.10 came out with R13B, which included erts 5.7.1, so
-# we require > 5.7.0.
-SSL_VERIFY=$(shell if [ $$(erl -noshell -eval 'io:format(erlang:system_info(version)), halt().') \> "5.7.0" ]; then echo "true"; else echo "false"; fi)
-ifeq (true,$(SSL_VERIFY))
-SSL_VERIFY_OPTION :={verify,verify_peer},{fail_if_no_peer_cert,false}
-else
-SSL_VERIFY_OPTION :={verify_code,1}
-endif
-export SSL_CERTS_DIR := $(realpath certs)
-export PASSWORD := test
-RABBIT_BROKER_OPTIONS := "-rabbit ssl_listeners [{\\\"0.0.0.0\\\",$(TEST_RABBIT_SSL_PORT)}] -rabbit ssl_options [{cacertfile,\\\"$(SSL_CERTS_DIR)/testca/cacert.pem\\\"},{certfile,\\\"$(SSL_CERTS_DIR)/server/cert.pem\\\"},{keyfile,\\\"$(SSL_CERTS_DIR)/server/key.pem\\\"},$(SSL_VERIFY_OPTION)] -rabbit auth_mechanisms ['PLAIN','AMQPLAIN','EXTERNAL','RABBIT-CR-DEMO']"
-HARE_BROKER_OPTIONS := "-rabbit ssl_listeners [{\\\"0.0.0.0\\\",$(TEST_HARE_SSL_PORT)}] -rabbit ssl_options [{cacertfile,\\\"$(SSL_CERTS_DIR)/testca/cacert.pem\\\"},{certfile,\\\"$(SSL_CERTS_DIR)/server/cert.pem\\\"},{keyfile,\\\"$(SSL_CERTS_DIR)/server/key.pem\\\"},$(SSL_VERIFY_OPTION)] -rabbit auth_mechanisms ['PLAIN','AMQPLAIN','EXTERNAL','RABBIT-CR-DEMO']"
-
-TESTS_FAILED := echo '\n============'\
-                    '\nTESTS FAILED'\
-                    '\n============\n'
-
-all: full test
-
-full:
-       OK=true && \
-       $(MAKE) prepare && \
-       { $(MAKE) -C $(BROKER_DIR) run-tests || { OK=false; $(TESTS_FAILED); } } && \
-       { $(MAKE) run-qpid-testsuite || { OK=false; $(TESTS_FAILED); } } && \
-       { ( cd $(TEST_DIR) && MAKE=$(MAKE) ant test-suite ) || { OK=false; $(TESTS_FAILED); } } && \
-       $(MAKE) cleanup && { $$OK || $(TESTS_FAILED); } && $$OK
-
-unit:
-       OK=true && \
-       $(MAKE) prepare && \
-       { $(MAKE) -C $(BROKER_DIR) run-tests || OK=false; } && \
-       $(MAKE) cleanup && $$OK
-
-lite:
-       OK=true && \
-       $(MAKE) prepare && \
-       { $(MAKE) -C $(BROKER_DIR) run-tests || OK=false; } && \
-       { ( cd $(TEST_DIR) && MAKE=$(MAKE) ant test-suite ) || OK=false; } && \
-       $(MAKE) cleanup && $$OK
-
-conformance16:
-       OK=true && \
-       $(MAKE) prepare && \
-       { $(MAKE) -C $(BROKER_DIR) run-tests || OK=false; } && \
-       { ( cd $(TEST_DIR) && MAKE=$(MAKE) ant test-suite ) || OK=false; } && \
-       $(MAKE) cleanup && $$OK
-
-qpid_testsuite:
-       $(MAKE) update-qpid-testsuite
-
-update-qpid-testsuite:
-       svn co -r 906960 http://svn.apache.org/repos/asf/qpid/trunk/qpid/python qpid_testsuite
-       # hg clone http://rabbit-hg.eng.vmware.com/mirrors/qpid_testsuite
-       - patch -N -r - -p0 -d qpid_testsuite/ < qpid_patch
-
-prepare-qpid-patch:
-       cd qpid_testsuite && svn diff > ../qpid_patch && cd ..
-
-run-qpid-testsuite: qpid_testsuite
-       AMQP_SPEC=../rabbitmq-docs/specs/amqp0-8.xml qpid_testsuite/qpid-python-test -m tests_0-8 -I rabbit_failing.txt
-       AMQP_SPEC=../rabbitmq-docs/specs/amqp0-9-1.xml qpid_testsuite/qpid-python-test -m tests_0-9 -I rabbit_failing.txt
-
-clean:
-       rm -rf qpid_testsuite
-
-prepare: create_ssl_certs
-       $(MAKE) -C $(BROKER_DIR) \
-               RABBITMQ_NODENAME=hare \
-               RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \
-               RABBITMQ_NODE_PORT=${TEST_HARE_PORT} \
-               RABBITMQ_SERVER_START_ARGS=$(HARE_BROKER_OPTIONS) \
-               RABBITMQ_CONFIG_FILE=/does-not-exist \
-               RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \
-               stop-node cleandb start-background-node
-       $(MAKE) -C $(BROKER_DIR) \
-               RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \
-               RABBITMQ_NODE_PORT=${TEST_RABBIT_PORT} \
-               RABBITMQ_SERVER_START_ARGS=$(RABBIT_BROKER_OPTIONS) \
-               RABBITMQ_CONFIG_FILE=/does-not-exist \
-               RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \
-               stop-node cleandb start-background-node ${COVER_START} start-rabbit-on-node
-       $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare start-rabbit-on-node
-
-start-app:
-       $(MAKE) -C $(BROKER_DIR) \
-               RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \
-               RABBITMQ_NODE_PORT=${TEST_RABBIT_PORT} \
-               RABBITMQ_SERVER_START_ARGS=$(RABBIT_BROKER_OPTIONS) \
-               RABBITMQ_CONFIG_FILE=/does-not-exist \
-               RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \
-               start-rabbit-on-node
-
-stop-app:
-       $(MAKE) -C $(BROKER_DIR) stop-rabbit-on-node
-
-restart-app: stop-app start-app
-
-start-secondary-app:
-       $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare start-rabbit-on-node
-
-stop-secondary-app:
-       $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare stop-rabbit-on-node
-
-restart-secondary-node:
-       $(MAKE) -C $(BROKER_DIR) \
-               RABBITMQ_NODENAME=hare \
-               RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \
-               RABBITMQ_NODE_PORT=${TEST_HARE_PORT} \
-               RABBITMQ_SERVER_START_ARGS=$(HARE_BROKER_OPTIONS) \
-               RABBITMQ_CONFIG_FILE=/does-not-exist \
-               RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \
-               stop-node start-background-node
-       $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare start-rabbit-on-node
-
-force-snapshot:
-       $(MAKE) -C $(BROKER_DIR) force-snapshot
-
-set-resource-alarm:
-       $(MAKE) -C $(BROKER_DIR) set-resource-alarm SOURCE=$(SOURCE)
-
-clear-resource-alarm:
-       $(MAKE) -C $(BROKER_DIR) clear-resource-alarm SOURCE=$(SOURCE)
-
-enable-ha:
-       $(BROKER_DIR)/scripts/rabbitmqctl set_policy HA \
-               ".*" '{"ha-mode": "all"}'
-
-disable-ha:
-       $(BROKER_DIR)/scripts/rabbitmqctl clear_policy HA
-
-cleanup:
-       -$(MAKE) -C $(BROKER_DIR) \
-               RABBITMQ_NODENAME=hare \
-               RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \
-               RABBITMQ_NODE_PORT=${TEST_HARE_PORT} \
-               RABBITMQ_SERVER_START_ARGS=$(HARE_BROKER_OPTIONS) \
-               RABBITMQ_CONFIG_FILE=/does-not-exist \
-               RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \
-               stop-rabbit-on-node stop-node
-       -$(MAKE) -C $(BROKER_DIR) \
-               RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \
-               RABBITMQ_NODE_PORT=${TEST_RABBIT_PORT} \
-               RABBITMQ_SERVER_START_ARGS=$(RABBIT_BROKER_OPTIONS) \
-               RABBITMQ_CONFIG_FILE=/does-not-exist \
-               RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \
-               stop-rabbit-on-node ${COVER_STOP} stop-node
-
-define compare_version
-$(shell awk 'BEGIN {
-       split("$(1)", v1, "\.");
-       version1 = v1[1] * 1000000 + v1[2] * 10000 + v1[3] * 100 + v1[4];
-
-       split("$(2)", v2, "\.");
-       version2 = v2[1] * 1000000 + v2[2] * 10000 + v2[3] * 100 + v2[4];
-
-       if (version1 $(3) version2) {
-               print "true";
-       } else {
-               print "false";
-       }
-}')
-endef
-
-ERLANG_SSL_VER = $(shell erl -noshell -eval '\
-       ok = application:load(ssl), \
-       {ok, VSN} = application:get_key(ssl, vsn), \
-       io:format("~s~n", [VSN]), \
-       halt(0).')
-MINIMUM_ERLANG_SSL_VER = 5.3
-
-ifeq ($(call compare_version,$(ERLANG_SSL_VER),$(MINIMUM_ERLANG_SSL_VER),>=),true)
-create_ssl_certs:
-       $(MAKE) -C certs DIR=$(SSL_CERTS_DIR) clean all
-else
-create_ssl_certs:
-       @# Skip SSL certs if Erlang is older than R16B01 (ssl 5.3).
-       $(MAKE) -C certs DIR=$(SSL_CERTS_DIR) clean
-       @echo "WARNING: Skip SSL certs creation; Erlang's SSL application is too" \
-           "old ($(ERLANG_SSL_VER) < $(MINIMUM_ERLANG_SSL_VER)) and SSL support" \
-           "is disabled in RabbitMQ"
-endif
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/README b/rabbitmq-server/plugins-src/rabbitmq-test/README
deleted file mode 100644 (file)
index 9b19505..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-Useful targets:
-
-$ make unit # runs the Erlang unit tests
-$ make lite # runs the Erlang unit tests and the Java client / functional tests
-$ make full # runs both the above plus the QPid test suite
-$ make test # runs the Erlang multi-node integration tests
-$ make all  # runs all of the above
-
-The multi-node tests take a long time, so you might want to run a subset:
-
-$ make test FILTER=dynamic_ha               # <- run just one suite
-$ make test FILTER=dynamic_ha:change_policy # <- run just one test
-
-The multi-node tests also default to coverage off, to turn it on:
-
-$ make test COVER=true
-
-This repository is not related to plugin tests; run "make test" in a
-plugin directory to test that plugin.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/certs/Makefile b/rabbitmq-server/plugins-src/rabbitmq-test/certs/Makefile
deleted file mode 100644 (file)
index 32db63f..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-OPENSSL=openssl
-
-ifndef DIR
-DIR := .
-endif
-
-ifdef PASSWORD
-P12PASS := true
-else
-P12PASS := @echo No PASSWORD defined. && false
-endif
-
-.PRECIOUS: %/testca
-.PHONY: %/clean target all p12pass
-
-all: client server
-
-client: p12pass
-       echo $(DIR)
-       $(MAKE) target DIR=$(DIR) TARGET=client EXTENSIONS=client_ca_extensions
-
-server: p12pass
-       $(MAKE) target DIR=$(DIR) TARGET=server EXTENSIONS=server_ca_extensions
-
-p12pass:
-       $(P12PASS)
-
-target: $(DIR)/testca
-       mkdir $(DIR)/$(TARGET)
-       { ( cd $(DIR)/$(TARGET) && \
-           openssl genrsa -out key.pem 2048 &&\
-           openssl req -new -key key.pem -out req.pem -outform PEM\
-               -subj /CN=$$(hostname)/O=$(TARGET)/L=$$$$/ -nodes &&\
-           cd ../testca && \
-           openssl ca -config openssl.cnf -in ../$(TARGET)/req.pem -out \
-             ../$(TARGET)/cert.pem -notext -batch -extensions \
-             $(EXTENSIONS) && \
-           cd ../$(TARGET) && \
-           openssl pkcs12 -export -out keycert.p12 -in cert.pem -inkey key.pem \
-             -passout pass:$(PASSWORD) ) || (rm -rf $(DIR)/$(TARGET) && false); }
-
-$(DIR)/testca:
-       mkdir $(DIR)/testca
-       cp openssl.cnf $(DIR)/testca/openssl.cnf
-       { ( cd $(DIR)/testca && \
-           mkdir certs private && \
-           chmod 700 private && \
-           echo 01 > serial && \
-           touch index.txt && \
-           openssl req -x509 -config openssl.cnf -newkey rsa:2048 -days 365 \
-             -out cacert.pem -outform PEM -subj /CN=MyTestCA/L=$$$$/ -nodes && \
-           openssl x509 -in cacert.pem -out cacert.cer -outform DER ) \
-         || (rm -rf $@ && false); }
-
-clean:
-       rm -rf $(DIR)/testca
-       rm -rf $(DIR)/server
-       rm -rf $(DIR)/client
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/certs/openssl.cnf b/rabbitmq-server/plugins-src/rabbitmq-test/certs/openssl.cnf
deleted file mode 100644 (file)
index 93ffb2f..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-[ ca ]
-default_ca = testca
-
-[ testca ]
-dir = .
-certificate = $dir/cacert.pem
-database = $dir/index.txt
-new_certs_dir = $dir/certs
-private_key = $dir/private/cakey.pem
-serial = $dir/serial
-
-default_crl_days = 7
-default_days = 365
-default_md = sha1
-
-policy = testca_policy
-x509_extensions = certificate_extensions
-
-[ testca_policy ]
-commonName = supplied
-stateOrProvinceName = optional
-countryName = optional
-emailAddress = optional
-organizationName = optional
-organizationalUnitName = optional
-domainComponent = optional
-
-[ certificate_extensions ]
-basicConstraints = CA:false
-
-[ req ]
-default_bits = 2048
-default_keyfile = ./private/cakey.pem
-default_md = sha1
-prompt = yes
-distinguished_name = root_ca_distinguished_name
-x509_extensions = root_ca_extensions
-
-[ root_ca_distinguished_name ]
-commonName = hostname
-
-[ root_ca_extensions ]
-basicConstraints = CA:true
-keyUsage = keyCertSign, cRLSign
-
-[ client_ca_extensions ]
-basicConstraints = CA:false
-keyUsage = digitalSignature
-extendedKeyUsage = 1.3.6.1.5.5.7.3.2
-
-[ server_ca_extensions ]
-basicConstraints = CA:false
-keyUsage = keyEncipherment
-extendedKeyUsage = 1.3.6.1.5.5.7.3.1
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/package.mk b/rabbitmq-server/plugins-src/rabbitmq-test/package.mk
deleted file mode 100644 (file)
index 161d016..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-DEPS:=rabbitmq-erlang-client
-FILTER:=all
-COVER:=false
-WITH_BROKER_TEST_COMMANDS:=rabbit_test_runner:run_in_broker(\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\")
-
-## Require R15B to compile inet_proxy_dist since it requires includes
-## introduced there.
-ifeq ($(shell erl -noshell -eval 'io:format([list_to_integer(X) || X <- string:tokens(erlang:system_info(version), ".")] >= [5,9]), halt().'),true)
-STANDALONE_TEST_COMMANDS:=rabbit_test_runner:run_multi(\"$(UMBRELLA_BASE_DIR)/rabbitmq-server\",\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\",$(COVER),none)
-PACKAGE_ERLC_OPTS+=-Derlang_r15b_or_later
-endif
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/qpid_config.py b/rabbitmq-server/plugins-src/rabbitmq-test/qpid_config.py
deleted file mode 100644 (file)
index 16388a6..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-import os
-
-AMQP_SPEC_DIR=os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "../rabbitmq-docs/specs")
-amqp_spec = os.path.join(AMQP_SPEC_DIR, "amqp.0-10-qpid-errata.xml")
-amqp_spec_0_8 = os.path.join(AMQP_SPEC_DIR, "amqp0-8.xml")
-amqp_spec_0_9 = os.path.join(AMQP_SPEC_DIR, "amqp0-9.xml")
-amqp_spec = 'file://'+os.path.join(AMQP_SPEC_DIR, 'amqp.0-10.xml')
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/qpid_patch b/rabbitmq-server/plugins-src/rabbitmq-test/qpid_patch
deleted file mode 100644 (file)
index 2c4b590..0000000
+++ /dev/null
@@ -1,142 +0,0 @@
-Index: tests_0-8/basic.py
-===================================================================
---- tests_0-8/basic.py (revision 906960)
-+++ tests_0-8/basic.py (working copy)
-@@ -98,7 +98,7 @@
-             channel.basic_consume(queue="")
-             self.fail("Expected failure when consuming from unspecified queue")
-         except Closed, e:
--            self.assertConnectionException(530, e.args[0])
-+            self.assertChannelException(404, e.args[0])
-     def test_consume_unique_consumers(self):
-         """
-Index: tests_0-8/exchange.py
-===================================================================
---- tests_0-8/exchange.py      (revision 906960)
-+++ tests_0-8/exchange.py      (working copy)
-@@ -138,8 +138,6 @@
-         # Test automatic binding by queue name.
-         self.queue_declare(queue="d")
-         self.assertPublishConsume(queue="d", routing_key="d")
--        # Test explicit bind to default queue
--        self.verifyDirectExchange("")
- # TODO aconway 2006-09-27: Fill in empty tests:
-@@ -318,7 +316,7 @@
-             self.channel.exchange_declare(exchange="test_different_declared_type_exchange", type="topic")
-             self.fail("Expected 530 for redeclaration of exchange with different type.")
-         except Closed, e:
--            self.assertConnectionException(530, e.args[0])
-+            self.assertChannelException(406, e.args[0])
-         #cleanup    
-         other = self.connect()
-         c2 = other.channel(1)
-Index: tests_0-8/queue.py
-===================================================================
---- tests_0-8/queue.py (revision 906960)
-+++ tests_0-8/queue.py (working copy)
-@@ -37,14 +37,10 @@
-         channel.basic_publish(exchange="test-exchange", routing_key="key", content=Content("two"))
-         channel.basic_publish(exchange="test-exchange", routing_key="key", content=Content("three"))
--        #check that the queue now reports 3 messages:
--        reply = channel.queue_declare(queue="test-queue")
--        self.assertEqual(3, reply.message_count)
--
-         #now do the purge, then test that three messages are purged and the count drops to 0
-         reply = channel.queue_purge(queue="test-queue");
-         self.assertEqual(3, reply.message_count)        
--        reply = channel.queue_declare(queue="test-queue")
-+        reply = channel.queue_declare(queue="test-queue", exclusive=True)
-         self.assertEqual(0, reply.message_count)
-         #send a further message and consume it, ensuring that the other messages are really gone
-@@ -71,7 +67,7 @@
-             channel.queue_purge()
-             self.fail("Expected failure when purging unspecified queue")
-         except Closed, e:
--            self.assertConnectionException(530, e.args[0])
-+            self.assertChannelException(404, e.args[0])
-         #cleanup    
-         other = self.connect()
-@@ -174,11 +170,7 @@
-         #check attempted deletion of non-existant queue is handled correctly:    
-         channel = self.client.channel(2)
-         channel.channel_open()
--        try:
--            channel.queue_delete(queue="i-dont-exist", if_empty="True")
--            self.fail("Expected delete of non-existant queue to fail")
--        except Closed, e:
--            self.assertChannelException(404, e.args[0])
-+        channel.queue_delete(queue="i-dont-exist", if_empty="True")
-         
-Index: qpid/codec.py
-===================================================================
---- qpid/codec.py      (revision 906960)
-+++ qpid/codec.py      (working copy)
-@@ -76,6 +76,7 @@
-     if not self.types:
-       self.typecode(ord('S'), "longstr")
-       self.typecode(ord('I'), "long")
-+      self.typecode(ord('t'), "bool")
-   def typecode(self, code, type):
-     self.types[code] = type
-@@ -206,6 +207,22 @@
-     """
-     return self.unpack("!B")
-+  def encode_bool(self, b):
-+    """
-+    encodes bool (8 bits) data 't' in network byte order
-+    """
-+
-+    if ((b is not True) and (b is not False)):
-+        raise ValueError('Valid range of bool is True or False')
-+
-+    self.pack("!B", int(b))
-+
-+  def decode_bool(self):
-+    """
-+    decodes a bool (8 bits) encoded in network byte order
-+    """
-+    return bool(self.unpack("!B"))
-+
-   def encode_short(self, o):
-     """
-     encodes short (16 bits) data 'o' in network byte order
-Index: qpid/testlib.py
-===================================================================
---- qpid/testlib.py    (revision 906960)
-+++ qpid/testlib.py    (working copy)
-@@ -67,8 +67,7 @@
-         if not self.client.closed:
-             self.client.channel(0).connection_close(reply_code=200)
--        else:
--            self.client.close()
-+        self.client.close()
-     def connect(self, host=None, port=None, user=None, password=None, tune_params=None):
-         """Create a new connction, return the Client object"""
-Index: qpid_config.py
-===================================================================
---- qpid_config.py     (revision 906960)
-+++ qpid_config.py     (working copy)
-@@ -19,7 +19,8 @@
- import os
--AMQP_SPEC_DIR=os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "specs")
-+AMQP_SPEC_DIR=os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "../rabbitmq-docs/specs")
- amqp_spec = os.path.join(AMQP_SPEC_DIR, "amqp.0-10-qpid-errata.xml")
--amqp_spec_0_8 = os.path.join(AMQP_SPEC_DIR, "amqp.0-8.xml")
--amqp_spec_0_9 = os.path.join(AMQP_SPEC_DIR, "amqp.0-9.xml")
-+amqp_spec_0_8 = os.path.join(AMQP_SPEC_DIR, "amqp0-8.xml")
-+amqp_spec_0_9 = os.path.join(AMQP_SPEC_DIR, "amqp0-9.xml")
-+amqp_spec = 'file://'+os.path.join(AMQP_SPEC_DIR, 'amqp.0-10.xml')
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/rabbit_failing.txt b/rabbitmq-server/plugins-src/rabbitmq-test/rabbit_failing.txt
deleted file mode 100644 (file)
index be4eccf..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-tests_0-8.basic.BasicTests.test_ack
-tests_0-8.basic.BasicTests.test_consume_no_local
-tests_0-8.basic.BasicTests.test_qos_prefetch_count
-tests_0-8.basic.BasicTests.test_qos_prefetch_size
-tests_0-8.broker.BrokerTests.test_basic_delivery_immediate
-tests_0-8.broker.BrokerTests.test_channel_flow
-tests_0-8.tx.TxTests.test_auto_rollback
-tests_0-8.tx.TxTests.test_rollback
-tests_0-9.query.*
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_proxy_dist.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_proxy_dist.erl
deleted file mode 100644 (file)
index 847ef2e..0000000
+++ /dev/null
@@ -1,199 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(inet_proxy_dist).
-
-%% A distribution plugin that uses the usual inet_tcp_dist but allows
-%% insertion of a proxy at the receiving end.
-
-%% inet_*_dist "behaviour"
--export([listen/1, accept/1, accept_connection/5,
-        setup/5, close/1, select/1, is_node_name/1]).
-
-%% For copypasta from inet_tcp_dist
--export([do_setup/6]).
--import(error_logger,[error_msg/2]).
-
--define(REAL, inet_tcp_dist).
-
-%%----------------------------------------------------------------------------
-
-listen(Name)       -> ?REAL:listen(Name).
-select(Node)       -> ?REAL:select(Node).
-accept(Listen)     -> ?REAL:accept(Listen).
-close(Socket)      -> ?REAL:close(Socket).
-is_node_name(Node) -> ?REAL:is_node_name(Node).
-
-accept_connection(AcceptPid, Socket, MyNode, Allowed, SetupTime) ->
-    ?REAL:accept_connection(AcceptPid, Socket, MyNode, Allowed, SetupTime).
-
-%% This is copied from inet_tcp_dist, in order to change the
-%% output of erl_epmd:port_please/2.
-
--ifdef(erlang_r15b_or_later).
-
--include_lib("kernel/include/net_address.hrl").
--include_lib("kernel/include/dist_util.hrl").
-
-setup(Node, Type, MyNode, LongOrShortNames,SetupTime) ->
-    spawn_opt(?MODULE, do_setup, 
-             [self(), Node, Type, MyNode, LongOrShortNames, SetupTime],
-             [link, {priority, max}]).
-
-do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) ->
-    ?trace("~p~n",[{inet_tcp_dist,self(),setup,Node}]),
-    [Name, Address] = splitnode(Node, LongOrShortNames),
-    case inet:getaddr(Address, inet) of
-       {ok, Ip} ->
-           Timer = dist_util:start_timer(SetupTime),
-           case erl_epmd:port_please(Name, Ip) of
-               {port, TcpPort, Version} ->
-                   ?trace("port_please(~p) -> version ~p~n", 
-                          [Node,Version]),
-                   dist_util:reset_timer(Timer),
-                    %% Modification START
-                    ProxyPort = case TcpPort >= 25672 andalso TcpPort < 25700
-                                    andalso inet_tcp_proxy:is_enabled() of
-                                    true  -> TcpPort + 5000;
-                                    false -> TcpPort
-                                end,
-                   case inet_tcp:connect(Ip, ProxyPort, 
-                                         [{active, false}, 
-                                          {packet,2}]) of
-                       {ok, Socket} ->
-                            {ok, {_, SrcPort}} = inet:sockname(Socket),
-                            ok = inet_tcp_proxy_manager:register(
-                                   node(), Node, SrcPort, TcpPort, ProxyPort),
-                    %% Modification END
-                           HSData = #hs_data{
-                             kernel_pid = Kernel,
-                             other_node = Node,
-                             this_node = MyNode,
-                             socket = Socket,
-                             timer = Timer,
-                             this_flags = 0,
-                             other_version = Version,
-                             f_send = fun inet_tcp:send/2,
-                             f_recv = fun inet_tcp:recv/3,
-                             f_setopts_pre_nodeup = 
-                             fun(S) ->
-                                     inet:setopts
-                                       (S, 
-                                        [{active, false},
-                                         {packet, 4},
-                                         nodelay()])
-                             end,
-                             f_setopts_post_nodeup = 
-                             fun(S) ->
-                                     inet:setopts
-                                       (S, 
-                                        [{active, true},
-                                         {deliver, port},
-                                         {packet, 4},
-                                         nodelay()])
-                             end,
-                             f_getll = fun inet:getll/1,
-                             f_address = 
-                             fun(_,_) ->
-                                     #net_address{
-                                  address = {Ip,TcpPort},
-                                  host = Address,
-                                  protocol = tcp,
-                                  family = inet}
-                             end,
-                             mf_tick = fun inet_tcp_dist:tick/1,
-                             mf_getstat = fun inet_tcp_dist:getstat/1,
-                             request_type = Type
-                            },
-                           dist_util:handshake_we_started(HSData);
-                       R ->
-                            io:format("~p failed! ~p~n", [node(), R]),
-                           %% Other Node may have closed since 
-                           %% port_please !
-                           ?trace("other node (~p) "
-                                  "closed since port_please.~n", 
-                                  [Node]),
-                           ?shutdown(Node)
-                   end;
-               _ ->
-                   ?trace("port_please (~p) "
-                          "failed.~n", [Node]),
-                   ?shutdown(Node)
-           end;
-       _Other ->
-           ?trace("inet_getaddr(~p) "
-                  "failed (~p).~n", [Node,_Other]),
-           ?shutdown(Node)
-    end.
-
-%% If Node is illegal terminate the connection setup!!
-splitnode(Node, LongOrShortNames) ->
-    case split_node(atom_to_list(Node), $@, []) of
-       [Name|Tail] when Tail =/= [] ->
-           Host = lists:append(Tail),
-           case split_node(Host, $., []) of
-               [_] when LongOrShortNames =:= longnames ->
-                   error_msg("** System running to use "
-                             "fully qualified "
-                             "hostnames **~n"
-                             "** Hostname ~s is illegal **~n",
-                             [Host]),
-                   ?shutdown(Node);
-               L when length(L) > 1, LongOrShortNames =:= shortnames ->
-                   error_msg("** System NOT running to use fully qualified "
-                             "hostnames **~n"
-                             "** Hostname ~s is illegal **~n",
-                             [Host]),
-                   ?shutdown(Node);
-               _ ->
-                   [Name, Host]
-           end;
-       [_] ->
-           error_msg("** Nodename ~p illegal, no '@' character **~n",
-                     [Node]),
-           ?shutdown(Node);
-       _ ->
-           error_msg("** Nodename ~p illegal **~n", [Node]),
-           ?shutdown(Node)
-    end.
-
-split_node([Chr|T], Chr, Ack) -> [lists:reverse(Ack)|split_node(T, Chr, [])];
-split_node([H|T], Chr, Ack)   -> split_node(T, Chr, [H|Ack]);
-split_node([], _, Ack)        -> [lists:reverse(Ack)].
-
-%% we may not always want the nodelay behaviour
-%% for performance reasons
-
-nodelay() ->
-    case application:get_env(kernel, dist_nodelay) of
-       undefined ->
-           {nodelay, true};
-       {ok, true} ->
-           {nodelay, true};
-       {ok, false} ->
-           {nodelay, false};
-       _ ->
-           {nodelay, true}
-    end.
-
--else.
-
-setup(_Node, _Type, _MyNode, _LongOrShortNames, _SetupTime) ->
-    exit(erlang_r15b_required).
-
-do_setup(_Kernel, _Node, _Type, _MyNode, _LongOrShortNames, _SetupTime) ->
-    exit(erlang_r15b_required).
-
--endif.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy.erl
deleted file mode 100644 (file)
index 28d58e0..0000000
+++ /dev/null
@@ -1,106 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(inet_tcp_proxy).
-
-%% A TCP proxy for insertion into the Erlang distribution mechanism,
-%% which allows us to simulate network partitions.
-
--export([start/0, reconnect/1, is_enabled/0, allow/1, block/1]).
-
--define(TABLE, ?MODULE).
-
-%% This can't start_link because there's no supervision hierarchy we
-%% can easily fit it into (we need to survive all application
-%% restarts). So we have to do some horrible error handling.
-
-start() ->
-    spawn(error_handler(fun go/0)),
-    ok.
-
-reconnect(Nodes) ->
-    [erlang:disconnect_node(N) || N <- Nodes, N =/= node()],
-    ok.
-
-is_enabled() ->
-    lists:member(?TABLE, ets:all()).
-
-allow(Node) -> ets:delete(?TABLE, Node).
-block(Node) -> ets:insert(?TABLE, {Node, block}).
-
-%%----------------------------------------------------------------------------
-
-error_handler(Thunk) ->
-    fun () ->
-            try
-                Thunk()
-            catch _:{{nodedown, _}, _} ->
-                    %% The only other node we ever talk to is the test
-                    %% runner; if that's down then the test is nearly
-                    %% over; die quietly.
-                    ok;
-                  _:X ->
-                    io:format(user, "TCP proxy died with ~p~n At ~p~n",
-                              [X, erlang:get_stacktrace()]),
-                    erlang:halt(1)
-            end
-    end.
-
-go() ->
-    ets:new(?TABLE, [public, named_table]),
-    {ok, Port} = application:get_env(kernel, inet_dist_listen_min),
-    ProxyPort = Port + 5000,
-    {ok, Sock} = gen_tcp:listen(ProxyPort, [inet,
-                                            {reuseaddr, true}]),
-    accept_loop(Sock, Port).
-
-accept_loop(ListenSock, Port) ->
-    {ok, Sock} = gen_tcp:accept(ListenSock),
-    Proxy = spawn(error_handler(fun() -> run_it(Sock, Port) end)),
-    ok = gen_tcp:controlling_process(Sock, Proxy),
-    accept_loop(ListenSock, Port).
-
-run_it(SockIn, Port) ->
-    case {inet:peername(SockIn), inet:sockname(SockIn)} of
-        {{ok, {_Addr, SrcPort}}, {ok, {Addr, _OtherPort}}} ->
-            {ok, Remote, This} = inet_tcp_proxy_manager:lookup(SrcPort),
-            case node() of
-                This  -> ok;
-                _     -> exit({not_me, node(), This})
-            end,
-            {ok, SockOut} = gen_tcp:connect(Addr, Port, [inet]),
-            run_loop({SockIn, SockOut}, Remote, []);
-        _ ->
-            ok
-    end.
-
-run_loop(Sockets, RemoteNode, Buf0) ->
-    Block = [{RemoteNode, block}] =:= ets:lookup(?TABLE, RemoteNode),
-    receive
-        {tcp, Sock, Data} ->
-            Buf = [Data | Buf0],
-            case Block of
-                false -> gen_tcp:send(other(Sock, Sockets), lists:reverse(Buf)),
-                         run_loop(Sockets, RemoteNode, []);
-                true  -> run_loop(Sockets, RemoteNode, Buf)
-            end;
-        {tcp_closed, Sock} ->
-            gen_tcp:close(other(Sock, Sockets));
-        X ->
-            exit({weirdness, X})
-    end.
-
-other(A, {A, B}) -> B;
-other(B, {A, B}) -> A.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy_manager.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy_manager.erl
deleted file mode 100644 (file)
index a79ea9f..0000000
+++ /dev/null
@@ -1,107 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(inet_tcp_proxy_manager).
-
-%% The TCP proxies need to decide whether to block based on the node
-%% they're running on, and the node connecting to them. The trouble
-%% is, they don't have an easy way to determine the latter. Therefore
-%% when A connects to B we register the source port used by A here, so
-%% that B can later look it up and find out who A is without having to
-%% sniff the distribution protocol.
-%%
-%% That does unfortunately mean that we need a central control
-%% thing. We assume here it's running on the node called
-%% 'standalone_test' since that's where tests are orchestrated from.
-%%
-%% Yes, this leaks. For its intended lifecycle, that's fine.
-
--behaviour(gen_server).
-
--export([start_link/0, register/5, lookup/1]).
-
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
-         code_change/3]).
-
--define(NODE, standalone_test).
-
--record(state, {ports, pending}).
-
-start_link() ->
-    Node = node(),
-    Node = controller_node(),
-    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-register(_From, _To, _SrcPort, Port, Port) ->
-    %% No proxy, don't register
-    ok;
-register(From, To, SrcPort, _Port, _ProxyPort) ->
-    gen_server:call(name(), {register, From, To, SrcPort}, infinity).
-
-lookup(SrcPort) ->
-    gen_server:call(name(), {lookup, SrcPort}, infinity).
-
-controller_node() ->
-    rabbit_nodes:make(atom_to_list(?NODE)).
-
-name() ->
-    {?MODULE, controller_node()}.
-
-%%----------------------------------------------------------------------------
-
-init([]) ->
-    net_kernel:monitor_nodes(true),
-    {ok, #state{ports   = dict:new(),
-                pending = []}}.
-
-handle_call({register, FromNode, ToNode, SrcPort}, _From,
-            State = #state{ports   = Ports,
-                           pending = Pending}) ->
-    {Notify, Pending2} =
-        lists:partition(fun ({P, _}) -> P =:= SrcPort end, Pending),
-    [gen_server:reply(From, {ok, FromNode, ToNode}) || {_, From} <- Notify],
-    {reply, ok,
-     State#state{ports   = dict:store(SrcPort, {FromNode, ToNode}, Ports),
-                 pending = Pending2}};
-
-handle_call({lookup, SrcPort}, From,
-            State = #state{ports = Ports, pending = Pending}) ->
-    case dict:find(SrcPort, Ports) of
-        {ok, {FromNode, ToNode}} ->
-            {reply, {ok, FromNode, ToNode}, State};
-        error ->
-            {noreply, State#state{pending = [{SrcPort, From} | Pending]}}
-    end;
-
-handle_call(_Req, _From, State) ->
-    {reply, unknown_request, State}.
-
-handle_cast(_C, State) ->
-    {noreply, State}.
-
-handle_info({nodedown, Node}, State = #state{ports = Ports}) ->
-    Ports1 = dict:filter(
-               fun (_, {From, To}) ->
-                       Node =/= From andalso Node =/= To
-               end, Ports),
-    {noreply, State#state{ports = Ports1}};
-
-handle_info(_I, State) ->
-    {noreply, State}.
-
-terminate(_Reason, _State) ->
-    ok.
-
-code_change(_, State, _) -> {ok, State}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_consumer.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_consumer.erl
deleted file mode 100644 (file)
index f11d8d4..0000000
+++ /dev/null
@@ -1,114 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(rabbit_ha_test_consumer).
-
--include_lib("amqp_client/include/amqp_client.hrl").
-
--export([await_response/1, create/5, start/6]).
-
-await_response(ConsumerPid) ->
-    case receive {ConsumerPid, Response} -> Response end of
-        {error, Reason}  -> erlang:error(Reason);
-        ok               -> ok
-    end.
-
-create(Channel, Queue, TestPid, CancelOnFailover, ExpectingMsgs) ->
-    ConsumerPid = spawn_link(?MODULE, start,
-                             [TestPid, Channel, Queue, CancelOnFailover,
-                              ExpectingMsgs + 1, ExpectingMsgs]),
-    amqp_channel:subscribe(
-      Channel, consume_method(Queue, CancelOnFailover), ConsumerPid),
-    ConsumerPid.
-
-start(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume) ->
-    error_logger:info_msg("consumer ~p on ~p awaiting ~w messages "
-                          "(lowest seen = ~w, cancel-on-failover = ~w)~n",
-                          [self(), Channel, MsgsToConsume, LowestSeen,
-                           CancelOnFailover]),
-    run(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume).
-
-run(TestPid, _Channel, _Queue, _CancelOnFailover, _LowestSeen, 0) ->
-    consumer_reply(TestPid, ok);
-run(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume) ->
-    receive
-        #'basic.consume_ok'{} ->
-            run(TestPid, Channel, Queue,
-                CancelOnFailover, LowestSeen, MsgsToConsume);
-        {Delivery = #'basic.deliver'{ redelivered = Redelivered },
-         #amqp_msg{payload = Payload}} ->
-            MsgNum = list_to_integer(binary_to_list(Payload)),
-
-            ack(Delivery, Channel),
-
-            %% we can receive any message we've already seen and,
-            %% because of the possibility of multiple requeuings, we
-            %% might see these messages in any order. If we are seeing
-            %% a message again, we don't decrement the MsgsToConsume
-            %% counter.
-            if
-                MsgNum + 1 == LowestSeen ->
-                    run(TestPid, Channel, Queue,
-                        CancelOnFailover, MsgNum, MsgsToConsume - 1);
-                MsgNum >= LowestSeen ->
-                    error_logger:info_msg(
-                      "consumer ~p on ~p ignoring redeliverd msg ~p~n",
-                      [self(), Channel, MsgNum]),
-                    true = Redelivered, %% ASSERTION
-                    run(TestPid, Channel, Queue,
-                        CancelOnFailover, LowestSeen, MsgsToConsume);
-                true ->
-                    %% We received a message we haven't seen before,
-                    %% but it is not the next message in the expected
-                    %% sequence.
-                    consumer_reply(TestPid,
-                                   {error, {unexpected_message, MsgNum}})
-            end;
-        #'basic.cancel'{} when CancelOnFailover ->
-            error_logger:info_msg("consumer ~p on ~p received basic.cancel: "
-                                  "resubscribing to ~p on ~p~n",
-                                  [self(), Channel, Queue, Channel]),
-            resubscribe(TestPid, Channel, Queue, CancelOnFailover,
-                        LowestSeen, MsgsToConsume);
-        #'basic.cancel'{} ->
-            exit(cancel_received_without_cancel_on_failover)
-    end.
-
-%%
-%% Private API
-%%
-
-resubscribe(TestPid, Channel, Queue, CancelOnFailover, LowestSeen,
-            MsgsToConsume) ->
-    amqp_channel:subscribe(
-      Channel, consume_method(Queue, CancelOnFailover), self()),
-    ok = receive #'basic.consume_ok'{} -> ok
-         end,
-    error_logger:info_msg("re-subscripting consumer ~p on ~p complete "
-                          "(received basic.consume_ok)",
-                          [self(), Channel]),
-    start(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume).
-
-consume_method(Queue, CancelOnFailover) ->
-    Args = [{<<"x-cancel-on-ha-failover">>, bool, CancelOnFailover}],
-    #'basic.consume'{queue     = Queue,
-                     arguments = Args}.
-
-ack(#'basic.deliver'{delivery_tag = DeliveryTag}, Channel) ->
-    amqp_channel:call(Channel, #'basic.ack'{delivery_tag = DeliveryTag}),
-    ok.
-
-consumer_reply(TestPid, Reply) ->
-    TestPid ! {self(), Reply}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_producer.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_producer.erl
deleted file mode 100644 (file)
index f3070fe..0000000
+++ /dev/null
@@ -1,119 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(rabbit_ha_test_producer).
-
--export([await_response/1, start/5, create/5]).
-
--include_lib("amqp_client/include/amqp_client.hrl").
-
-await_response(ProducerPid) ->
-    error_logger:info_msg("waiting for producer pid ~p~n", [ProducerPid]),
-    case receive {ProducerPid, Response} -> Response end of
-        ok                -> ok;
-        {error, _} = Else -> exit(Else);
-        Else              -> exit({weird_response, Else})
-    end.
-
-create(Channel, Queue, TestPid, Confirm, MsgsToSend) ->
-    ProducerPid = spawn_link(?MODULE, start, [Channel, Queue, TestPid,
-                                              Confirm, MsgsToSend]),
-    receive
-        {ProducerPid, started} -> ProducerPid
-    end.
-
-start(Channel, Queue, TestPid, Confirm, MsgsToSend) ->
-    ConfirmState =
-        case Confirm of
-            true  -> amqp_channel:register_confirm_handler(Channel, self()),
-                     #'confirm.select_ok'{} =
-                         amqp_channel:call(Channel, #'confirm.select'{}),
-                     gb_trees:empty();
-            false -> none
-        end,
-    TestPid ! {self(), started},
-    error_logger:info_msg("publishing ~w msgs on ~p~n", [MsgsToSend, Channel]),
-    producer(Channel, Queue, TestPid, ConfirmState, MsgsToSend).
-
-%%
-%% Private API
-%%
-
-producer(_Channel, _Queue, TestPid, none, 0) ->
-    TestPid ! {self(), ok};
-producer(Channel, _Queue, TestPid, ConfirmState, 0) ->
-    error_logger:info_msg("awaiting confirms on channel ~p~n", [Channel]),
-    Msg = case drain_confirms(no_nacks, ConfirmState) of
-              no_nacks    -> ok;
-              nacks       -> {error, received_nacks};
-              {Nacks, CS} -> {error, {missing_confirms, Nacks,
-                                      lists:sort(gb_trees:keys(CS))}}
-          end,
-    TestPid ! {self(), Msg};
-
-producer(Channel, Queue, TestPid, ConfirmState, MsgsToSend) ->
-    Method = #'basic.publish'{exchange    = <<"">>,
-                              routing_key = Queue,
-                              mandatory   = false,
-                              immediate   = false},
-
-    ConfirmState1 = maybe_record_confirm(ConfirmState, Channel, MsgsToSend),
-
-    amqp_channel:call(Channel, Method,
-                      #amqp_msg{props = #'P_basic'{delivery_mode = 2},
-                                payload = list_to_binary(
-                                            integer_to_list(MsgsToSend))}),
-
-    producer(Channel, Queue, TestPid, ConfirmState1, MsgsToSend - 1).
-
-maybe_record_confirm(none, _, _) ->
-    none;
-maybe_record_confirm(ConfirmState, Channel, MsgsToSend) ->
-    SeqNo = amqp_channel:next_publish_seqno(Channel),
-    gb_trees:insert(SeqNo, MsgsToSend, ConfirmState).
-
-drain_confirms(Nacks, ConfirmState) ->
-    case gb_trees:is_empty(ConfirmState) of
-        true  -> Nacks;
-        false -> receive
-                     #'basic.ack'{delivery_tag = DeliveryTag,
-                                  multiple     = IsMulti} ->
-                         drain_confirms(Nacks,
-                                        delete_confirms(DeliveryTag, IsMulti,
-                                                        ConfirmState));
-                     #'basic.nack'{delivery_tag = DeliveryTag,
-                                   multiple     = IsMulti} ->
-                         drain_confirms(nacks,
-                                        delete_confirms(DeliveryTag, IsMulti,
-                                                        ConfirmState))
-                 after
-                     60000 -> {Nacks, ConfirmState}
-                 end
-    end.
-
-delete_confirms(DeliveryTag, false, ConfirmState) ->
-    gb_trees:delete(DeliveryTag, ConfirmState);
-delete_confirms(DeliveryTag, true, ConfirmState) ->
-    multi_confirm(DeliveryTag, ConfirmState).
-
-multi_confirm(DeliveryTag, ConfirmState) ->
-    case gb_trees:is_empty(ConfirmState) of
-        true  -> ConfirmState;
-        false -> {Key, _, ConfirmState1} = gb_trees:take_smallest(ConfirmState),
-                 case Key =< DeliveryTag of
-                     true  -> multi_confirm(DeliveryTag, ConfirmState1);
-                     false -> ConfirmState
-                 end
-    end.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_configs.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_configs.erl
deleted file mode 100644 (file)
index f286733..0000000
+++ /dev/null
@@ -1,279 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(rabbit_test_configs).
-
--include_lib("amqp_client/include/amqp_client.hrl").
-
--export([enable_plugins/1]).
--export([cluster/2, cluster_ab/1, cluster_abc/1, start_ab/1, start_abc/1]).
--export([start_connections/1, build_cluster/1]).
--export([ha_policy_all/1, ha_policy_two_pos/1]).
--export([start_nodes/2, start_nodes/3, add_to_cluster/2,
-         rabbitmqctl/2, rabbitmqctl_fail/2]).
--export([stop_nodes/1, start_node/1, stop_node/1, kill_node/1, restart_node/1,
-         start_node_fail/1, execute/1]).
--export([cover_work_factor/2]).
-
--import(rabbit_test_util, [set_ha_policy/3, set_ha_policy/4, a2b/1]).
--import(rabbit_misc, [pget/2, pget/3]).
-
--define(INITIAL_KEYS, [cover, base, server, plugins]).
--define(NON_RUNNING_KEYS, ?INITIAL_KEYS ++ [nodename, port, mnesia_dir]).
-
-cluster_ab(InitialCfg)  -> cluster(InitialCfg, [a, b]).
-cluster_abc(InitialCfg) -> cluster(InitialCfg, [a, b, c]).
-start_ab(InitialCfg)    -> start_nodes(InitialCfg, [a, b]).
-start_abc(InitialCfg)   -> start_nodes(InitialCfg, [a, b, c]).
-
-cluster(InitialCfg, NodeNames) ->
-    start_connections(build_cluster(start_nodes(InitialCfg, NodeNames))).
-
-start_nodes(InitialCfg, NodeNames) ->
-    start_nodes(InitialCfg, NodeNames, 5672).
-
-start_nodes(InitialCfg0, NodeNames, FirstPort) ->
-    {ok, Already0} = net_adm:names(),
-    Already = [list_to_atom(N) || {N, _P} <- Already0],
-    [check_node_not_running(Node, Already) || Node <- NodeNames],
-    Ports = lists:seq(FirstPort, length(NodeNames) + FirstPort - 1),
-    InitialCfgs = case InitialCfg0 of
-                      [{_, _}|_] -> [InitialCfg0 || _ <- NodeNames];
-                      _          -> InitialCfg0
-                  end,
-    Nodes = [[{nodename, N}, {port, P},
-              {mnesia_dir, rabbit_misc:format("rabbitmq-~s-mnesia", [N])} |
-              strip_non_initial(Cfg)]
-             || {N, P, Cfg} <- lists:zip3(NodeNames, Ports, InitialCfgs)],
-    [start_node(Node) || Node <- Nodes].
-
-check_node_not_running(Node, Already) ->
-    case lists:member(Node, Already) of
-        true  -> exit({node_already_running, Node});
-        false -> ok
-    end.
-
-strip_non_initial(Cfg) ->
-    [{K, V} || {K, V} <- Cfg, lists:member(K, ?INITIAL_KEYS)].
-
-strip_running(Cfg) ->
-    [{K, V} || {K, V} <- Cfg, lists:member(K, ?NON_RUNNING_KEYS)].
-
-enable_plugins(Cfg) ->
-    enable_plugins(pget(plugins, Cfg), pget(server, Cfg), Cfg).
-
-enable_plugins(none, _Server, _Cfg) -> ok;
-enable_plugins(_Dir, Server, Cfg) ->
-    R = execute(Cfg, Server ++ "/scripts/rabbitmq-plugins list -m"),
-    Plugins = string:join(string:tokens(R, "\n"), " "),
-    execute(Cfg, {Server ++ "/scripts/rabbitmq-plugins set --offline ~s",
-                  [Plugins]}),
-    ok.
-
-start_node(Cfg0) ->
-    Node = rabbit_nodes:make(pget(nodename, Cfg0)),
-    Cfg = [{node, Node} | Cfg0],
-    Server = pget(server, Cfg),
-    Linked = execute_bg(Cfg, Server ++ "/scripts/rabbitmq-server"),
-    rabbitmqctl(Cfg, {"wait ~s", [pid_file(Cfg)]}),
-    OSPid = rpc:call(Node, os, getpid, []),
-    %% The cover system thinks all nodes with the same name are the
-    %% same node and will automaticaly re-establish cover as soon as
-    %% we see them, so we only want to start cover once per node name
-    %% for the entire test run.
-    case {pget(cover, Cfg), lists:member(Node, cover:which_nodes())} of
-        {true, false} -> cover:start([Node]);
-        _             -> ok
-    end,
-    [{os_pid,     OSPid},
-     {linked_pid, Linked} | Cfg].
-
-start_node_fail(Cfg0) ->
-    Node = rabbit_nodes:make(pget(nodename, Cfg0)),
-    Cfg = [{node, Node}, {acceptable_exit_codes, lists:seq(1, 255)} | Cfg0],
-    Server = pget(server, Cfg),
-    execute(Cfg, Server ++ "/scripts/rabbitmq-server"),
-    ok.
-
-build_cluster([First | Rest]) ->
-    add_to_cluster([First], Rest).
-
-add_to_cluster([First | _] = Existing, New) ->
-    [cluster_with(First, Node) || Node <- New],
-    Existing ++ New.
-
-cluster_with(Cfg, NewCfg) ->
-    Node = pget(node, Cfg),
-    rabbitmqctl(NewCfg, stop_app),
-    rabbitmqctl(NewCfg, {"join_cluster ~s", [Node]}),
-    rabbitmqctl(NewCfg, start_app).
-
-rabbitmqctl(Cfg, Str) ->
-    Node = pget(node, Cfg),
-    Server = pget(server, Cfg),
-    Cmd = case Node of
-              undefined -> {"~s", [fmt(Str)]};
-              _         -> {"-n ~s ~s", [Node, fmt(Str)]}
-          end,
-    execute(Cfg, {Server ++ "/scripts/rabbitmqctl ~s", [fmt(Cmd)]}).
-
-rabbitmqctl_fail(Cfg, Str) ->
-    rabbitmqctl([{acceptable_exit_codes, lists:seq(1, 255)} | Cfg], Str).
-
-ha_policy_all([Cfg | _] = Cfgs) ->
-    set_ha_policy(Cfg, <<".*">>, <<"all">>),
-    Cfgs.
-
-ha_policy_two_pos([Cfg | _] = Cfgs) ->
-    Members = [a2b(pget(node, C)) || C <- Cfgs],
-    TwoNodes = [M || M <- lists:sublist(Members, 2)],
-    set_ha_policy(Cfg, <<"^ha.two.">>, {<<"nodes">>, TwoNodes},
-                  [{<<"ha-promote-on-shutdown">>, <<"always">>}]),
-    set_ha_policy(Cfg, <<"^ha.auto.">>, {<<"nodes">>, TwoNodes},
-                  [{<<"ha-sync-mode">>,           <<"automatic">>},
-                   {<<"ha-promote-on-shutdown">>, <<"always">>}]),
-    Cfgs.
-
-start_connections(Nodes) -> [start_connection(Node) || Node <- Nodes].
-
-start_connection(Cfg) ->
-    Port = pget(port, Cfg),
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{port = Port}),
-    {ok, Ch} =  amqp_connection:open_channel(Conn),
-    [{connection, Conn}, {channel, Ch} | Cfg].
-
-stop_nodes(Nodes) -> [stop_node(Node) || Node <- Nodes].
-
-stop_node(Cfg) ->
-    maybe_flush_cover(Cfg),
-    catch rabbitmqctl(Cfg, {"stop ~s", [pid_file(Cfg)]}),
-    strip_running(Cfg).
-
-kill_node(Cfg) ->
-    maybe_flush_cover(Cfg),
-    OSPid = pget(os_pid, Cfg),
-    catch execute(Cfg, {"kill -9 ~s", [OSPid]}),
-    await_os_pid_death(OSPid),
-    strip_running(Cfg).
-
-await_os_pid_death(OSPid) ->
-    case rabbit_misc:is_os_process_alive(OSPid) of
-        true  -> timer:sleep(100),
-                 await_os_pid_death(OSPid);
-        false -> ok
-    end.
-
-restart_node(Cfg) ->
-    start_node(stop_node(Cfg)).
-
-maybe_flush_cover(Cfg) ->
-    case pget(cover, Cfg) of
-        true  -> cover:flush(pget(node, Cfg));
-        false -> ok
-    end.
-
-%% Cover slows things down enough that if we are sending messages in
-%% bulk, we want to send fewer or we'll be here all day...
-cover_work_factor(Without, Cfg) ->
-    case pget(cover, Cfg) of
-        true  -> trunc(Without * 0.1);
-        false -> Without
-    end.
-
-%%----------------------------------------------------------------------------
-
-execute(Cmd) ->
-    execute([], Cmd, [0]).
-
-execute(Cfg, Cmd) ->
-    %% code 137 -> killed with SIGKILL which we do in some tests
-    execute(environment(Cfg), Cmd, pget(acceptable_exit_codes, Cfg, [0, 137])).
-
-execute(Env0, Cmd0, AcceptableExitCodes) ->
-    Env = [{"RABBITMQ_" ++ K, fmt(V)} || {K, V} <- Env0],
-    Cmd = fmt(Cmd0),
-    error_logger:info_msg("Invoking '~s'~n", [Cmd]),
-    Port = erlang:open_port(
-             {spawn, "/usr/bin/env sh -c \"" ++ Cmd ++ "\""},
-             [{env, Env}, exit_status,
-              stderr_to_stdout, use_stdio]),
-    port_receive_loop(Port, "", AcceptableExitCodes).
-
-environment(Cfg) ->
-    Nodename = pget(nodename, Cfg),
-    Plugins = pget(plugins, Cfg),
-    case Nodename of
-        undefined ->
-            plugins_env(Plugins);
-        _         ->
-            Port = pget(port, Cfg),
-            Base = pget(base, Cfg),
-            Server = pget(server, Cfg),
-            [{"MNESIA_DIR",         {"~s/~s", [Base, pget(mnesia_dir, Cfg)]}},
-             {"PLUGINS_EXPAND_DIR", {"~s/~s-plugins-expand", [Base, Nodename]}},
-             {"LOG_BASE",           {"~s", [Base]}},
-             {"NODENAME",           {"~s", [Nodename]}},
-             {"NODE_PORT",          {"~B", [Port]}},
-             {"PID_FILE",           pid_file(Cfg)},
-             {"CONFIG_FILE",        "/some/path/which/does/not/exist"},
-             {"ALLOW_INPUT",        "1"}, %% Needed to make it close on exit
-             %% Bit of a hack - only needed for mgmt tests.
-             {"SERVER_START_ARGS",
-              {"-rabbitmq_management listener [{port,1~B}]", [Port]}},
-             {"SERVER_ERL_ARGS",
-              %% Next two lines are defaults
-              {"+K true +A30 +P 1048576 "
-               "-kernel inet_default_connect_options [{nodelay,true}] "
-               %% Some tests need to be able to make distribution unhappy
-               "-pa ~s/../rabbitmq-test/ebin "
-               "-proto_dist inet_proxy", [Server]}}
-             | plugins_env(Plugins)]
-    end.
-
-plugins_env(none) ->
-    [{"ENABLED_PLUGINS_FILE", "/does-not-exist"}];
-plugins_env(Dir) ->
-    [{"PLUGINS_DIR",          {"~s/plugins", [Dir]}},
-     {"PLUGINS_EXPAND_DIR",   {"~s/expand", [Dir]}},
-     {"ENABLED_PLUGINS_FILE", {"~s/enabled_plugins", [Dir]}}].
-
-pid_file(Cfg) ->
-    rabbit_misc:format("~s/~s.pid", [pget(base, Cfg), pget(nodename, Cfg)]).
-
-port_receive_loop(Port, Stdout, AcceptableExitCodes) ->
-    receive
-        {Port, {exit_status, X}} ->
-            Fmt = "Command exited with code ~p~nStdout: ~s~n",
-            Args = [X, Stdout],
-            case lists:member(X, AcceptableExitCodes) of
-                true  -> error_logger:info_msg(Fmt, Args),
-                         Stdout;
-                false -> error_logger:error_msg(Fmt, Args),
-                         exit({exit_status, X, AcceptableExitCodes, Stdout})
-            end;
-        {Port, {data, Out}} ->
-            port_receive_loop(Port, Stdout ++ Out, AcceptableExitCodes)
-    end.
-
-execute_bg(Cfg, Cmd) ->
-    spawn_link(fun () ->
-                       execute(Cfg, Cmd),
-                       {links, Links} = process_info(self(), links),
-                       [unlink(L) || L <- Links]
-               end).
-
-fmt({Fmt, Args}) -> rabbit_misc:format(Fmt, Args);
-fmt(Str)         -> Str.
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_runner.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_runner.erl
deleted file mode 100644 (file)
index d0df292..0000000
+++ /dev/null
@@ -1,230 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_test_runner).
-
--include_lib("kernel/include/file.hrl").
-
--define(TIMEOUT, 600).
-
--import(rabbit_misc, [pget/2]).
-
--export([run_in_broker/2, run_multi/5]).
-
-run_in_broker(Dir, Filter) ->
-    add_server_test_ebin_dir(),
-    io:format("~nIn-broker tests~n================~n~n", []),
-    eunit:test(make_tests_single(Dir, Filter, ?TIMEOUT), []).
-
-run_multi(ServerDir, Dir, Filter, Cover, PluginsDir) ->
-    io:format("~nMulti-node tests~n================~n~n", []),
-    %% Umbrella does not give us -sname
-    net_kernel:start([?MODULE, shortnames]),
-    inets:start(), %% Used by HTTP tests
-    error_logger:tty(false),
-    case Cover of
-        true  -> io:format("Cover compiling..."),
-                 cover:start(),
-                 ok = rabbit_misc:enable_cover(["../rabbitmq-server/"]),
-                 io:format(" done.~n~n");
-        false -> ok
-    end,
-    R = eunit:test(make_tests_multi(
-                     ServerDir, Dir, Filter, Cover, PluginsDir, ?TIMEOUT), []),
-    case Cover of
-        true  -> io:format("~nCover reporting..."),
-                 ok = rabbit_misc:report_cover(),
-                 io:format(" done.~n~n");
-        false -> ok
-    end,
-    R.
-
-make_tests_single(Dir, Filter, Timeout) ->
-    {Filtered, AllCount, Width} = find_tests(Dir, Filter, "_test"),
-    io:format("Running ~B of ~B tests; FILTER=~s~n~n",
-              [length(Filtered), AllCount, Filter]),
-    [make_test_single(M, FWith, F, ShowHeading, Timeout, Width)
-     || {M, FWith, F, ShowHeading} <- annotate_show_heading(Filtered)].
-
-make_tests_multi(ServerDir, Dir, Filter, Cover, PluginsDir, Timeout) ->
-    {Filtered, AllCount, Width} = find_tests(Dir, Filter, "_with"),
-    io:format("Running ~B of ~B tests; FILTER=~s; COVER=~s~n~n",
-              [length(Filtered), AllCount, Filter, Cover]),
-    Cfg = [{cover,   Cover},
-           {base,    basedir() ++ "/nodes"},
-           {server,  ServerDir},
-           {plugins, PluginsDir}],
-    rabbit_test_configs:enable_plugins(Cfg),
-    [make_test_multi(M, FWith, F, ShowHeading, Timeout, Width, Cfg)
-     || {M, FWith, F, ShowHeading} <- annotate_show_heading(Filtered)].
-
-find_tests(Dir, Filter, Suffix) ->
-    All = [{M, FWith, F} ||
-              M <- modules(Dir),
-              {FWith, _Arity} <- proplists:get_value(exports, M:module_info()),
-              string:right(atom_to_list(FWith), length(Suffix)) =:= Suffix,
-              F <- [truncate_function_name(FWith, length(Suffix))]],
-    Filtered = [Test || {M, _FWith, F} = Test <- All,
-                        should_run(M, F, Filter)],
-    Width = case Filtered of
-                [] -> 0;
-                _  -> lists:max([atom_length(F) || {_, _, F} <- Filtered])
-            end,
-    {Filtered, length(All), Width}.
-
-make_test_single(M, FWith, F, ShowHeading, Timeout, Width) ->
-    {timeout,
-     Timeout,
-     fun () ->
-             maybe_print_heading(M, ShowHeading),
-             io:format(user, "~s [running]", [name(F, Width)]),
-             M:FWith(),
-             io:format(user, " [PASSED].~n", [])
-     end}.
-
-make_test_multi(M, FWith, F, ShowHeading, Timeout, Width, InitialCfg) ->
-    {setup,
-     fun () ->
-             maybe_print_heading(M, ShowHeading),
-             io:format(user, "~s [setup]", [name(F, Width)]),
-             setup_error_logger(M, F, basedir()),
-             recursive_delete(pget(base, InitialCfg)),
-             try
-                 apply_config(M:FWith(), InitialCfg)
-             catch
-                 error:{Type, Error, Cfg, Stack} ->
-                     case Cfg of
-                         InitialCfg -> ok; %% [0]
-                         _          -> rabbit_test_configs:stop_nodes(Cfg)
-                     end,
-                     exit({Type, Error, Stack})
-             end
-     end,
-     fun (Nodes) ->
-             rabbit_test_configs:stop_nodes(Nodes),
-             %% Partition tests change this, let's revert
-             net_kernel:set_net_ticktime(60, 1),
-             io:format(user, ".~n", [])
-     end,
-     fun (Nodes) ->
-             [{timeout,
-               Timeout,
-               fun () ->
-                       [link(pget(linked_pid, N)) || N <- Nodes],
-                       io:format(user, " [running]", []),
-                       %%try
-                           M:F(Nodes),
-                           io:format(user, " [PASSED]", [])
-                       %% catch
-                       %%     Type:Reason ->
-                       %%         io:format(user, "YYY stop~n", []),
-                       %%         rabbit_test_configs:stop_nodes(Nodes),
-                       %%         exit({Type, Reason, erlang:get_stacktrace()})
-                       %% end
-               end}]
-     end}.
-%% [0] If we didn't get as far as starting any nodes then we only have
-%% one proplist for initial config, not several per node. So avoid
-%% trying to "stop" it - it won't work (and there's nothing to do
-%% anyway).
-
-maybe_print_heading(M, true) ->
-    io:format(user, "~n~s~n~s~n", [M, string:chars($-, atom_length(M))]);
-maybe_print_heading(_M, false) ->
-    ok.
-
-apply_config(Things, Cfg) when is_list(Things) ->
-    lists:foldl(fun apply_config/2, Cfg, Things);
-apply_config(F, Cfg) when is_atom(F) ->
-    apply_config(fun (C) -> rabbit_test_configs:F(C) end, Cfg);
-apply_config(F, Cfg) when is_function(F) ->
-    try
-        F(Cfg)
-    catch
-        Type:Error -> erlang:error({Type, Error, Cfg, erlang:get_stacktrace()})
-    end.
-
-annotate_show_heading(List) ->
-    annotate_show_heading(List, undefined).
-
-annotate_show_heading([], _) ->
-    [];
-annotate_show_heading([{M, FWith, F} | Rest], Current) ->
-    [{M, FWith, F, M =/= Current} | annotate_show_heading(Rest, M)].
-
-setup_error_logger(M, F, Base) ->
-    case error_logger_logfile_filename() of
-        {error, no_log_file} -> ok;
-        _                    -> ok = error_logger:logfile(close)
-    end,
-    FN = rabbit_misc:format("~s/~s:~s.log", [basedir(), M, F]),
-    ensure_dir(Base),
-    ok = error_logger:logfile({open, FN}).
-
-truncate_function_name(FWith, Length) ->
-    FName = atom_to_list(FWith),
-    list_to_atom(string:substr(FName, 1, length(FName) - Length)).
-
-should_run(_M, _F, "all") -> true;
-should_run(M, F, Filter)  -> MF = rabbit_misc:format("~s:~s", [M, F]),
-                             case re:run(MF, Filter) of
-                                 {match, _} -> true;
-                                 nomatch    -> false
-                             end.
-
-ensure_dir(Path) ->
-    case file:read_file_info(Path) of
-        {ok, #file_info{type=regular}}   -> exit({exists_as_file, Path});
-        {ok, #file_info{type=directory}} -> ok;
-        _                                -> file:make_dir(Path)
-    end.
-
-modules(RelDir) ->
-    {ok, Files} = file:list_dir(RelDir),
-    [M || F <- Files,
-          M <- case string:tokens(F, ".") of
-                   [MStr, "beam"] -> [list_to_atom(MStr)];
-                   _              -> []
-               end].
-
-recursive_delete(Dir) ->
-    rabbit_test_configs:execute({"rm -rf ~s", [Dir]}).
-
-name(F, Width) ->
-    R = atom_to_list(F),
-    R ++ ":" ++ string:chars($ , Width - length(R)).
-
-atom_length(A) -> length(atom_to_list(A)).
-
-basedir() -> "/tmp/rabbitmq-multi-node".
-
-%% reimplement error_logger:logfile(filename) only using
-%% gen_event:call/4 instead of gen_event:call/3 with our old friend
-%% the 5 second timeout. Grr.
-error_logger_logfile_filename() ->
-    case gen_event:call(
-           error_logger, error_logger_file_h, filename, infinity) of
-       {error,_} -> {error, no_log_file};
-       Val       -> Val
-    end.
-
-add_server_test_ebin_dir() ->
-    %% Some tests need modules from this dir, but it's not on the path
-    %% by default.
-    {file, Path} = code:is_loaded(rabbit),
-    Ebin = filename:dirname(Path),
-    TestEbin = filename:join([Ebin, "..", "test", "ebin"]),
-    code:add_path(TestEbin).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_util.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_util.erl
deleted file mode 100644 (file)
index 973e1b0..0000000
+++ /dev/null
@@ -1,147 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(rabbit_test_util).
-
--include_lib("amqp_client/include/amqp_client.hrl").
--import(rabbit_misc, [pget/2]).
-
--compile(export_all).
-
-set_ha_policy(Cfg, Pattern, Policy) ->
-    set_ha_policy(Cfg, Pattern, Policy, []).
-
-set_ha_policy(Cfg, Pattern, Policy, Extra) ->
-    set_policy(Cfg, Pattern, Pattern, <<"queues">>, ha_policy(Policy) ++ Extra).
-
-ha_policy(<<"all">>)      -> [{<<"ha-mode">>,   <<"all">>}];
-ha_policy({Mode, Params}) -> [{<<"ha-mode">>,   Mode},
-                              {<<"ha-params">>, Params}].
-
-set_policy(Cfg, Name, Pattern, ApplyTo, Definition) ->
-    ok = rpc:call(pget(node, Cfg), rabbit_policy, set,
-                  [<<"/">>, Name, Pattern, Definition, 0, ApplyTo]).
-
-clear_policy(Cfg, Name) ->
-    ok = rpc:call(pget(node, Cfg), rabbit_policy, delete, [<<"/">>, Name]).
-
-set_param(Cfg, Component, Name, Value) ->
-    ok = rpc:call(pget(node, Cfg), rabbit_runtime_parameters, set,
-                  [<<"/">>, Component, Name, Value, none]).
-
-clear_param(Cfg, Component, Name) ->
-    ok = rpc:call(pget(node, Cfg), rabbit_runtime_parameters, clear,
-                 [<<"/">>, Component, Name]).
-
-enable_plugin(Cfg, Plugin) ->
-    plugins_action(enable, Cfg, [Plugin], []).
-
-disable_plugin(Cfg, Plugin) ->
-    plugins_action(disable, Cfg, [Plugin], []).
-
-control_action(Command, Cfg) ->
-    control_action(Command, Cfg, [], []).
-
-control_action(Command, Cfg, Args) ->
-    control_action(Command, Cfg, Args, []).
-
-control_action(Command, Cfg, Args, Opts) ->
-    Node = pget(node, Cfg),
-    rpc:call(Node, rabbit_control_main, action,
-             [Command, Node, Args, Opts,
-              fun (F, A) ->
-                      error_logger:info_msg(F ++ "~n", A)
-              end]).
-
-plugins_action(Command, Cfg, Args, Opts) ->
-    PluginsFile = os:getenv("RABBITMQ_ENABLED_PLUGINS_FILE"),
-    PluginsDir = os:getenv("RABBITMQ_PLUGINS_DIR"),
-    Node = pget(node, Cfg),
-    rpc:call(Node, rabbit_plugins_main, action,
-             [Command, Node, Args, Opts, PluginsFile, PluginsDir]).
-
-restart_app(Cfg) ->
-    stop_app(Cfg),
-    start_app(Cfg).
-
-stop_app(Cfg) ->
-    control_action(stop_app, Cfg).
-
-start_app(Cfg) ->
-    control_action(start_app, Cfg).
-
-connect(Cfg) ->
-    Port = pget(port, Cfg),
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{port = Port}),
-    {ok, Ch} =  amqp_connection:open_channel(Conn),
-    {Conn, Ch}.
-
-%%----------------------------------------------------------------------------
-
-kill_after(Time, Cfg, Method) ->
-    timer:sleep(Time),
-    kill(Cfg, Method).
-
-kill(Cfg, Method) ->
-    kill0(Cfg, Method),
-    wait_down(pget(node, Cfg)).
-
-kill0(Cfg, stop)    -> rabbit_test_configs:stop_node(Cfg);
-kill0(Cfg, sigkill) -> rabbit_test_configs:kill_node(Cfg).
-
-wait_down(Node) ->
-    case net_adm:ping(Node) of
-        pong -> timer:sleep(25),
-                wait_down(Node);
-        pang -> ok
-    end.
-
-a2b(A) -> list_to_binary(atom_to_list(A)).
-
-%%----------------------------------------------------------------------------
-
-publish(Ch, QName, Count) ->
-    amqp_channel:call(Ch, #'confirm.select'{}),
-    [amqp_channel:call(Ch,
-                       #'basic.publish'{routing_key = QName},
-                       #amqp_msg{props   = #'P_basic'{delivery_mode = 2},
-                                 payload = list_to_binary(integer_to_list(I))})
-     || I <- lists:seq(1, Count)],
-    amqp_channel:wait_for_confirms(Ch).
-
-consume(Ch, QName, Count) ->
-    amqp_channel:subscribe(Ch, #'basic.consume'{queue = QName, no_ack = true},
-                           self()),
-    CTag = receive #'basic.consume_ok'{consumer_tag = C} -> C end,
-    [begin
-         Exp = list_to_binary(integer_to_list(I)),
-         receive {#'basic.deliver'{consumer_tag = CTag},
-                  #amqp_msg{payload = Exp}} ->
-                 ok
-         after 500 ->
-                 exit(timeout)
-         end
-     end|| I <- lists:seq(1, Count)],
-    #'queue.declare_ok'{message_count = 0}
-        = amqp_channel:call(Ch, #'queue.declare'{queue   = QName,
-                                                 durable = true}),
-    amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = CTag}),
-    ok.
-
-fetch(Ch, QName, Count) ->
-    [{#'basic.get_ok'{}, _} =
-         amqp_channel:call(Ch, #'basic.get'{queue = QName}) ||
-        _ <- lists:seq(1, Count)],
-    ok.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbitmq_test.app.src b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbitmq_test.app.src
deleted file mode 100644 (file)
index 108f874..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-{application, rabbitmq_test,
- [
-  {description, ""},
-  {vsn, "1"},
-  {registered, []},
-  {applications, [
-                  kernel,
-                  stdlib
-                 ]},
-  {env, []}
- ]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/cluster_rename.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/cluster_rename.erl
deleted file mode 100644 (file)
index 258c0dc..0000000
+++ /dev/null
@@ -1,194 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(cluster_rename).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_misc, [pget/2]).
-
--define(CLUSTER2,
-        fun(C) -> rabbit_test_configs:cluster(C, [bugs, bigwig]) end).
-
--define(CLUSTER3,
-        fun(C) -> rabbit_test_configs:cluster(C, [bugs, bigwig, peter]) end).
-
-%% Rolling rename of a cluster, each node should do a secondary rename.
-rename_cluster_one_by_one_with() -> ?CLUSTER3.
-rename_cluster_one_by_one([Bugs, Bigwig, Peter]) ->
-    publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]),
-
-    Jessica = stop_rename_start(Bugs,   jessica, [bugs, jessica]),
-    Hazel   = stop_rename_start(Bigwig, hazel,   [bigwig, hazel]),
-    Flopsy  = stop_rename_start(Peter,  flopsy,  [peter, flopsy]),
-
-    consume_all([{Jessica, <<"1">>}, {Hazel, <<"2">>}, {Flopsy, <<"3">>}]),
-    stop_all([Jessica, Hazel, Flopsy]),
-    ok.
-
-%% Big bang rename of a cluster, bugs should do a primary rename.
-rename_cluster_big_bang_with() -> ?CLUSTER3.
-rename_cluster_big_bang([Bugs, Bigwig, Peter]) ->
-    publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]),
-
-    Peter1  = rabbit_test_configs:stop_node(Peter),
-    Bigwig1 = rabbit_test_configs:stop_node(Bigwig),
-    Bugs1   = rabbit_test_configs:stop_node(Bugs),
-
-    Map = [bugs, jessica, bigwig, hazel, peter, flopsy],
-    Jessica0 = rename_node(Bugs1,   jessica, Map),
-    Hazel0   = rename_node(Bigwig1, hazel,   Map),
-    Flopsy0  = rename_node(Peter1,  flopsy,  Map),
-
-    Jessica = rabbit_test_configs:start_node(Jessica0),
-    Hazel   = rabbit_test_configs:start_node(Hazel0),
-    Flopsy  = rabbit_test_configs:start_node(Flopsy0),
-
-    consume_all([{Jessica, <<"1">>}, {Hazel, <<"2">>}, {Flopsy, <<"3">>}]),
-    stop_all([Jessica, Hazel, Flopsy]),
-    ok.
-
-%% Here we test that bugs copes with things being renamed around it.
-partial_one_by_one_with() -> ?CLUSTER3.
-partial_one_by_one([Bugs, Bigwig, Peter]) ->
-    publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]),
-
-    Jessica = stop_rename_start(Bugs,   jessica, [bugs, jessica]),
-    Hazel   = stop_rename_start(Bigwig, hazel,   [bigwig, hazel]),
-
-    consume_all([{Jessica, <<"1">>}, {Hazel, <<"2">>}, {Peter, <<"3">>}]),
-    stop_all([Jessica, Hazel, Peter]),
-    ok.
-
-%% Here we test that bugs copes with things being renamed around it.
-partial_big_bang_with() -> ?CLUSTER3.
-partial_big_bang([Bugs, Bigwig, Peter]) ->
-    publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]),
-
-    Peter1  = rabbit_test_configs:stop_node(Peter),
-    Bigwig1 = rabbit_test_configs:stop_node(Bigwig),
-    Bugs1   = rabbit_test_configs:stop_node(Bugs),
-
-    Map = [bigwig, hazel, peter, flopsy],
-    Hazel0   = rename_node(Bigwig1, hazel,   Map),
-    Flopsy0  = rename_node(Peter1,  flopsy,  Map),
-
-    Bugs2  = rabbit_test_configs:start_node(Bugs1),
-    Hazel  = rabbit_test_configs:start_node(Hazel0),
-    Flopsy = rabbit_test_configs:start_node(Flopsy0),
-
-    consume_all([{Bugs2, <<"1">>}, {Hazel, <<"2">>}, {Flopsy, <<"3">>}]),
-    stop_all([Bugs2, Hazel, Flopsy]),
-    ok.
-
-%% We should be able to specify the -n parameter on ctl with either
-%% the before or after name for the local node (since in real cases
-%% one might want to invoke the command before or after the hostname
-%% has changed) - usually we test before so here we test after.
-post_change_nodename_with() -> ?CLUSTER2.
-post_change_nodename([Bugs, _Bigwig]) ->
-    publish(Bugs, <<"bugs">>),
-
-    Bugs1    = rabbit_test_configs:stop_node(Bugs),
-    Bugs2    = [{nodename, jessica} | proplists:delete(nodename, Bugs1)],
-    Jessica0 = rename_node(Bugs2, jessica, [bugs, jessica]),
-    Jessica  = rabbit_test_configs:start_node(Jessica0),
-
-    consume(Jessica, <<"bugs">>),
-    stop_all([Jessica]),
-    ok.
-
-%% If we invoke rename but the node name does not actually change, we
-%% should roll back.
-abortive_rename_with() -> ?CLUSTER2.
-abortive_rename([Bugs, _Bigwig]) ->
-    publish(Bugs,  <<"bugs">>),
-
-    Bugs1   = rabbit_test_configs:stop_node(Bugs),
-    _Jessica = rename_node(Bugs1, jessica, [bugs, jessica]),
-    Bugs2 = rabbit_test_configs:start_node(Bugs1),
-
-    consume(Bugs2, <<"bugs">>),
-    ok.
-
-%% And test some ways the command can fail.
-rename_fail_with() -> ?CLUSTER2.
-rename_fail([Bugs, _Bigwig]) ->
-    Bugs1 = rabbit_test_configs:stop_node(Bugs),
-    %% Rename from a node that does not exist
-    rename_node_fail(Bugs1, [bugzilla, jessica]),
-    %% Rename to a node which does
-    rename_node_fail(Bugs1, [bugs, bigwig]),
-    %% Rename two nodes to the same thing
-    rename_node_fail(Bugs1, [bugs, jessica, bigwig, jessica]),
-    %% Rename while impersonating a node not in the cluster
-    rename_node_fail(set_node(rabbit, Bugs1), [bugs, jessica]),
-    ok.
-
-rename_twice_fail_with() -> ?CLUSTER2.
-rename_twice_fail([Bugs, _Bigwig]) ->
-    Bugs1 = rabbit_test_configs:stop_node(Bugs),
-    Indecisive = rename_node(Bugs1, indecisive, [bugs, indecisive]),
-    rename_node_fail(Indecisive, [indecisive, jessica]),
-    ok.
-
-%% ----------------------------------------------------------------------------
-
-%% Normal post-test stop does not work since names have changed...
-stop_all(Cfgs) ->
-     [rabbit_test_configs:stop_node(Cfg) || Cfg <- Cfgs].
-
-stop_rename_start(Cfg, Nodename, Map) ->
-    rabbit_test_configs:start_node(
-      rename_node(rabbit_test_configs:stop_node(Cfg), Nodename, Map)).
-
-rename_node(Cfg, Nodename, Map) ->
-    rename_node(Cfg, Nodename, Map, fun rabbit_test_configs:rabbitmqctl/2).
-
-rename_node_fail(Cfg, Map) ->
-    rename_node(Cfg, ignored, Map, fun rabbit_test_configs:rabbitmqctl_fail/2).
-
-rename_node(Cfg, Nodename, Map, Ctl) ->
-    MapS = string:join(
-             [atom_to_list(rabbit_nodes:make(N)) || N <- Map], " "),
-    Ctl(Cfg, {"rename_cluster_node ~s", [MapS]}),
-    set_node(Nodename, Cfg).
-
-publish(Cfg, Q) ->
-    Ch = pget(channel, Cfg),
-    amqp_channel:call(Ch, #'confirm.select'{}),
-    amqp_channel:call(Ch, #'queue.declare'{queue = Q, durable = true}),
-    amqp_channel:cast(Ch, #'basic.publish'{routing_key = Q},
-                      #amqp_msg{props   = #'P_basic'{delivery_mode = 2},
-                                payload = Q}),
-    amqp_channel:wait_for_confirms(Ch).
-
-consume(Cfg, Q) ->
-    {_Conn, Ch} = rabbit_test_util:connect(Cfg),
-    amqp_channel:call(Ch, #'queue.declare'{queue = Q, durable = true}),
-    {#'basic.get_ok'{}, #amqp_msg{payload = Q}} =
-        amqp_channel:call(Ch, #'basic.get'{queue = Q}).
-
-
-publish_all(CfgsKeys) ->
-    [publish(Cfg, Key) || {Cfg, Key} <- CfgsKeys].
-
-consume_all(CfgsKeys) ->
-    [consume(Cfg, Key) || {Cfg, Key} <- CfgsKeys].
-
-set_node(Nodename, Cfg) ->
-    [{nodename, Nodename} | proplists:delete(nodename, Cfg)].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/clustering_management.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/clustering_management.erl
deleted file mode 100644 (file)
index b114aab..0000000
+++ /dev/null
@@ -1,608 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(clustering_management).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_misc, [pget/2]).
-
--define(LOOP_RECURSION_DELAY, 100).
-
-join_and_part_cluster_with() -> start_abc.
-join_and_part_cluster(Config) ->
-    [Rabbit, Hare, Bunny] = cluster_members(Config),
-    assert_not_clustered(Rabbit),
-    assert_not_clustered(Hare),
-    assert_not_clustered(Bunny),
-
-    stop_join_start(Rabbit, Bunny),
-    assert_clustered([Rabbit, Bunny]),
-
-    stop_join_start(Hare, Bunny, true),
-    assert_cluster_status(
-      {[Bunny, Hare, Rabbit], [Bunny, Rabbit], [Bunny, Hare, Rabbit]},
-      [Rabbit, Hare, Bunny]),
-
-    %% Allow clustering with already clustered node
-    ok = stop_app(Rabbit),
-    {ok, already_member} = join_cluster(Rabbit, Hare),
-    ok = start_app(Rabbit),
-
-    stop_reset_start(Rabbit),
-    assert_not_clustered(Rabbit),
-    assert_cluster_status({[Bunny, Hare], [Bunny], [Bunny, Hare]},
-                          [Hare, Bunny]),
-
-    stop_reset_start(Hare),
-    assert_not_clustered(Hare),
-    assert_not_clustered(Bunny).
-
-join_cluster_bad_operations_with() -> start_abc.
-join_cluster_bad_operations(Config) ->
-    [Rabbit, Hare, Bunny] = cluster_members(Config),
-
-    %% Non-existant node
-    ok = stop_app(Rabbit),
-    assert_failure(fun () -> join_cluster(Rabbit, non@existant) end),
-    ok = start_app(Rabbit),
-    assert_not_clustered(Rabbit),
-
-    %% Trying to cluster with mnesia running
-    assert_failure(fun () -> join_cluster(Rabbit, Bunny) end),
-    assert_not_clustered(Rabbit),
-
-    %% Trying to cluster the node with itself
-    ok = stop_app(Rabbit),
-    assert_failure(fun () -> join_cluster(Rabbit, Rabbit) end),
-    ok = start_app(Rabbit),
-    assert_not_clustered(Rabbit),
-
-    %% Do not let the node leave the cluster or reset if it's the only
-    %% ram node
-    stop_join_start(Hare, Rabbit, true),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit], [Rabbit, Hare]},
-                          [Rabbit, Hare]),
-    ok = stop_app(Hare),
-    assert_failure(fun () -> join_cluster(Rabbit, Bunny) end),
-    assert_failure(fun () -> reset(Rabbit) end),
-    ok = start_app(Hare),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit], [Rabbit, Hare]},
-                          [Rabbit, Hare]),
-
-    %% Cannot start RAM-only node first
-    ok = stop_app(Rabbit),
-    ok = stop_app(Hare),
-    assert_failure(fun () -> start_app(Hare) end),
-    ok = start_app(Rabbit),
-    ok = start_app(Hare),
-    ok.
-
-%% This tests that the nodes in the cluster are notified immediately of a node
-%% join, and not just after the app is started.
-join_to_start_interval_with() -> start_abc.
-join_to_start_interval(Config) ->
-    [Rabbit, Hare, _Bunny] = cluster_members(Config),
-
-    ok = stop_app(Rabbit),
-    ok = join_cluster(Rabbit, Hare),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]},
-                          [Rabbit, Hare]),
-    ok = start_app(Rabbit),
-    assert_clustered([Rabbit, Hare]).
-
-forget_cluster_node_with() -> start_abc.
-forget_cluster_node([_, HareCfg, _] = Config) ->
-    [Rabbit, Hare, Bunny] = cluster_members(Config),
-
-    %% Trying to remove a node not in the cluster should fail
-    assert_failure(fun () -> forget_cluster_node(Hare, Rabbit) end),
-
-    stop_join_start(Rabbit, Hare),
-    assert_clustered([Rabbit, Hare]),
-
-    %% Trying to remove an online node should fail
-    assert_failure(fun () -> forget_cluster_node(Hare, Rabbit) end),
-
-    ok = stop_app(Rabbit),
-    %% We're passing the --offline flag, but Hare is online
-    assert_failure(fun () -> forget_cluster_node(Hare, Rabbit, true) end),
-    %% Removing some non-existant node will fail
-    assert_failure(fun () -> forget_cluster_node(Hare, non@existant) end),
-    ok = forget_cluster_node(Hare, Rabbit),
-    assert_not_clustered(Hare),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]},
-                          [Rabbit]),
-
-    %% Now we can't start Rabbit since it thinks that it's still in the cluster
-    %% with Hare, while Hare disagrees.
-    assert_failure(fun () -> start_app(Rabbit) end),
-
-    ok = reset(Rabbit),
-    ok = start_app(Rabbit),
-    assert_not_clustered(Rabbit),
-
-    %% Now we remove Rabbit from an offline node.
-    stop_join_start(Bunny, Hare),
-    stop_join_start(Rabbit, Hare),
-    assert_clustered([Rabbit, Hare, Bunny]),
-    ok = stop_app(Hare),
-    ok = stop_app(Rabbit),
-    ok = stop_app(Bunny),
-    %% This is fine but we need the flag
-    assert_failure(fun () -> forget_cluster_node(Hare, Bunny) end),
-    %% Also fails because hare node is still running
-    assert_failure(fun () -> forget_cluster_node(Hare, Bunny, true) end),
-    %% But this works
-    HareCfg2 = rabbit_test_configs:stop_node(HareCfg),
-    rabbit_test_configs:rabbitmqctl(
-      HareCfg2, {"forget_cluster_node --offline ~s", [Bunny]}),
-    _HareCfg3 = rabbit_test_configs:start_node(HareCfg2),
-    ok = start_app(Rabbit),
-    %% Bunny still thinks its clustered with Rabbit and Hare
-    assert_failure(fun () -> start_app(Bunny) end),
-    ok = reset(Bunny),
-    ok = start_app(Bunny),
-    assert_not_clustered(Bunny),
-    assert_clustered([Rabbit, Hare]).
-
-forget_removes_things_with() -> cluster_ab.
-forget_removes_things(Cfg) ->
-    test_removes_things(Cfg, fun (R, H) -> ok = forget_cluster_node(H, R) end).
-
-reset_removes_things_with() -> cluster_ab.
-reset_removes_things(Cfg) ->
-    test_removes_things(Cfg, fun (R, _H) -> ok = reset(R) end).
-
-test_removes_things([RabbitCfg, HareCfg] = Config, LoseRabbit) ->
-    Unmirrored = <<"unmirrored-queue">>,
-    [Rabbit, Hare] = cluster_members(Config),
-    RCh = pget(channel, RabbitCfg),
-    declare(RCh, Unmirrored),
-    ok = stop_app(Rabbit),
-
-    {_HConn, HCh} = rabbit_test_util:connect(HareCfg),
-    {'EXIT',{{shutdown,{server_initiated_close,404,_}}, _}} =
-        (catch declare(HCh, Unmirrored)),
-
-    ok = LoseRabbit(Rabbit, Hare),
-    {_HConn2, HCh2} = rabbit_test_util:connect(HareCfg),
-    declare(HCh2, Unmirrored),
-    ok.
-
-forget_offline_removes_things_with() -> cluster_ab.
-forget_offline_removes_things([Rabbit, Hare]) ->
-    Unmirrored = <<"unmirrored-queue">>,
-    X = <<"X">>,
-    RCh = pget(channel, Rabbit),
-    declare(RCh, Unmirrored),
-
-    amqp_channel:call(RCh, #'exchange.declare'{durable     = true,
-                                               exchange    = X,
-                                               auto_delete = true}),
-    amqp_channel:call(RCh, #'queue.bind'{queue    = Unmirrored,
-                                         exchange = X}),
-    ok = stop_app(pget(node, Rabbit)),
-
-    {_HConn, HCh} = rabbit_test_util:connect(Hare),
-    {'EXIT',{{shutdown,{server_initiated_close,404,_}}, _}} =
-        (catch declare(HCh, Unmirrored)),
-
-    Hare2 = rabbit_test_configs:stop_node(Hare),
-    _Rabbit2 = rabbit_test_configs:stop_node(Rabbit),
-    rabbit_test_configs:rabbitmqctl(
-      Hare2, {"forget_cluster_node --offline ~s", [pget(node, Rabbit)]}),
-    Hare3 = rabbit_test_configs:start_node(Hare2),
-
-    {_HConn2, HCh2} = rabbit_test_util:connect(Hare3),
-    declare(HCh2, Unmirrored),
-    {'EXIT',{{shutdown,{server_initiated_close,404,_}}, _}} =
-        (catch amqp_channel:call(HCh2,#'exchange.declare'{durable     = true,
-                                                          exchange    = X,
-                                                          auto_delete = true,
-                                                          passive     = true})),
-    ok.
-
-forget_promotes_offline_slave_with() ->
-    fun (Cfgs) ->
-            rabbit_test_configs:cluster(Cfgs, [a, b, c, d])
-    end.
-
-forget_promotes_offline_slave([A, B, C, D]) ->
-    ACh = pget(channel, A),
-    ANode = pget(node, A),
-    Q = <<"mirrored-queue">>,
-    declare(ACh, Q),
-    set_ha_policy(Q, A, [B, C]),
-    set_ha_policy(Q, A, [C, D]), %% Test add and remove from recoverable_slaves
-
-    %% Publish and confirm
-    amqp_channel:call(ACh, #'confirm.select'{}),
-    amqp_channel:cast(ACh, #'basic.publish'{routing_key = Q},
-                      #amqp_msg{props = #'P_basic'{delivery_mode = 2}}),
-    amqp_channel:wait_for_confirms(ACh),
-
-    %% We kill nodes rather than stop them in order to make sure
-    %% that we aren't dependent on anything that happens as they shut
-    %% down (see bug 26467).
-    D2 = rabbit_test_configs:kill_node(D),
-    C2 = rabbit_test_configs:kill_node(C),
-    _B2 = rabbit_test_configs:kill_node(B),
-    _A2 = rabbit_test_configs:kill_node(A),
-
-    rabbit_test_configs:rabbitmqctl(C2, "force_boot"),
-
-    C3 = rabbit_test_configs:start_node(C2),
-
-    %% We should now have the following dramatis personae:
-    %% A - down, master
-    %% B - down, used to be slave, no longer is, never had the message
-    %% C - running, should be slave, but has wiped the message on restart
-    %% D - down, recoverable slave, contains message
-    %%
-    %% So forgetting A should offline-promote the queue to D, keeping
-    %% the message.
-
-    rabbit_test_configs:rabbitmqctl(C3, {"forget_cluster_node ~s", [ANode]}),
-
-    D3 = rabbit_test_configs:start_node(D2),
-    {_DConn2, DCh2} = rabbit_test_util:connect(D3),
-    #'queue.declare_ok'{message_count = 1} = declare(DCh2, Q),
-    ok.
-
-set_ha_policy(Q, MasterCfg, SlaveCfgs) ->
-    Nodes = [list_to_binary(atom_to_list(pget(node, N))) ||
-                N <- [MasterCfg | SlaveCfgs]],
-    rabbit_test_util:set_ha_policy(MasterCfg, Q, {<<"nodes">>, Nodes}),
-    await_slaves(Q, pget(node, MasterCfg), [pget(node, C) || C <- SlaveCfgs]).
-
-await_slaves(Q, MNode, SNodes) ->
-    {ok, #amqqueue{pid        = MPid,
-                   slave_pids = SPids}} =
-        rpc:call(MNode, rabbit_amqqueue, lookup,
-                 [rabbit_misc:r(<<"/">>, queue, Q)]),
-    ActMNode = node(MPid),
-    ActSNodes = lists:usort([node(P) || P <- SPids]),
-    case {MNode, lists:usort(SNodes)} of
-        {ActMNode, ActSNodes} -> ok;
-        _                     -> timer:sleep(100),
-                                 await_slaves(Q, MNode, SNodes)
-    end.
-
-force_boot_with() -> cluster_ab.
-force_boot([Rabbit, Hare]) ->
-    rabbit_test_configs:rabbitmqctl_fail(Rabbit, force_boot),
-    Rabbit2 = rabbit_test_configs:stop_node(Rabbit),
-    _Hare2 = rabbit_test_configs:stop_node(Hare),
-    rabbit_test_configs:start_node_fail(Rabbit2),
-    rabbit_test_configs:rabbitmqctl(Rabbit2, force_boot),
-    _Rabbit3 = rabbit_test_configs:start_node(Rabbit2),
-    ok.
-
-change_cluster_node_type_with() -> start_abc.
-change_cluster_node_type(Config) ->
-    [Rabbit, Hare, _Bunny] = cluster_members(Config),
-
-    %% Trying to change the ram node when not clustered should always fail
-    ok = stop_app(Rabbit),
-    assert_failure(fun () -> change_cluster_node_type(Rabbit, ram) end),
-    assert_failure(fun () -> change_cluster_node_type(Rabbit, disc) end),
-    ok = start_app(Rabbit),
-
-    ok = stop_app(Rabbit),
-    join_cluster(Rabbit, Hare),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]},
-                          [Rabbit, Hare]),
-    change_cluster_node_type(Rabbit, ram),
-    assert_cluster_status({[Rabbit, Hare], [Hare], [Hare]},
-                          [Rabbit, Hare]),
-    change_cluster_node_type(Rabbit, disc),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]},
-                          [Rabbit, Hare]),
-    change_cluster_node_type(Rabbit, ram),
-    ok = start_app(Rabbit),
-    assert_cluster_status({[Rabbit, Hare], [Hare], [Hare, Rabbit]},
-                          [Rabbit, Hare]),
-
-    %% Changing to ram when you're the only ram node should fail
-    ok = stop_app(Hare),
-    assert_failure(fun () -> change_cluster_node_type(Hare, ram) end),
-    ok = start_app(Hare).
-
-change_cluster_when_node_offline_with() -> start_abc.
-change_cluster_when_node_offline(Config) ->
-    [Rabbit, Hare, Bunny] = cluster_members(Config),
-
-    %% Cluster the three notes
-    stop_join_start(Rabbit, Hare),
-    assert_clustered([Rabbit, Hare]),
-
-    stop_join_start(Bunny, Hare),
-    assert_clustered([Rabbit, Hare, Bunny]),
-
-    %% Bring down Rabbit, and remove Bunny from the cluster while
-    %% Rabbit is offline
-    ok = stop_app(Rabbit),
-    ok = stop_app(Bunny),
-    ok = reset(Bunny),
-    assert_cluster_status({[Bunny], [Bunny], []}, [Bunny]),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]}, [Hare]),
-    assert_cluster_status(
-      {[Rabbit, Hare, Bunny], [Rabbit, Hare, Bunny], [Hare, Bunny]}, [Rabbit]),
-
-    %% Bring Rabbit back up
-    ok = start_app(Rabbit),
-    assert_clustered([Rabbit, Hare]),
-    ok = start_app(Bunny),
-    assert_not_clustered(Bunny),
-
-    %% Now the same, but Rabbit is a RAM node, and we bring up Bunny
-    %% before
-    ok = stop_app(Rabbit),
-    ok = change_cluster_node_type(Rabbit, ram),
-    ok = start_app(Rabbit),
-    stop_join_start(Bunny, Hare),
-    assert_cluster_status(
-      {[Rabbit, Hare, Bunny], [Hare, Bunny], [Rabbit, Hare, Bunny]},
-      [Rabbit, Hare, Bunny]),
-    ok = stop_app(Rabbit),
-    ok = stop_app(Bunny),
-    ok = reset(Bunny),
-    ok = start_app(Bunny),
-    assert_not_clustered(Bunny),
-    assert_cluster_status({[Rabbit, Hare], [Hare], [Hare]}, [Hare]),
-    assert_cluster_status(
-      {[Rabbit, Hare, Bunny], [Hare, Bunny], [Hare, Bunny]},
-      [Rabbit]),
-    ok = start_app(Rabbit),
-    assert_cluster_status({[Rabbit, Hare], [Hare], [Rabbit, Hare]},
-                          [Rabbit, Hare]),
-    assert_not_clustered(Bunny).
-
-update_cluster_nodes_with() -> start_abc.
-update_cluster_nodes(Config) ->
-    [Rabbit, Hare, Bunny] = cluster_members(Config),
-
-    %% Mnesia is running...
-    assert_failure(fun () -> update_cluster_nodes(Rabbit, Hare) end),
-
-    ok = stop_app(Rabbit),
-    ok = join_cluster(Rabbit, Hare),
-    ok = stop_app(Bunny),
-    ok = join_cluster(Bunny, Hare),
-    ok = start_app(Bunny),
-    stop_reset_start(Hare),
-    assert_failure(fun () -> start_app(Rabbit) end),
-    %% Bogus node
-    assert_failure(fun () -> update_cluster_nodes(Rabbit, non@existant) end),
-    %% Inconsisent node
-    assert_failure(fun () -> update_cluster_nodes(Rabbit, Hare) end),
-    ok = update_cluster_nodes(Rabbit, Bunny),
-    ok = start_app(Rabbit),
-    assert_not_clustered(Hare),
-    assert_clustered([Rabbit, Bunny]).
-
-erlang_config_with() -> start_ab.
-erlang_config(Config) ->
-    [Rabbit, Hare] = cluster_members(Config),
-
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, {[Rabbit], disc}]),
-    ok = start_app(Hare),
-    assert_clustered([Rabbit, Hare]),
-
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, {[Rabbit], ram}]),
-    ok = start_app(Hare),
-    assert_cluster_status({[Rabbit, Hare], [Rabbit], [Rabbit, Hare]},
-                          [Rabbit, Hare]),
-
-    %% Check having a stop_app'ed node around doesn't break completely.
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = stop_app(Rabbit),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, {[Rabbit], disc}]),
-    ok = start_app(Hare),
-    ok = start_app(Rabbit),
-    assert_not_clustered(Hare),
-    assert_not_clustered(Rabbit),
-
-    %% We get a warning but we start anyway
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, {[non@existent], disc}]),
-    ok = start_app(Hare),
-    assert_not_clustered(Hare),
-    assert_not_clustered(Rabbit),
-
-    %% If we use a legacy config file, the node fails to start.
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, [Rabbit]]),
-    assert_failure(fun () -> start_app(Hare) end),
-    assert_not_clustered(Rabbit),
-
-    %% If we use an invalid node name, the node fails to start.
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, {["Mike's computer"], disc}]),
-    assert_failure(fun () -> start_app(Hare) end),
-    assert_not_clustered(Rabbit),
-
-    %% If we use an invalid node type, the node fails to start.
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, {[Rabbit], blue}]),
-    assert_failure(fun () -> start_app(Hare) end),
-    assert_not_clustered(Rabbit),
-
-    %% If we use an invalid cluster_nodes conf, the node fails to start.
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, true]),
-    assert_failure(fun () -> start_app(Hare) end),
-    assert_not_clustered(Rabbit),
-
-    ok = stop_app(Hare),
-    ok = reset(Hare),
-    ok = rpc:call(Hare, application, set_env,
-                  [rabbit, cluster_nodes, "Yes, please"]),
-    assert_failure(fun () -> start_app(Hare) end),
-    assert_not_clustered(Rabbit).
-
-force_reset_node_with() -> start_abc.
-force_reset_node(Config) ->
-    [Rabbit, Hare, _Bunny] = cluster_members(Config),
-
-    stop_join_start(Rabbit, Hare),
-    stop_app(Rabbit),
-    force_reset(Rabbit),
-    %% Hare thinks that Rabbit is still clustered
-    assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]},
-                          [Hare]),
-    %% %% ...but it isn't
-    assert_cluster_status({[Rabbit], [Rabbit], []}, [Rabbit]),
-    %% We can rejoin Rabbit and Hare
-    update_cluster_nodes(Rabbit, Hare),
-    start_app(Rabbit),
-    assert_clustered([Rabbit, Hare]).
-
-%% ----------------------------------------------------------------------------
-%% Internal utils
-
-cluster_members(Nodes) -> [pget(node,Cfg) || Cfg <- Nodes].
-
-assert_cluster_status(Status0, Nodes) ->
-    Status = {AllNodes, _, _} = sort_cluster_status(Status0),
-    wait_for_cluster_status(Status, AllNodes, Nodes).
-
-wait_for_cluster_status(Status, AllNodes, Nodes) ->
-    Max = 10000 / ?LOOP_RECURSION_DELAY,
-    wait_for_cluster_status(0, Max, Status, AllNodes, Nodes).
-
-wait_for_cluster_status(N, Max, Status, _AllNodes, Nodes) when N >= Max ->
-    erlang:error({cluster_status_max_tries_failed,
-                  [{nodes, Nodes},
-                   {expected_status, Status},
-                   {max_tried, Max}]});
-wait_for_cluster_status(N, Max, Status, AllNodes, Nodes) ->
-    case lists:all(fun (Node) ->
-                            verify_status_equal(Node, Status, AllNodes)
-                   end, Nodes) of
-        true  -> ok;
-        false -> timer:sleep(?LOOP_RECURSION_DELAY),
-                 wait_for_cluster_status(N + 1, Max, Status, AllNodes, Nodes)
-    end.
-
-verify_status_equal(Node, Status, AllNodes) ->
-    NodeStatus = sort_cluster_status(cluster_status(Node)),
-    (AllNodes =/= [Node]) =:= rpc:call(Node, rabbit_mnesia, is_clustered, [])
-        andalso NodeStatus =:= Status.
-
-cluster_status(Node) ->
-    {rpc:call(Node, rabbit_mnesia, cluster_nodes, [all]),
-     rpc:call(Node, rabbit_mnesia, cluster_nodes, [disc]),
-     rpc:call(Node, rabbit_mnesia, cluster_nodes, [running])}.
-
-sort_cluster_status({All, Disc, Running}) ->
-    {lists:sort(All), lists:sort(Disc), lists:sort(Running)}.
-
-assert_clustered(Nodes) ->
-    assert_cluster_status({Nodes, Nodes, Nodes}, Nodes).
-
-assert_not_clustered(Node) ->
-    assert_cluster_status({[Node], [Node], [Node]}, [Node]).
-
-assert_failure(Fun) ->
-    case catch Fun() of
-        {error, Reason}                -> Reason;
-        {badrpc, {'EXIT', Reason}}     -> Reason;
-        {badrpc_multi, Reason, _Nodes} -> Reason;
-        Other                          -> exit({expected_failure, Other})
-    end.
-
-stop_app(Node) ->
-    control_action(stop_app, Node).
-
-start_app(Node) ->
-    control_action(start_app, Node).
-
-join_cluster(Node, To) ->
-    join_cluster(Node, To, false).
-
-join_cluster(Node, To, Ram) ->
-    control_action(join_cluster, Node, [atom_to_list(To)], [{"--ram", Ram}]).
-
-reset(Node) ->
-    control_action(reset, Node).
-
-force_reset(Node) ->
-    control_action(force_reset, Node).
-
-forget_cluster_node(Node, Removee, RemoveWhenOffline) ->
-    control_action(forget_cluster_node, Node, [atom_to_list(Removee)],
-                   [{"--offline", RemoveWhenOffline}]).
-
-forget_cluster_node(Node, Removee) ->
-    forget_cluster_node(Node, Removee, false).
-
-change_cluster_node_type(Node, Type) ->
-    control_action(change_cluster_node_type, Node, [atom_to_list(Type)]).
-
-update_cluster_nodes(Node, DiscoveryNode) ->
-    control_action(update_cluster_nodes, Node, [atom_to_list(DiscoveryNode)]).
-
-stop_join_start(Node, ClusterTo, Ram) ->
-    ok = stop_app(Node),
-    ok = join_cluster(Node, ClusterTo, Ram),
-    ok = start_app(Node).
-
-stop_join_start(Node, ClusterTo) ->
-    stop_join_start(Node, ClusterTo, false).
-
-stop_reset_start(Node) ->
-    ok = stop_app(Node),
-    ok = reset(Node),
-    ok = start_app(Node).
-
-control_action(Command, Node) ->
-    control_action(Command, Node, [], []).
-
-control_action(Command, Node, Args) ->
-    control_action(Command, Node, Args, []).
-
-control_action(Command, Node, Args, Opts) ->
-    rpc:call(Node, rabbit_control_main, action,
-             [Command, Node, Args, Opts,
-              fun io:format/2]).
-
-declare(Ch, Name) ->
-    Res = amqp_channel:call(Ch, #'queue.declare'{durable = true,
-                                                 queue   = Name}),
-    amqp_channel:call(Ch, #'queue.bind'{queue    = Name,
-                                        exchange = <<"amq.fanout">>}),
-    Res.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/crashing_queues.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/crashing_queues.erl
deleted file mode 100644 (file)
index e34fd04..0000000
+++ /dev/null
@@ -1,213 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(crashing_queues).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_test_util, [set_ha_policy/3, a2b/1]).
--import(rabbit_misc, [pget/2]).
-
-crashing_unmirrored_with() -> [cluster_ab].
-crashing_unmirrored([CfgA, CfgB]) ->
-    A = pget(node, CfgA),
-    ChA = pget(channel, CfgA),
-    ConnB = pget(connection, CfgB),
-    amqp_channel:call(ChA, #'confirm.select'{}),
-    test_queue_failure(A, ChA, ConnB, 1, 0,
-                       #'queue.declare'{queue = <<"test">>, durable = true}),
-    test_queue_failure(A, ChA, ConnB, 0, 0,
-                       #'queue.declare'{queue = <<"test">>, durable = false}),
-    ok.
-
-crashing_mirrored_with() -> [cluster_ab, ha_policy_all].
-crashing_mirrored([CfgA, CfgB]) ->
-    A = pget(node, CfgA),
-    ChA = pget(channel, CfgA),
-    ConnB = pget(connection, CfgB),
-    amqp_channel:call(ChA, #'confirm.select'{}),
-    test_queue_failure(A, ChA, ConnB, 2, 1,
-                       #'queue.declare'{queue = <<"test">>, durable = true}),
-    test_queue_failure(A, ChA, ConnB, 2, 1,
-                       #'queue.declare'{queue = <<"test">>, durable = false}),
-    ok.
-
-test_queue_failure(Node, Ch, RaceConn, MsgCount, SlaveCount, Decl) ->
-    #'queue.declare_ok'{queue = QName} = amqp_channel:call(Ch, Decl),
-    publish(Ch, QName, transient),
-    publish(Ch, QName, durable),
-    Racer = spawn_declare_racer(RaceConn, Decl),
-    kill_queue(Node, QName),
-    assert_message_count(MsgCount, Ch, QName),
-    assert_slave_count(SlaveCount, Node, QName),
-    stop_declare_racer(Racer),
-    amqp_channel:call(Ch, #'queue.delete'{queue = QName}).
-
-give_up_after_repeated_crashes_with() -> [cluster_ab].
-give_up_after_repeated_crashes([CfgA, CfgB]) ->
-    A = pget(node, CfgA),
-    ChA = pget(channel, CfgA),
-    ChB = pget(channel, CfgB),
-    QName = <<"test">>,
-    amqp_channel:call(ChA, #'confirm.select'{}),
-    amqp_channel:call(ChA, #'queue.declare'{queue   = QName,
-                                            durable = true}),
-    await_state(A, QName, running),
-    publish(ChA, QName, durable),
-    kill_queue_hard(A, QName),
-    {'EXIT', _} = (catch amqp_channel:call(
-                           ChA, #'queue.declare'{queue   = QName,
-                                                 durable = true})),
-    await_state(A, QName, crashed),
-    amqp_channel:call(ChB, #'queue.delete'{queue = QName}),
-    amqp_channel:call(ChB, #'queue.declare'{queue   = QName,
-                                            durable = true}),
-    await_state(A, QName, running),
-
-    %% Since it's convenient, also test absent queue status here.
-    rabbit_test_configs:stop_node(CfgB),
-    await_state(A, QName, down),
-    ok.
-
-
-publish(Ch, QName, DelMode) ->
-    Publish = #'basic.publish'{exchange = <<>>, routing_key = QName},
-    Msg = #amqp_msg{props = #'P_basic'{delivery_mode = del_mode(DelMode)}},
-    amqp_channel:cast(Ch, Publish, Msg),
-    amqp_channel:wait_for_confirms(Ch).
-
-del_mode(transient) -> 1;
-del_mode(durable)   -> 2.
-
-spawn_declare_racer(Conn, Decl) ->
-    Self = self(),
-    spawn_link(fun() -> declare_racer_loop(Self, Conn, Decl) end).
-
-stop_declare_racer(Pid) ->
-    Pid ! stop,
-    MRef = erlang:monitor(process, Pid),
-    receive
-        {'DOWN', MRef, process, Pid, _} -> ok
-    end.
-
-declare_racer_loop(Parent, Conn, Decl) ->
-    receive
-        stop -> unlink(Parent)
-    after 0 ->
-            %% Catch here because we might happen to catch the queue
-            %% while it is in the middle of recovering and thus
-            %% explode with NOT_FOUND because crashed. Doesn't matter,
-            %% we are only in this loop to try to fool the recovery
-            %% code anyway.
-            try
-                case amqp_connection:open_channel(Conn) of
-                    {ok, Ch} -> amqp_channel:call(Ch, Decl);
-                    closing  -> ok
-                end
-            catch
-                exit:_ ->
-                    ok
-            end,
-            declare_racer_loop(Parent, Conn, Decl)
-    end.
-
-await_state(Node, QName, State) ->
-    await_state(Node, QName, State, 30000).
-
-await_state(Node, QName, State, Time) ->
-    case state(Node, QName) of
-        State ->
-            ok;
-        Other ->
-            case Time of
-                0 -> exit({timeout_awaiting_state, State, Other});
-                _ -> timer:sleep(100),
-                     await_state(Node, QName, State, Time - 100)
-            end
-    end.
-
-state(Node, QName) ->
-    V = <<"/">>,
-    Res = rabbit_misc:r(V, queue, QName),
-    [[{name,  Res},
-      {state, State}]] =
-        rpc:call(Node, rabbit_amqqueue, info_all, [V, [name, state]]),
-    State.
-
-kill_queue_hard(Node, QName) ->
-    case kill_queue(Node, QName) of
-        crashed -> ok;
-        _NewPid -> timer:sleep(100),
-                   kill_queue_hard(Node, QName)
-    end.
-
-kill_queue(Node, QName) ->
-    Pid1 = queue_pid(Node, QName),
-    exit(Pid1, boom),
-    await_new_pid(Node, QName, Pid1).
-
-queue_pid(Node, QName) ->
-    #amqqueue{pid   = QPid,
-              state = State} = lookup(Node, QName),
-    case State of
-        crashed -> case sup_child(Node, rabbit_amqqueue_sup_sup) of
-                       {ok, _}           -> QPid;   %% restarting
-                       {error, no_child} -> crashed %% given up
-                   end;
-        _       -> QPid
-    end.
-
-sup_child(Node, Sup) ->
-    case rpc:call(Node, supervisor2, which_children, [Sup]) of
-        [{_, Child, _, _}]              -> {ok, Child};
-        []                              -> {error, no_child};
-        {badrpc, {'EXIT', {noproc, _}}} -> {error, no_sup}
-    end.
-
-lookup(Node, QName) ->
-    {ok, Q} = rpc:call(Node, rabbit_amqqueue, lookup,
-                       [rabbit_misc:r(<<"/">>, queue, QName)]),
-    Q.
-
-await_new_pid(Node, QName, OldPid) ->
-    case queue_pid(Node, QName) of
-        OldPid -> timer:sleep(10),
-                  await_new_pid(Node, QName, OldPid);
-        New    -> New
-    end.
-
-assert_message_count(Count, Ch, QName) ->
-    #'queue.declare_ok'{message_count = Count} =
-        amqp_channel:call(Ch, #'queue.declare'{queue   = QName,
-                                               passive = true}).
-
-assert_slave_count(Count, Node, QName) ->
-    Q = lookup(Node, QName),
-    [{_, Pids}] = rpc:call(Node, rabbit_amqqueue, info, [Q, [slave_pids]]),
-    RealCount = case Pids of
-                    '' -> 0;
-                    _  -> length(Pids)
-                end,
-    case RealCount of
-        Count ->
-            ok;
-        _ when RealCount < Count ->
-            timer:sleep(10),
-            assert_slave_count(Count, Node, QName);
-        _ ->
-            exit({too_many_slaves, Count, RealCount})
-    end.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/dynamic_ha.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/dynamic_ha.erl
deleted file mode 100644 (file)
index e9acb52..0000000
+++ /dev/null
@@ -1,254 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(dynamic_ha).
-
-%% rabbit_tests:test_dynamic_mirroring() is a unit test which should
-%% test the logic of what all the policies decide to do, so we don't
-%% need to exhaustively test that here. What we need to test is that:
-%%
-%% * Going from non-mirrored to mirrored works and vice versa
-%% * Changing policy can add / remove mirrors and change the master
-%% * Adding a node will create a new mirror when there are not enough nodes
-%%   for the policy
-%% * Removing a node will not create a new mirror even if the policy
-%%   logic wants it (since this gives us a good way to lose messages
-%%   on cluster shutdown, by repeated failover to new nodes)
-%%
-%% The first two are change_policy, the last two are change_cluster
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--define(QNAME, <<"ha.test">>).
--define(POLICY, <<"^ha.test$">>). %% " emacs
--define(VHOST, <<"/">>).
-
--import(rabbit_test_util, [set_ha_policy/3, set_ha_policy/4,
-                           clear_policy/2, a2b/1, publish/3, consume/3]).
--import(rabbit_misc, [pget/2]).
-
-change_policy_with() -> cluster_abc.
-change_policy([CfgA, _CfgB, _CfgC] = Cfgs) ->
-    ACh = pget(channel, CfgA),
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-
-    %% When we first declare a queue with no policy, it's not HA.
-    amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME}),
-    assert_slaves(A, ?QNAME, {A, ''}),
-
-    %% Give it policy "all", it becomes HA and gets all mirrors
-    set_ha_policy(CfgA, ?POLICY, <<"all">>),
-    assert_slaves(A, ?QNAME, {A, [B, C]}),
-
-    %% Give it policy "nodes", it gets specific mirrors
-    set_ha_policy(CfgA, ?POLICY, {<<"nodes">>, [a2b(A), a2b(B)]}),
-    assert_slaves(A, ?QNAME, {A, [B]}),
-
-    %% Now explicitly change the mirrors
-    set_ha_policy(CfgA, ?POLICY, {<<"nodes">>, [a2b(A), a2b(C)]}),
-    assert_slaves(A, ?QNAME, {A, [C]}, [{A, [B, C]}]),
-
-    %% Clear the policy, and we go back to non-mirrored
-    clear_policy(CfgA, ?POLICY),
-    assert_slaves(A, ?QNAME, {A, ''}),
-
-    %% Test switching "away" from an unmirrored node
-    set_ha_policy(CfgA, ?POLICY, {<<"nodes">>, [a2b(B), a2b(C)]}),
-    assert_slaves(A, ?QNAME, {A, [B, C]}, [{A, [B]}, {A, [C]}]),
-
-    ok.
-
-change_cluster_with() -> cluster_abc.
-change_cluster([CfgA, _CfgB, _CfgC] = CfgsABC) ->
-    ACh = pget(channel, CfgA),
-    [A, B, C] = [pget(node, Cfg) || Cfg <- CfgsABC],
-
-    amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME}),
-    assert_slaves(A, ?QNAME, {A, ''}),
-
-    %% Give it policy exactly 4, it should mirror to all 3 nodes
-    set_ha_policy(CfgA, ?POLICY, {<<"exactly">>, 4}),
-    assert_slaves(A, ?QNAME, {A, [B, C]}),
-
-    %% Add D and E, D joins in
-    [CfgD, CfgE] = CfgsDE = rabbit_test_configs:start_nodes(CfgA, [d, e], 5675),
-    D = pget(node, CfgD),
-    E = pget(node, CfgE),
-    rabbit_test_configs:add_to_cluster(CfgsABC, CfgsDE),
-    assert_slaves(A, ?QNAME, {A, [B, C, D]}),
-
-    %% Remove D, E joins in
-    rabbit_test_configs:stop_node(CfgD),
-    assert_slaves(A, ?QNAME, {A, [B, C, E]}),
-
-    %% Clean up since we started this by hand
-    rabbit_test_configs:stop_node(CfgE),
-    ok.
-
-rapid_change_with() -> cluster_abc.
-rapid_change([CfgA, _CfgB, _CfgC]) ->
-    ACh = pget(channel, CfgA),
-    {_Pid, MRef} = spawn_monitor(
-                     fun() ->
-                             [rapid_amqp_ops(ACh, I) || I <- lists:seq(1, 100)]
-                     end),
-    rapid_loop(CfgA, MRef),
-    ok.
-
-rapid_amqp_ops(Ch, I) ->
-    Payload = list_to_binary(integer_to_list(I)),
-    amqp_channel:call(Ch, #'queue.declare'{queue = ?QNAME}),
-    amqp_channel:cast(Ch, #'basic.publish'{exchange = <<"">>,
-                                           routing_key = ?QNAME},
-                      #amqp_msg{payload = Payload}),
-    amqp_channel:subscribe(Ch, #'basic.consume'{queue    = ?QNAME,
-                                                no_ack   = true}, self()),
-    receive #'basic.consume_ok'{} -> ok
-    end,
-    receive {#'basic.deliver'{}, #amqp_msg{payload = Payload}} ->
-            ok
-    end,
-    amqp_channel:call(Ch, #'queue.delete'{queue = ?QNAME}).
-
-rapid_loop(Cfg, MRef) ->
-    receive
-        {'DOWN', MRef, process, _Pid, normal} ->
-            ok;
-        {'DOWN', MRef, process, _Pid, Reason} ->
-            exit({amqp_ops_died, Reason})
-    after 0 ->
-            set_ha_policy(Cfg, ?POLICY, <<"all">>),
-            clear_policy(Cfg, ?POLICY),
-            rapid_loop(Cfg, MRef)
-    end.
-
-%% Vhost deletion needs to successfully tear down policies and queues
-%% with policies. At least smoke-test that it doesn't blow up.
-vhost_deletion_with() -> [cluster_ab, ha_policy_all].
-vhost_deletion([CfgA, _CfgB]) ->
-    ACh = pget(channel, CfgA),
-    Node = pget(node, CfgA),
-    amqp_channel:call(ACh, #'queue.declare'{queue = <<"test">>}),
-    ok = rpc:call(Node, rabbit_vhost, delete, [<<"/">>]),
-    ok.
-
-promote_on_shutdown_with() -> cluster_ab.
-promote_on_shutdown([CfgA, CfgB]) ->
-    set_ha_policy(CfgA, <<"^ha.promote">>, <<"all">>,
-                  [{<<"ha-promote-on-shutdown">>, <<"always">>}]),
-    set_ha_policy(CfgA, <<"^ha.nopromote">>, <<"all">>),
-
-    ACh = pget(channel, CfgA),
-    [begin
-         amqp_channel:call(ACh, #'queue.declare'{queue   = Q,
-                                                 durable = true}),
-         publish(ACh, Q, 10)
-     end || Q <- [<<"ha.promote.test">>, <<"ha.nopromote.test">>]],
-    rabbit_test_configs:restart_node(CfgB),
-    CfgA1 = rabbit_test_configs:stop_node(CfgA),
-    {_, BCh} =  rabbit_test_util:connect(CfgB),
-    #'queue.declare_ok'{message_count = 0} = 
-        amqp_channel:call(
-          BCh, #'queue.declare'{queue   = <<"ha.promote.test">>,
-                                durable = true}),
-    ?assertExit(
-       {{shutdown, {server_initiated_close, 404, _}}, _},
-       amqp_channel:call(
-         BCh, #'queue.declare'{queue   = <<"ha.nopromote.test">>,
-                               durable = true})),
-    CfgA2 = rabbit_test_configs:start_node(CfgA1),
-    {_, ACh2} =  rabbit_test_util:connect(CfgA2),
-    #'queue.declare_ok'{message_count = 10} =
-        amqp_channel:call(
-          ACh2, #'queue.declare'{queue   = <<"ha.nopromote.test">>,
-                                 durable = true}),
-    ok.
-
-%%----------------------------------------------------------------------------
-
-assert_slaves(RPCNode, QName, Exp) ->
-    assert_slaves(RPCNode, QName, Exp, []).
-
-assert_slaves(RPCNode, QName, Exp, PermittedIntermediate) ->
-    assert_slaves0(RPCNode, QName, Exp,
-                  [{get(previous_exp_m_node), get(previous_exp_s_nodes)} |
-                   PermittedIntermediate]).
-
-assert_slaves0(RPCNode, QName, {ExpMNode, ExpSNodes}, PermittedIntermediate) ->
-    Q = find_queue(QName, RPCNode),
-    Pid = proplists:get_value(pid, Q),
-    SPids = proplists:get_value(slave_pids, Q),
-    ActMNode = node(Pid),
-    ActSNodes = case SPids of
-                    '' -> '';
-                    _  -> [node(SPid) || SPid <- SPids]
-                end,
-    case ExpMNode =:= ActMNode andalso equal_list(ExpSNodes, ActSNodes) of
-        false ->
-            %% It's an async change, so if nothing has changed let's
-            %% just wait - of course this means if something does not
-            %% change when expected then we time out the test which is
-            %% a bit tedious
-            case [found || {PermMNode, PermSNodes} <- PermittedIntermediate,
-                           PermMNode =:= ActMNode,
-                           equal_list(PermSNodes, ActSNodes)] of
-                [] -> ct:fail("Expected ~p / ~p, got ~p / ~p~nat ~p~n",
-                              [ExpMNode, ExpSNodes, ActMNode, ActSNodes,
-                               get_stacktrace()]);
-                _  -> timer:sleep(100),
-                      assert_slaves0(RPCNode, QName, {ExpMNode, ExpSNodes},
-                                     PermittedIntermediate)
-            end;
-        true ->
-            put(previous_exp_m_node, ExpMNode),
-            put(previous_exp_s_nodes, ExpSNodes),
-            ok
-    end.
-
-equal_list('',    '')   -> true;
-equal_list('',    _Act) -> false;
-equal_list(_Exp,  '')   -> false;
-equal_list([],    [])   -> true;
-equal_list(_Exp,  [])   -> false;
-equal_list([],    _Act) -> false;
-equal_list([H|T], Act)  -> case lists:member(H, Act) of
-                               true  -> equal_list(T, Act -- [H]);
-                               false -> false
-                           end.
-
-find_queue(QName, RPCNode) ->
-    Qs = rpc:call(RPCNode, rabbit_amqqueue, info_all, [?VHOST], infinity),
-    case find_queue0(QName, Qs) of
-        did_not_find_queue -> timer:sleep(100),
-                              find_queue(QName, RPCNode);
-        Q -> Q
-    end.
-
-find_queue0(QName, Qs) ->
-    case [Q || Q <- Qs, proplists:get_value(name, Q) =:=
-                   rabbit_misc:r(?VHOST, queue, QName)] of
-        [R] -> R;
-        []  -> did_not_find_queue
-    end.
-
-get_stacktrace() ->
-    try
-        throw(e)
-    catch
-        _:e ->
-            erlang:get_stacktrace()
-    end.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/eager_sync.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/eager_sync.erl
deleted file mode 100644 (file)
index 9c2d935..0000000
+++ /dev/null
@@ -1,205 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(eager_sync).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--define(QNAME, <<"ha.two.test">>).
--define(QNAME_AUTO, <<"ha.auto.test">>).
--define(MESSAGE_COUNT, 2000).
-
--import(rabbit_test_util, [a2b/1, publish/3, consume/3, fetch/3]).
--import(rabbit_misc, [pget/2]).
-
--define(CONFIG, [cluster_abc, ha_policy_two_pos]).
-
-eager_sync_with() -> ?CONFIG.
-eager_sync([A, B, C]) ->
-    %% Queue is on AB but not C.
-    ACh = pget(channel, A),
-    Ch = pget(channel, C),
-    amqp_channel:call(ACh, #'queue.declare'{queue   = ?QNAME,
-                                            durable = true}),
-
-    %% Don't sync, lose messages
-    publish(Ch, ?QNAME, ?MESSAGE_COUNT),
-    restart(A),
-    restart(B),
-    consume(Ch, ?QNAME, 0),
-
-    %% Sync, keep messages
-    publish(Ch, ?QNAME, ?MESSAGE_COUNT),
-    restart(A),
-    ok = sync(C, ?QNAME),
-    restart(B),
-    consume(Ch, ?QNAME, ?MESSAGE_COUNT),
-
-    %% Check the no-need-to-sync path
-    publish(Ch, ?QNAME, ?MESSAGE_COUNT),
-    ok = sync(C, ?QNAME),
-    consume(Ch, ?QNAME, ?MESSAGE_COUNT),
-
-    %% keep unacknowledged messages
-    publish(Ch, ?QNAME, ?MESSAGE_COUNT),
-    fetch(Ch, ?QNAME, 2),
-    restart(A),
-    fetch(Ch, ?QNAME, 3),
-    sync(C, ?QNAME),
-    restart(B),
-    consume(Ch, ?QNAME, ?MESSAGE_COUNT),
-
-    ok.
-
-eager_sync_cancel_with() -> ?CONFIG.
-eager_sync_cancel([A, B, C]) ->
-    %% Queue is on AB but not C.
-    ACh = pget(channel, A),
-    Ch = pget(channel, C),
-
-    amqp_channel:call(ACh, #'queue.declare'{queue   = ?QNAME,
-                                            durable = true}),
-    {ok, not_syncing} = sync_cancel(C, ?QNAME), %% Idempotence
-    eager_sync_cancel_test2(A, B, C, Ch).
-
-eager_sync_cancel_test2(A, B, C, Ch) ->
-    %% Sync then cancel
-    publish(Ch, ?QNAME, ?MESSAGE_COUNT),
-    restart(A),
-    spawn_link(fun() -> ok = sync_nowait(C, ?QNAME) end),
-    case wait_for_syncing(C, ?QNAME, 1) of
-        ok ->
-            case sync_cancel(C, ?QNAME) of
-                ok ->
-                    wait_for_running(C, ?QNAME),
-                    restart(B),
-                    consume(Ch, ?QNAME, 0),
-
-                    {ok, not_syncing} = sync_cancel(C, ?QNAME), %% Idempotence
-                    ok;
-                {ok, not_syncing} ->
-                    %% Damn. Syncing finished between wait_for_syncing/3 and
-                    %% sync_cancel/2 above. Start again.
-                    amqp_channel:call(Ch, #'queue.purge'{queue = ?QNAME}),
-                    eager_sync_cancel_test2(A, B, C, Ch)
-            end;
-        synced_already ->
-            %% Damn. Syncing finished before wait_for_syncing/3. Start again.
-            amqp_channel:call(Ch, #'queue.purge'{queue = ?QNAME}),
-            eager_sync_cancel_test2(A, B, C, Ch)
-    end.
-
-eager_sync_auto_with() -> ?CONFIG.
-eager_sync_auto([A, B, C]) ->
-    ACh = pget(channel, A),
-    Ch = pget(channel, C),
-    amqp_channel:call(ACh, #'queue.declare'{queue   = ?QNAME_AUTO,
-                                            durable = true}),
-
-    %% Sync automatically, don't lose messages
-    publish(Ch, ?QNAME_AUTO, ?MESSAGE_COUNT),
-    restart(A),
-    wait_for_sync(C, ?QNAME_AUTO),
-    restart(B),
-    wait_for_sync(C, ?QNAME_AUTO),
-    consume(Ch, ?QNAME_AUTO, ?MESSAGE_COUNT),
-
-    ok.
-
-eager_sync_auto_on_policy_change_with() -> ?CONFIG.
-eager_sync_auto_on_policy_change([A, B, C]) ->
-    ACh = pget(channel, A),
-    Ch = pget(channel, C),
-    amqp_channel:call(ACh, #'queue.declare'{queue   = ?QNAME,
-                                            durable = true}),
-
-    %% Sync automatically once the policy is changed to tell us to.
-    publish(Ch, ?QNAME, ?MESSAGE_COUNT),
-    restart(A),
-    Params = [a2b(pget(node, Cfg)) || Cfg <- [A, B]],
-    rabbit_test_util:set_ha_policy(
-      A, <<"^ha.two.">>, {<<"nodes">>, Params},
-      [{<<"ha-sync-mode">>, <<"automatic">>}]),
-    wait_for_sync(C, ?QNAME),
-
-    ok.
-
-eager_sync_requeue_with() -> ?CONFIG.
-eager_sync_requeue([A, B, C]) ->
-    %% Queue is on AB but not C.
-    ACh = pget(channel, A),
-    Ch = pget(channel, C),
-    amqp_channel:call(ACh, #'queue.declare'{queue   = ?QNAME,
-                                            durable = true}),
-
-    publish(Ch, ?QNAME, 2),
-    {#'basic.get_ok'{delivery_tag = TagA}, _} =
-        amqp_channel:call(Ch, #'basic.get'{queue = ?QNAME}),
-    {#'basic.get_ok'{delivery_tag = TagB}, _} =
-        amqp_channel:call(Ch, #'basic.get'{queue = ?QNAME}),
-    amqp_channel:cast(Ch, #'basic.reject'{delivery_tag = TagA, requeue = true}),
-    restart(B),
-    ok = sync(C, ?QNAME),
-    amqp_channel:cast(Ch, #'basic.reject'{delivery_tag = TagB, requeue = true}),
-    consume(Ch, ?QNAME, 2),
-
-    ok.
-
-restart(Cfg) -> rabbit_test_util:restart_app(Cfg).
-
-sync(Cfg, QName) ->
-    case sync_nowait(Cfg, QName) of
-        ok -> wait_for_sync(Cfg, QName),
-              ok;
-        R  -> R
-    end.
-
-sync_nowait(Cfg, QName) -> action(Cfg, sync_queue, QName).
-sync_cancel(Cfg, QName) -> action(Cfg, cancel_sync_queue, QName).
-
-wait_for_sync(Cfg, QName) ->
-    sync_detection:wait_for_sync_status(true, Cfg, QName).
-
-action(Cfg, Action, QName) ->
-    rabbit_test_util:control_action(
-      Action, Cfg, [binary_to_list(QName)], [{"-p", "/"}]).
-
-queue(Cfg, QName) ->
-    QNameRes = rabbit_misc:r(<<"/">>, queue, QName),
-    {ok, Q} = rpc:call(pget(node, Cfg), rabbit_amqqueue, lookup, [QNameRes]),
-    Q.
-
-wait_for_syncing(Cfg, QName, Target) ->
-    case state(Cfg, QName) of
-        {{syncing, _}, _} -> ok;
-        {running, Target} -> synced_already;
-        _                 -> timer:sleep(100),
-                             wait_for_syncing(Cfg, QName, Target)
-    end.
-
-wait_for_running(Cfg, QName) ->
-    case state(Cfg, QName) of
-        {running, _} -> ok;
-        _            -> timer:sleep(100),
-                        wait_for_running(Cfg, QName)
-    end.
-
-state(Cfg, QName) ->
-    [{state, State}, {synchronised_slave_pids, Pids}] =
-        rpc:call(pget(node, Cfg), rabbit_amqqueue, info,
-                 [queue(Cfg, QName), [state, synchronised_slave_pids]]),
-    {State, length(Pids)}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/many_node_ha.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/many_node_ha.erl
deleted file mode 100644 (file)
index 9104d4c..0000000
+++ /dev/null
@@ -1,64 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(many_node_ha).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_test_util, [a2b/1]).
--import(rabbit_misc, [pget/2]).
-
-kill_intermediate_with() ->
-    fun (Cfg) -> rabbit_test_configs:ha_policy_all(
-                   rabbit_test_configs:cluster(Cfg, [a,b,c,d,e,f]))
-    end.
-kill_intermediate([CfgA, CfgB, CfgC, CfgD, CfgE, CfgF]) ->
-    Msgs            = rabbit_test_configs:cover_work_factor(20000, CfgA),
-    MasterChannel   = pget(channel, CfgA),
-    ConsumerChannel = pget(channel, CfgE),
-    ProducerChannel = pget(channel, CfgF),
-    Queue = <<"test">>,
-    amqp_channel:call(MasterChannel, #'queue.declare'{queue       = Queue,
-                                                      auto_delete = false}),
-
-    %% TODO: this seems *highly* timing dependant - the assumption being
-    %% that the kill will work quickly enough that there will still be
-    %% some messages in-flight that we *must* receive despite the intervening
-    %% node deaths. It would be nice if we could find a means to do this
-    %% in a way that is not actually timing dependent.
-
-    %% Worse still, it assumes that killing the master will cause a
-    %% failover to Slave1, and so on. Nope.
-
-    ConsumerPid = rabbit_ha_test_consumer:create(ConsumerChannel,
-                                                 Queue, self(), false, Msgs),
-
-    ProducerPid = rabbit_ha_test_producer:create(ProducerChannel,
-                                                 Queue, self(), false, Msgs),
-
-    %% create a killer for the master and the first 3 slaves
-    [rabbit_test_util:kill_after(Time, Cfg, sigkill) ||
-        {Cfg, Time} <- [{CfgA, 50},
-                        {CfgB, 50},
-                        {CfgC, 100},
-                        {CfgD, 100}]],
-
-    %% verify that the consumer got all msgs, or die, or time out
-    rabbit_ha_test_producer:await_response(ProducerPid),
-    rabbit_ha_test_consumer:await_response(ConsumerPid),
-    ok.
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/partitions.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/partitions.erl
deleted file mode 100644 (file)
index 56b99ca..0000000
+++ /dev/null
@@ -1,370 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(partitions).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_misc, [pget/2]).
-
--define(CONFIG, [start_abc, fun enable_dist_proxy/1,
-                 build_cluster, short_ticktime(1), start_connections]).
-%% We set ticktime to 1s and setuptime is 7s so to make sure it
-%% passes...
--define(DELAY, 8000).
-
-ignore_with() -> ?CONFIG.
-ignore(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    block_unblock([{A, B}, {A, C}]),
-    timer:sleep(?DELAY),
-    [B, C] = partitions(A),
-    [A] = partitions(B),
-    [A] = partitions(C),
-    ok.
-
-pause_minority_on_down_with() -> ?CONFIG.
-pause_minority_on_down([CfgA, CfgB, CfgC] = Cfgs) ->
-    A = pget(node, CfgA),
-    set_mode(Cfgs, pause_minority),
-    true = is_running(A),
-
-    rabbit_test_util:kill(CfgB, sigkill),
-    timer:sleep(?DELAY),
-    true = is_running(A),
-
-    rabbit_test_util:kill(CfgC, sigkill),
-    await_running(A, false),
-    ok.
-
-pause_minority_on_blocked_with() -> ?CONFIG.
-pause_minority_on_blocked(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    set_mode(Cfgs, pause_minority),
-    pause_on_blocked(A, B, C).
-
-pause_if_all_down_on_down_with() -> ?CONFIG.
-pause_if_all_down_on_down([_, CfgB, CfgC] = Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    set_mode(Cfgs, {pause_if_all_down, [C], ignore}),
-    [(true = is_running(N)) || N <- [A, B, C]],
-
-    rabbit_test_util:kill(CfgB, sigkill),
-    timer:sleep(?DELAY),
-    [(true = is_running(N)) || N <- [A, C]],
-
-    rabbit_test_util:kill(CfgC, sigkill),
-    timer:sleep(?DELAY),
-    await_running(A, false),
-    ok.
-
-pause_if_all_down_on_blocked_with() -> ?CONFIG.
-pause_if_all_down_on_blocked(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    set_mode(Cfgs, {pause_if_all_down, [C], ignore}),
-    pause_on_blocked(A, B, C).
-
-pause_on_blocked(A, B, C) ->
-    [(true = is_running(N)) || N <- [A, B, C]],
-    block([{A, B}, {A, C}]),
-    await_running(A, false),
-    [await_running(N, true) || N <- [B, C]],
-    unblock([{A, B}, {A, C}]),
-    [await_running(N, true) || N <- [A, B, C]],
-    Status = rpc:call(B, rabbit_mnesia, status, []),
-    [] = pget(partitions, Status),
-    ok.
-
-%% Make sure we do not confirm any messages after a partition has
-%% happened but before we pause, since any such confirmations would be
-%% lies.
-%%
-%% This test has to use an AB cluster (not ABC) since GM ends up
-%% taking longer to detect down slaves when there are more nodes and
-%% we close the window by mistake.
-%%
-%% In general there are quite a few ways to accidentally cause this
-%% test to pass since there are a lot of things in the broker that can
-%% suddenly take several seconds to time out when TCP connections
-%% won't establish.
-pause_minority_false_promises_mirrored_with() ->
-    [start_ab, fun enable_dist_proxy/1,
-     build_cluster, short_ticktime(10), start_connections, ha_policy_all].
-
-pause_minority_false_promises_mirrored(Cfgs) ->
-    pause_false_promises(Cfgs, pause_minority).
-
-pause_minority_false_promises_unmirrored_with() ->
-    [start_ab, fun enable_dist_proxy/1,
-     build_cluster, short_ticktime(10), start_connections].
-
-pause_minority_false_promises_unmirrored(Cfgs) ->
-    pause_false_promises(Cfgs, pause_minority).
-
-pause_if_all_down_false_promises_mirrored_with() ->
-    [start_ab, fun enable_dist_proxy/1,
-     build_cluster, short_ticktime(10), start_connections, ha_policy_all].
-
-pause_if_all_down_false_promises_mirrored([_, CfgB | _] = Cfgs) ->
-    B = pget(node, CfgB),
-    pause_false_promises(Cfgs, {pause_if_all_down, [B], ignore}).
-
-pause_if_all_down_false_promises_unmirrored_with() ->
-    [start_ab, fun enable_dist_proxy/1,
-     build_cluster, short_ticktime(10), start_connections].
-
-pause_if_all_down_false_promises_unmirrored([_, CfgB | _] = Cfgs) ->
-    B = pget(node, CfgB),
-    pause_false_promises(Cfgs, {pause_if_all_down, [B], ignore}).
-
-pause_false_promises([CfgA, CfgB | _] = Cfgs, ClusterPartitionHandling) ->
-    [A, B] = [pget(node, Cfg) || Cfg <- Cfgs],
-    set_mode([CfgA], ClusterPartitionHandling),
-    ChA = pget(channel, CfgA),
-    ChB = pget(channel, CfgB),
-    amqp_channel:call(ChB, #'queue.declare'{queue   = <<"test">>,
-                                            durable = true}),
-    amqp_channel:call(ChA, #'confirm.select'{}),
-    amqp_channel:register_confirm_handler(ChA, self()),
-
-    %% Cause a partition after 1s
-    Self = self(),
-    spawn_link(fun () ->
-                       timer:sleep(1000),
-                       %%io:format(user, "~p BLOCK~n", [calendar:local_time()]),
-                       block([{A, B}]),
-                       unlink(Self)
-               end),
-
-    %% Publish large no of messages, see how many we get confirmed
-    [amqp_channel:cast(ChA, #'basic.publish'{routing_key = <<"test">>},
-                       #amqp_msg{props = #'P_basic'{delivery_mode = 1}}) ||
-        _ <- lists:seq(1, 100000)],
-    %%io:format(user, "~p finish publish~n", [calendar:local_time()]),
-
-    %% Time for the partition to be detected. We don't put this sleep
-    %% in receive_acks since otherwise we'd have another similar sleep
-    %% at the end.
-    timer:sleep(30000),
-    Confirmed = receive_acks(0),
-    %%io:format(user, "~p got acks~n", [calendar:local_time()]),
-    await_running(A, false),
-    %%io:format(user, "~p A stopped~n", [calendar:local_time()]),
-
-    unblock([{A, B}]),
-    await_running(A, true),
-
-    %% But how many made it onto the rest of the cluster?
-    #'queue.declare_ok'{message_count = Survived} = 
-        amqp_channel:call(ChB, #'queue.declare'{queue   = <<"test">>,
-                                                durable = true}),
-    %%io:format(user, "~p queue declared~n", [calendar:local_time()]),
-    case Confirmed > Survived of
-        true  -> ?debugVal({Confirmed, Survived});
-        false -> ok
-    end,
-    ?assert(Confirmed =< Survived),
-    ok.
-
-receive_acks(Max) ->
-    receive
-        #'basic.ack'{delivery_tag = DTag} ->
-            receive_acks(DTag)
-    after ?DELAY ->
-            Max
-    end.
-
-prompt_disconnect_detection_with() ->
-    [start_ab, fun enable_dist_proxy/1,
-     build_cluster, short_ticktime(1), start_connections].
-
-prompt_disconnect_detection([CfgA, CfgB]) ->
-    A = pget(node, CfgA),
-    B = pget(node, CfgB),
-    ChB = pget(channel, CfgB),
-    [amqp_channel:call(ChB, #'queue.declare'{}) || _ <- lists:seq(1, 100)],
-    block([{A, B}]),
-    timer:sleep(?DELAY),
-    %% We want to make sure we do not end up waiting for setuptime *
-    %% no of queues. Unfortunately that means we need a timeout...
-    [] = rpc(CfgA, rabbit_amqqueue, info_all, [<<"/">>], ?DELAY),
-    ok.
-
-ctl_ticktime_sync_with() -> [start_ab, short_ticktime(1)].
-ctl_ticktime_sync([CfgA | _]) ->
-    %% Server has 1s net_ticktime, make sure ctl doesn't get disconnected
-    "ok\n" = rabbit_test_configs:rabbitmqctl(CfgA, "eval 'timer:sleep(5000).'"),
-    ok.
-
-%% NB: we test full and partial partitions here.
-autoheal_with() -> ?CONFIG.
-autoheal(Cfgs) ->
-    set_mode(Cfgs, autoheal),
-    do_autoheal(Cfgs).
-
-autoheal_after_pause_if_all_down_with() -> ?CONFIG.
-autoheal_after_pause_if_all_down([_, CfgB, CfgC | _] = Cfgs) ->
-    B = pget(node, CfgB),
-    C = pget(node, CfgC),
-    set_mode(Cfgs, {pause_if_all_down, [B, C], autoheal}),
-    do_autoheal(Cfgs).
-
-do_autoheal(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    Test = fun (Pairs) ->
-                   block_unblock(Pairs),
-                   %% Sleep to make sure all the partitions are noticed
-                   %% ?DELAY for the net_tick timeout
-                   timer:sleep(?DELAY),
-                   [await_listening(N, true) || N <- [A, B, C]],
-                   [await_partitions(N, []) || N <- [A, B, C]]
-           end,
-    Test([{B, C}]),
-    Test([{A, C}, {B, C}]),
-    Test([{A, B}, {A, C}, {B, C}]),
-    ok.
-
-partial_false_positive_with() -> ?CONFIG.
-partial_false_positive(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    block([{A, B}]),
-    timer:sleep(1000),
-    block([{A, C}]),
-    timer:sleep(?DELAY),
-    unblock([{A, B}, {A, C}]),
-    timer:sleep(?DELAY),
-    %% When B times out A's connection, it will check with C. C will
-    %% not have timed out A yet, but already it can't talk to it. We
-    %% need to not consider this a partial partition; B and C should
-    %% still talk to each other.
-    [B, C] = partitions(A),
-    [A] = partitions(B),
-    [A] = partitions(C),
-    ok.
-
-partial_to_full_with() -> ?CONFIG.
-partial_to_full(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    block_unblock([{A, B}]),
-    timer:sleep(?DELAY),
-    %% There are several valid ways this could go, depending on how
-    %% the DOWN messages race: either A gets disconnected first and BC
-    %% stay together, or B gets disconnected first and AC stay
-    %% together, or both make it through and all three get
-    %% disconnected.
-    case {partitions(A), partitions(B), partitions(C)} of
-        {[B, C], [A],    [A]}    -> ok;
-        {[B],    [A, C], [B]}    -> ok;
-        {[B, C], [A, C], [A, B]} -> ok;
-        Partitions               -> exit({partitions, Partitions})
-    end.
-
-partial_pause_minority_with() -> ?CONFIG.
-partial_pause_minority(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    set_mode(Cfgs, pause_minority),
-    block([{A, B}]),
-    [await_running(N, false) || N <- [A, B]],
-    await_running(C, true),
-    unblock([{A, B}]),
-    [await_listening(N, true) || N <- [A, B, C]],
-    [await_partitions(N, []) || N <- [A, B, C]],
-    ok.
-
-partial_pause_if_all_down_with() -> ?CONFIG.
-partial_pause_if_all_down(Cfgs) ->
-    [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs],
-    set_mode(Cfgs, {pause_if_all_down, [B], ignore}),
-    block([{A, B}]),
-    await_running(A, false),
-    [await_running(N, true) || N <- [B, C]],
-    unblock([{A, B}]),
-    [await_listening(N, true) || N <- [A, B, C]],
-    [await_partitions(N, []) || N <- [A, B, C]],
-    ok.
-
-set_mode(Cfgs, Mode) ->
-    [set_env(Cfg, rabbit, cluster_partition_handling, Mode) || Cfg <- Cfgs].
-
-set_env(Cfg, App, K, V) ->
-    rpc(Cfg, application, set_env, [App, K, V]).
-
-block_unblock(Pairs) ->
-    block(Pairs),
-    timer:sleep(?DELAY),
-    unblock(Pairs).
-
-block(Pairs)   -> [block(X, Y) || {X, Y} <- Pairs].
-unblock(Pairs) -> [allow(X, Y) || {X, Y} <- Pairs].
-
-partitions(Node) ->
-    case rpc:call(Node, rabbit_node_monitor, partitions, []) of
-        {badrpc, {'EXIT', E}} = R -> case rabbit_misc:is_abnormal_exit(E) of
-                                         true  -> R;
-                                         false -> timer:sleep(1000),
-                                                  partitions(Node)
-                                     end;
-        Partitions                -> Partitions
-    end.
-
-block(X, Y) ->
-    rpc:call(X, inet_tcp_proxy, block, [Y]),
-    rpc:call(Y, inet_tcp_proxy, block, [X]).
-
-allow(X, Y) ->
-    rpc:call(X, inet_tcp_proxy, allow, [Y]),
-    rpc:call(Y, inet_tcp_proxy, allow, [X]).
-
-await_running   (Node, Bool)  -> await(Node, Bool,  fun is_running/1).
-await_listening (Node, Bool)  -> await(Node, Bool,  fun is_listening/1).
-await_partitions(Node, Parts) -> await(Node, Parts, fun partitions/1).
-
-await(Node, Res, Fun) ->
-    case Fun(Node) of
-        Res -> ok;
-        _   -> timer:sleep(100),
-               await(Node, Res, Fun)
-    end.
-
-is_running(Node) -> rpc:call(Node, rabbit, is_running, []).
-
-is_listening(Node) ->
-    case rpc:call(Node, rabbit_networking, node_listeners, [Node]) of
-        []    -> false;
-        [_|_] -> true;
-        _     -> false
-    end.
-
-enable_dist_proxy(Cfgs) ->
-    inet_tcp_proxy_manager:start_link(),
-    Nodes = [pget(node, Cfg) || Cfg <- Cfgs],
-    [ok = rpc:call(Node, inet_tcp_proxy, start, []) || Node <- Nodes],
-    [ok = rpc:call(Node, inet_tcp_proxy, reconnect, [Nodes]) || Node <- Nodes],
-    Cfgs.
-
-short_ticktime(Time) ->
-    fun (Cfgs) ->
-            [rpc(Cfg, net_kernel, set_net_ticktime, [Time, 0]) || Cfg <- Cfgs],
-            net_kernel:set_net_ticktime(Time, 0),
-            Cfgs
-    end.
-
-rpc(Cfg, M, F, A) ->
-    rpc:call(pget(node, Cfg), M, F, A).
-
-rpc(Cfg, M, F, A, T) ->
-    rpc:call(pget(node, Cfg), M, F, A, T).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/rabbit_priority_queue_test.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/rabbit_priority_queue_test.erl
deleted file mode 100644 (file)
index 44228ff..0000000
+++ /dev/null
@@ -1,335 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_priority_queue_test).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_misc, [pget/2]).
-
-%% The BQ API is used in all sorts of places in all sorts of
-%% ways. Therefore we have to jump through a few different hoops
-%% in order to integration-test it.
-%%
-%% * start/1, stop/0, init/3, terminate/2, delete_and_terminate/2
-%%   - starting and stopping rabbit. durable queues / persistent msgs needed
-%%     to test recovery
-%%
-%% * publish/5, drain_confirmed/1, fetch/2, ack/2, is_duplicate/2, msg_rates/1,
-%%   needs_timeout/1, timeout/1, invoke/3, resume/1 [0]
-%%   - regular publishing and consuming, with confirms and acks and durability
-%%
-%% * publish_delivered/4    - publish with acks straight through
-%% * discard/3              - publish without acks straight through
-%% * dropwhile/2            - expire messages without DLX
-%% * fetchwhile/4           - expire messages with DLX
-%% * ackfold/4              - reject messages with DLX
-%% * requeue/2              - reject messages without DLX
-%% * drop/2                 - maxlen messages without DLX
-%% * purge/1                - issue AMQP queue.purge
-%% * purge_acks/1           - mirror queue explicit sync with unacked msgs
-%% * fold/3                 - mirror queue explicit sync
-%% * depth/1                - mirror queue implicit sync detection
-%% * len/1, is_empty/1      - info items
-%% * handle_pre_hibernate/1 - hibernation
-%%
-%% * set_ram_duration_target/2, ram_duration/1, status/1
-%%   - maybe need unit testing?
-%%
-%% [0] publish enough to get credit flow from msg store
-
-recovery_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, 3),
-    publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]),
-    amqp_connection:close(Conn),
-
-    %% TODO these break coverage
-    rabbit:stop(),
-    rabbit:start(),
-
-    {Conn2, Ch2} = open(),
-    get_all(Ch2, Q, do_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]),
-    delete(Ch2, Q),
-    amqp_connection:close(Conn2),
-    passed.
-
-simple_order_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, 3),
-    publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]),
-    get_all(Ch, Q, do_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]),
-    publish(Ch, Q, [2, 3, 1, 2, 3, 1, 2, 3, 1]),
-    get_all(Ch, Q, no_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]),
-    publish(Ch, Q, [3, 1, 2, 3, 1, 2, 3, 1, 2]),
-    get_all(Ch, Q, do_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]),
-    delete(Ch, Q),
-    amqp_connection:close(Conn),
-    passed.
-
-matching_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, 5),
-    %% We round priority down, and 0 is the default
-    publish(Ch, Q, [undefined, 0, 5, 10, undefined]),
-    get_all(Ch, Q, do_ack, [5, 10, undefined, 0, undefined]),
-    delete(Ch, Q),
-    amqp_connection:close(Conn),
-    passed.
-
-resume_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, 5),
-    amqp_channel:call(Ch, #'confirm.select'{}),
-    publish_many(Ch, Q, 10000),
-    amqp_channel:wait_for_confirms(Ch),
-    amqp_channel:call(Ch, #'queue.purge'{queue = Q}), %% Assert it exists
-    delete(Ch, Q),
-    amqp_connection:close(Conn),
-    passed.
-
-straight_through_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, 3),
-    [begin
-         consume(Ch, Q, Ack),
-         [begin
-              publish1(Ch, Q, P),
-              assert_delivered(Ch, Ack, P)
-          end || P <- [1, 2, 3]],
-         cancel(Ch)
-     end || Ack <- [do_ack, no_ack]],
-    get_empty(Ch, Q),
-    delete(Ch, Q),
-    amqp_connection:close(Conn),
-    passed.
-
-dropwhile_fetchwhile_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    [begin
-         declare(Ch, Q, Args ++ arguments(3)),
-         publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]),
-         timer:sleep(10),
-         get_empty(Ch, Q),
-         delete(Ch, Q)
-     end ||
-        Args <- [[{<<"x-message-ttl">>, long, 1}],
-                 [{<<"x-message-ttl">>,          long,    1},
-                  {<<"x-dead-letter-exchange">>, longstr, <<"amq.fanout">>}]
-                ]],
-    amqp_connection:close(Conn),
-    passed.
-
-ackfold_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    Q2 = <<"test2">>,
-    declare(Ch, Q,
-            [{<<"x-dead-letter-exchange">>, longstr, <<>>},
-             {<<"x-dead-letter-routing-key">>, longstr, Q2}
-             | arguments(3)]),
-    declare(Ch, Q2, none),
-    publish(Ch, Q, [1, 2, 3]),
-    [_, _, DTag] = get_all(Ch, Q, manual_ack, [3, 2, 1]),
-    amqp_channel:cast(Ch, #'basic.nack'{delivery_tag = DTag,
-                                        multiple     = true,
-                                        requeue      = false}),
-    timer:sleep(100),
-    get_all(Ch, Q2, do_ack, [3, 2, 1]),
-    delete(Ch, Q),
-    delete(Ch, Q2),
-    amqp_connection:close(Conn),
-    passed.
-
-requeue_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, 3),
-    publish(Ch, Q, [1, 2, 3]),
-    [_, _, DTag] = get_all(Ch, Q, manual_ack, [3, 2, 1]),
-    amqp_channel:cast(Ch, #'basic.nack'{delivery_tag = DTag,
-                                        multiple     = true,
-                                        requeue      = true}),
-    get_all(Ch, Q, do_ack, [3, 2, 1]),
-    delete(Ch, Q),
-    amqp_connection:close(Conn),
-    passed.
-
-drop_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, [{<<"x-max-length">>, long, 4} | arguments(3)]),
-    publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]),
-    %% We drop from the head, so this is according to the "spec" even
-    %% if not likely to be what the user wants.
-    get_all(Ch, Q, do_ack, [2, 1, 1, 1]),
-    delete(Ch, Q),
-    amqp_connection:close(Conn),
-    passed.
-
-purge_test() ->
-    {Conn, Ch} = open(),
-    Q = <<"test">>,
-    declare(Ch, Q, 3),
-    publish(Ch, Q, [1, 2, 3]),
-    amqp_channel:call(Ch, #'queue.purge'{queue = Q}),
-    get_empty(Ch, Q),
-    delete(Ch, Q),
-    amqp_connection:close(Conn),
-    passed.
-
-ram_duration_test() ->
-    QName = rabbit_misc:r(<<"/">>, queue, <<"pseudo">>),
-    Q0 = rabbit_amqqueue:pseudo_queue(QName, self()),
-    Q = Q0#amqqueue{arguments = [{<<"x-max-priority">>, long, 5}]},
-    PQ = rabbit_priority_queue,
-    BQS1 = PQ:init(Q, new, fun(_, _) -> ok end),
-    {_Duration1, BQS2} = PQ:ram_duration(BQS1),
-    BQS3 = PQ:set_ram_duration_target(infinity, BQS2),
-    BQS4 = PQ:set_ram_duration_target(1, BQS3),
-    {_Duration2, BQS5} = PQ:ram_duration(BQS4),
-    PQ:delete_and_terminate(a_whim, BQS5),
-    passed.
-
-mirror_queue_sync_with() -> cluster_ab.
-mirror_queue_sync([CfgA, _CfgB]) ->
-    Ch = pget(channel, CfgA),
-    Q = <<"test">>,
-    declare(Ch, Q, 3),
-    publish(Ch, Q, [1, 2, 3]),
-    ok = rabbit_test_util:set_ha_policy(CfgA, <<".*">>, <<"all">>),
-    publish(Ch, Q, [1, 2, 3, 1, 2, 3]),
-    %% master now has 9, slave 6.
-    get_partial(Ch, Q, manual_ack, [3, 3, 3, 2, 2, 2]),
-    %% So some but not all are unacked at the slave
-    rabbit_test_util:control_action(sync_queue, CfgA, [binary_to_list(Q)],
-                                    [{"-p", "/"}]),
-    wait_for_sync(CfgA, rabbit_misc:r(<<"/">>, queue, Q)),
-    passed.
-
-%%----------------------------------------------------------------------------
-
-open() ->
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    {Conn, Ch}.
-
-declare(Ch, Q, Args) when is_list(Args) ->
-    amqp_channel:call(Ch, #'queue.declare'{queue     = Q,
-                                           durable   = true,
-                                           arguments = Args});
-declare(Ch, Q, Max) ->
-    declare(Ch, Q, arguments(Max)).
-
-delete(Ch, Q) ->
-    amqp_channel:call(Ch, #'queue.delete'{queue = Q}).
-
-publish(Ch, Q, Ps) ->
-    amqp_channel:call(Ch, #'confirm.select'{}),
-    [publish1(Ch, Q, P) || P <- Ps],
-    amqp_channel:wait_for_confirms(Ch).
-
-publish_many(_Ch, _Q, 0) -> ok;
-publish_many( Ch,  Q, N) -> publish1(Ch, Q, random:uniform(5)),
-                            publish_many(Ch, Q, N - 1).
-
-publish1(Ch, Q, P) ->
-    amqp_channel:cast(Ch, #'basic.publish'{routing_key = Q},
-                      #amqp_msg{props   = props(P),
-                                payload = priority2bin(P)}).
-
-props(undefined) -> #'P_basic'{delivery_mode = 2};
-props(P)         -> #'P_basic'{priority      = P,
-                               delivery_mode = 2}.
-
-consume(Ch, Q, Ack) ->
-    amqp_channel:subscribe(Ch, #'basic.consume'{queue        = Q,
-                                                no_ack       = Ack =:= no_ack,
-                                                consumer_tag = <<"ctag">>},
-                           self()),
-    receive
-        #'basic.consume_ok'{consumer_tag = <<"ctag">>} ->
-             ok
-    end.
-
-cancel(Ch) ->
-    amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = <<"ctag">>}).
-
-assert_delivered(Ch, Ack, P) ->
-    PBin = priority2bin(P),
-    receive
-        {#'basic.deliver'{delivery_tag = DTag}, #amqp_msg{payload = PBin2}} ->
-            ?assertEqual(PBin, PBin2),
-            maybe_ack(Ch, Ack, DTag)
-    end.
-
-get_all(Ch, Q, Ack, Ps) ->
-    DTags = get_partial(Ch, Q, Ack, Ps),
-    get_empty(Ch, Q),
-    DTags.
-
-get_partial(Ch, Q, Ack, Ps) ->
-    [get_ok(Ch, Q, Ack, P) || P <- Ps].
-
-get_empty(Ch, Q) ->
-    #'basic.get_empty'{} = amqp_channel:call(Ch, #'basic.get'{queue = Q}).
-
-get_ok(Ch, Q, Ack, P) ->
-    PBin = priority2bin(P),
-    {#'basic.get_ok'{delivery_tag = DTag}, #amqp_msg{payload = PBin2}} =
-        amqp_channel:call(Ch, #'basic.get'{queue  = Q,
-                                           no_ack = Ack =:= no_ack}),
-    ?assertEqual(PBin, PBin2),
-    maybe_ack(Ch, Ack, DTag).
-
-maybe_ack(Ch, do_ack, DTag) ->
-    amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DTag}),
-    DTag;
-maybe_ack(_Ch, _, DTag) ->
-    DTag.
-
-arguments(none) -> [];
-arguments(Max)  -> [{<<"x-max-priority">>, byte, Max}].
-
-priority2bin(undefined) -> <<"undefined">>;
-priority2bin(Int)       -> list_to_binary(integer_to_list(Int)).
-
-%%----------------------------------------------------------------------------
-
-wait_for_sync(Cfg, Q) ->
-    case synced(Cfg, Q) of
-        true  -> ok;
-        false -> timer:sleep(100),
-                 wait_for_sync(Cfg, Q)
-    end.
-
-synced(Cfg, Q) ->
-    Info = rpc:call(pget(node, Cfg),
-                    rabbit_amqqueue, info_all,
-                    [<<"/">>, [name, synchronised_slave_pids]]),
-    [SSPids] = [Pids || [{name, Q1}, {synchronised_slave_pids, Pids}] <- Info,
-                        Q =:= Q1],
-    length(SSPids) =:= 1.
-
-%%----------------------------------------------------------------------------
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/simple_ha.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/simple_ha.erl
deleted file mode 100644 (file)
index 389ff23..0000000
+++ /dev/null
@@ -1,143 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(simple_ha).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_test_util, [set_ha_policy/3, a2b/1]).
--import(rabbit_misc, [pget/2]).
-
--define(CONFIG, [cluster_abc, ha_policy_all]).
-
-rapid_redeclare_with() -> [cluster_ab, ha_policy_all].
-rapid_redeclare([CfgA | _]) ->
-    Ch = pget(channel, CfgA),
-    Queue = <<"test">>,
-    [begin
-         amqp_channel:call(Ch, #'queue.declare'{queue   = Queue,
-                                                durable = true}),
-         amqp_channel:call(Ch, #'queue.delete'{queue  = Queue})
-     end || _I <- lists:seq(1, 20)],
-    ok.
-
-%% Check that by the time we get a declare-ok back, the slaves are up
-%% and in Mnesia.
-declare_synchrony_with() -> [cluster_ab, ha_policy_all].
-declare_synchrony([Rabbit, Hare]) ->
-    RabbitCh = pget(channel, Rabbit),
-    HareCh = pget(channel, Hare),
-    Q = <<"mirrored-queue">>,
-    declare(RabbitCh, Q),
-    amqp_channel:call(RabbitCh, #'confirm.select'{}),
-    amqp_channel:cast(RabbitCh, #'basic.publish'{routing_key = Q},
-                      #amqp_msg{props = #'P_basic'{delivery_mode = 2}}),
-    amqp_channel:wait_for_confirms(RabbitCh),
-    _Rabbit2 = rabbit_test_configs:kill_node(Rabbit),
-
-    #'queue.declare_ok'{message_count = 1} = declare(HareCh, Q),
-    ok.
-
-declare(Ch, Name) ->
-    amqp_channel:call(Ch, #'queue.declare'{durable = true, queue = Name}).
-
-consume_survives_stop_with()     -> ?CONFIG.
-consume_survives_sigkill_with()  -> ?CONFIG.
-consume_survives_policy_with()   -> ?CONFIG.
-auto_resume_with()               -> ?CONFIG.
-auto_resume_no_ccn_client_with() -> ?CONFIG.
-
-consume_survives_stop(Cf)     -> consume_survives(Cf, fun stop/2,    true).
-consume_survives_sigkill(Cf)  -> consume_survives(Cf, fun sigkill/2, true).
-consume_survives_policy(Cf)   -> consume_survives(Cf, fun policy/2,  true).
-auto_resume(Cf)               -> consume_survives(Cf, fun sigkill/2, false).
-auto_resume_no_ccn_client(Cf) -> consume_survives(Cf, fun sigkill/2, false,
-                                                  false).
-
-confirms_survive_stop_with()    -> ?CONFIG.
-confirms_survive_sigkill_with() -> ?CONFIG.
-confirms_survive_policy_with()  -> ?CONFIG.
-
-confirms_survive_stop(Cf)    -> confirms_survive(Cf, fun stop/2).
-confirms_survive_sigkill(Cf) -> confirms_survive(Cf, fun sigkill/2).
-confirms_survive_policy(Cf)  -> confirms_survive(Cf, fun policy/2).
-
-%%----------------------------------------------------------------------------
-
-consume_survives(Nodes, DeathFun, CancelOnFailover) ->
-    consume_survives(Nodes, DeathFun, CancelOnFailover, true).
-
-consume_survives([CfgA, CfgB, CfgC] = Nodes,
-                 DeathFun, CancelOnFailover, CCNSupported) ->
-    Msgs = rabbit_test_configs:cover_work_factor(20000, CfgA),
-    Channel1 = pget(channel, CfgA),
-    Channel2 = pget(channel, CfgB),
-    Channel3 = pget(channel, CfgC),
-
-    %% declare the queue on the master, mirrored to the two slaves
-    Queue = <<"test">>,
-    amqp_channel:call(Channel1, #'queue.declare'{queue       = Queue,
-                                                 auto_delete = false}),
-
-    %% start up a consumer
-    ConsCh = case CCNSupported of
-                 true  -> Channel2;
-                 false -> open_incapable_channel(pget(port, CfgB))
-             end,
-    ConsumerPid = rabbit_ha_test_consumer:create(
-                    ConsCh, Queue, self(), CancelOnFailover, Msgs),
-
-    %% send a bunch of messages from the producer
-    ProducerPid = rabbit_ha_test_producer:create(Channel3, Queue,
-                                                 self(), false, Msgs),
-    DeathFun(CfgA, Nodes),
-    %% verify that the consumer got all msgs, or die - the await_response
-    %% calls throw an exception if anything goes wrong....
-    rabbit_ha_test_consumer:await_response(ConsumerPid),
-    rabbit_ha_test_producer:await_response(ProducerPid),
-    ok.
-
-confirms_survive([CfgA, CfgB, _CfgC] = Nodes, DeathFun) ->
-    Msgs = rabbit_test_configs:cover_work_factor(20000, CfgA),
-    Node1Channel = pget(channel, CfgA),
-    Node2Channel = pget(channel, CfgB),
-
-    %% declare the queue on the master, mirrored to the two slaves
-    Queue = <<"test">>,
-    amqp_channel:call(Node1Channel,#'queue.declare'{queue       = Queue,
-                                                    auto_delete = false,
-                                                    durable     = true}),
-
-    %% send a bunch of messages from the producer
-    ProducerPid = rabbit_ha_test_producer:create(Node2Channel, Queue,
-                                                 self(), true, Msgs),
-    DeathFun(CfgA, Nodes),
-    rabbit_ha_test_producer:await_response(ProducerPid),
-    ok.
-
-stop(Cfg, _Cfgs)    -> rabbit_test_util:kill_after(50, Cfg, stop).
-sigkill(Cfg, _Cfgs) -> rabbit_test_util:kill_after(50, Cfg, sigkill).
-policy(Cfg, [_|T])  -> Nodes = [a2b(pget(node, C)) || C <- T],
-                       set_ha_policy(Cfg, <<".*">>, {<<"nodes">>, Nodes}).
-
-open_incapable_channel(NodePort) ->
-    Props = [{<<"capabilities">>, table, []}],
-    {ok, ConsConn} =
-        amqp_connection:start(#amqp_params_network{port              = NodePort,
-                                                   client_properties = Props}),
-    {ok, Ch} = amqp_connection:open_channel(ConsConn),
-    Ch.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/sync_detection.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/sync_detection.erl
deleted file mode 100644 (file)
index 18f6f5d..0000000
+++ /dev/null
@@ -1,189 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
-%%
--module(sync_detection).
-
--compile(export_all).
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--import(rabbit_test_util, [stop_app/1, start_app/1]).
--import(rabbit_misc, [pget/2]).
-
--define(LOOP_RECURSION_DELAY, 100).
-
-slave_synchronization_with() -> [cluster_ab, ha_policy_two_pos].
-slave_synchronization([Master, Slave]) ->
-    Channel = pget(channel, Master),
-    Queue = <<"ha.two.test">>,
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = Queue,
-                                                    auto_delete = false}),
-
-    %% The comments on the right are the queue length and the pending acks on
-    %% the master.
-    stop_app(Slave),
-
-    %% We get and ack one message when the slave is down, and check that when we
-    %% start the slave it's not marked as synced until ack the message.  We also
-    %% publish another message when the slave is up.
-    send_dummy_message(Channel, Queue),                                 % 1 - 0
-    {#'basic.get_ok'{delivery_tag = Tag1}, _} =
-        amqp_channel:call(Channel, #'basic.get'{queue = Queue}),        % 0 - 1
-
-    start_app(Slave),
-
-    slave_unsynced(Master, Queue),
-    send_dummy_message(Channel, Queue),                                 % 1 - 1
-    slave_unsynced(Master, Queue),
-
-    amqp_channel:cast(Channel, #'basic.ack'{delivery_tag = Tag1}),      % 1 - 0
-
-    slave_synced(Master, Queue),
-
-    %% We restart the slave and we send a message, so that the slave will only
-    %% have one of the messages.
-    stop_app(Slave),
-    start_app(Slave),
-
-    send_dummy_message(Channel, Queue),                                 % 2 - 0
-
-    slave_unsynced(Master, Queue),
-
-    %% We reject the message that the slave doesn't have, and verify that it's
-    %% still unsynced
-    {#'basic.get_ok'{delivery_tag = Tag2}, _} =
-        amqp_channel:call(Channel, #'basic.get'{queue = Queue}),        % 1 - 1
-    slave_unsynced(Master, Queue),
-    amqp_channel:cast(Channel, #'basic.reject'{ delivery_tag = Tag2,
-                                                requeue      = true }), % 2 - 0
-    slave_unsynced(Master, Queue),
-    {#'basic.get_ok'{delivery_tag = Tag3}, _} =
-        amqp_channel:call(Channel, #'basic.get'{queue = Queue}),        % 1 - 1
-    amqp_channel:cast(Channel, #'basic.ack'{delivery_tag = Tag3}),      % 1 - 0
-    slave_synced(Master, Queue),
-    {#'basic.get_ok'{delivery_tag = Tag4}, _} =
-        amqp_channel:call(Channel, #'basic.get'{queue = Queue}),        % 0 - 1
-    amqp_channel:cast(Channel, #'basic.ack'{delivery_tag = Tag4}),      % 0 - 0
-    slave_synced(Master, Queue).
-
-slave_synchronization_ttl_with() -> [cluster_abc, ha_policy_two_pos].
-slave_synchronization_ttl([Master, Slave, DLX]) ->
-    Channel = pget(channel, Master),
-    DLXChannel = pget(channel, DLX),
-
-    %% We declare a DLX queue to wait for messages to be TTL'ed
-    DLXQueue = <<"dlx-queue">>,
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = DLXQueue,
-                                                    auto_delete = false}),
-
-    TestMsgTTL = 5000,
-    Queue = <<"ha.two.test">>,
-    %% Sadly we need fairly high numbers for the TTL because starting/stopping
-    %% nodes takes a fair amount of time.
-    Args = [{<<"x-message-ttl">>, long, TestMsgTTL},
-            {<<"x-dead-letter-exchange">>, longstr, <<>>},
-            {<<"x-dead-letter-routing-key">>, longstr, DLXQueue}],
-    #'queue.declare_ok'{} =
-        amqp_channel:call(Channel, #'queue.declare'{queue       = Queue,
-                                                    auto_delete = false,
-                                                    arguments   = Args}),
-
-    slave_synced(Master, Queue),
-
-    %% All unknown
-    stop_app(Slave),
-    send_dummy_message(Channel, Queue),
-    send_dummy_message(Channel, Queue),
-    start_app(Slave),
-    slave_unsynced(Master, Queue),
-    wait_for_messages(DLXQueue, DLXChannel, 2),
-    slave_synced(Master, Queue),
-
-    %% 1 unknown, 1 known
-    stop_app(Slave),
-    send_dummy_message(Channel, Queue),
-    start_app(Slave),
-    slave_unsynced(Master, Queue),
-    send_dummy_message(Channel, Queue),
-    slave_unsynced(Master, Queue),
-    wait_for_messages(DLXQueue, DLXChannel, 2),
-    slave_synced(Master, Queue),
-
-    %% %% both known
-    send_dummy_message(Channel, Queue),
-    send_dummy_message(Channel, Queue),
-    slave_synced(Master, Queue),
-    wait_for_messages(DLXQueue, DLXChannel, 2),
-    slave_synced(Master, Queue),
-
-    ok.
-
-send_dummy_message(Channel, Queue) ->
-    Payload = <<"foo">>,
-    Publish = #'basic.publish'{exchange = <<>>, routing_key = Queue},
-    amqp_channel:cast(Channel, Publish, #amqp_msg{payload = Payload}).
-
-slave_pids(Node, Queue) ->
-    {ok, Q} = rpc:call(Node, rabbit_amqqueue, lookup,
-                       [rabbit_misc:r(<<"/">>, queue, Queue)]),
-    SSP = synchronised_slave_pids,
-    [{SSP, Pids}] = rpc:call(Node, rabbit_amqqueue, info, [Q, [SSP]]),
-    case Pids of
-        '' -> [];
-        _  -> Pids
-    end.
-
-%% The mnesia syncronization takes a while, but we don't want to wait for the
-%% test to fail, since the timetrap is quite high.
-wait_for_sync_status(Status, Cfg, Queue) ->
-    Max = 10000 / ?LOOP_RECURSION_DELAY,
-    wait_for_sync_status(0, Max, Status, pget(node, Cfg), Queue).
-
-wait_for_sync_status(N, Max, Status, Node, Queue) when N >= Max ->
-    erlang:error({sync_status_max_tries_failed,
-                  [{queue, Queue},
-                   {node, Node},
-                   {expected_status, Status},
-                   {max_tried, Max}]});
-wait_for_sync_status(N, Max, Status, Node, Queue) ->
-    Synced = length(slave_pids(Node, Queue)) =:= 1,
-    case Synced =:= Status of
-        true  -> ok;
-        false -> timer:sleep(?LOOP_RECURSION_DELAY),
-                 wait_for_sync_status(N + 1, Max, Status, Node, Queue)
-    end.
-
-slave_synced(Cfg, Queue) ->
-    wait_for_sync_status(true, Cfg, Queue).
-
-slave_unsynced(Cfg, Queue) ->
-    wait_for_sync_status(false, Cfg, Queue).
-
-wait_for_messages(Queue, Channel, N) ->
-    Sub = #'basic.consume'{queue = Queue},
-    #'basic.consume_ok'{consumer_tag = CTag} = amqp_channel:call(Channel, Sub),
-    receive
-        #'basic.consume_ok'{} -> ok
-    end,
-    lists:foreach(
-      fun (_) -> receive
-                     {#'basic.deliver'{delivery_tag = Tag}, _Content} ->
-                         amqp_channel:cast(Channel,
-                                           #'basic.ack'{delivery_tag = Tag})
-                 end
-      end, lists:seq(1, N)),
-    amqp_channel:call(Channel, #'basic.cancel'{consumer_tag = CTag}).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-tracing/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-tracing/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/Makefile b/rabbitmq-server/plugins-src/rabbitmq-tracing/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/README b/rabbitmq-server/plugins-src/rabbitmq-tracing/README
deleted file mode 100644 (file)
index 61b5ae4..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-An opinionated tracing plugin for RabbitMQ management. Build it like
-any other plugin. After installation you should see a "Tracing" tab in
-the management UI. Hopefully use is obvious.
-
-Configuration
-=============
-
-There is one configuration option:
-
-directory: This controls where the log files go. It defaults to
-"/var/tmp/rabbitmq-tracing".
-
-Performance
-===========
-
-On my workstation, rabbitmq-tracing can write about 2000 msg/s to a
-log file. You should be careful using rabbitmq-tracing if you think
-you're going to capture more messages than this. Of course, any
-messages that can't be logged are queued.
-
-The code to serve up the log files over HTTP is pretty dumb, it loads
-the whole log into memory. If you have large log files you may wish
-to transfer them off the server in some other way.
-
-HTTP API
-========
-
-GET            /api/traces
-GET            /api/traces/<vhost>
-GET PUT DELETE /api/traces/<vhost>/<name>
-GET            /api/trace-files
-GET     DELETE /api/trace-files/<name>    (GET returns the file as text/plain,
-                                           not JSON describing it.)
-
-Example for how to create a trace:
-
-$ curl -i -u guest:guest -H "content-type:application/json" -XPUT \
-  http://localhost:55672/api/traces/%2f/my-trace \
-  -d'{"format":"text","pattern":"#", "max_payload_bytes":1000}'
-
-max_payload_bytes is optional (omit it to prevent payload truncation),
-format and pattern are mandatory.
\ No newline at end of file
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/package.mk b/rabbitmq-server/plugins-src/rabbitmq-tracing/package.mk
deleted file mode 100644 (file)
index 58341bb..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-management
-WITH_BROKER_TEST_COMMANDS:=eunit:test(rabbit_tracing_test,[verbose])
-
-CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f)
-define construct_app_commands
-       cp -r $(PACKAGE_DIR)/priv $(APP_DIR)
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbitmq_tracing.app.src b/rabbitmq-server/plugins-src/rabbitmq-tracing/src/rabbitmq_tracing.app.src
deleted file mode 100644 (file)
index df66878..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-{application, rabbitmq_tracing,
- [{description, "RabbitMQ message logging / tracing"},
-  {vsn, "%%VSN%%"},
-  {modules, []},
-  {registered, []},
-  {mod, {rabbit_tracing_app, []}},
-  {env, [{directory, "/var/tmp/rabbitmq-tracing"}]},
-  {applications, [kernel, stdlib, rabbit, rabbitmq_management]}]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/test/src/rabbit_tracing_test.erl b/rabbitmq-server/plugins-src/rabbitmq-tracing/test/src/rabbit_tracing_test.erl
deleted file mode 100644 (file)
index df184eb..0000000
+++ /dev/null
@@ -1,201 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(rabbit_tracing_test).
-
--define(LOG_DIR, "/var/tmp/rabbitmq-tracing/").
-
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
--include_lib("rabbitmq_management/include/rabbit_mgmt_test.hrl").
-
--import(rabbit_misc, [pget/2]).
-
-tracing_test() ->
-    case filelib:is_dir(?LOG_DIR) of
-        true -> {ok, Files} = file:list_dir(?LOG_DIR),
-                [ok = file:delete(?LOG_DIR ++ F) || F <- Files];
-        _    -> ok
-    end,
-
-    [] = http_get("/traces/%2f/"),
-    [] = http_get("/trace-files/"),
-
-    Args = [{format,  <<"json">>},
-            {pattern, <<"#">>}],
-    http_put("/traces/%2f/test", Args, ?NO_CONTENT),
-    assert_list([[{name,    <<"test">>},
-                  {format,  <<"json">>},
-                  {pattern, <<"#">>}]], http_get("/traces/%2f/")),
-    assert_item([{name,    <<"test">>},
-                 {format,  <<"json">>},
-                 {pattern, <<"#">>}], http_get("/traces/%2f/test")),
-
-    {ok, Conn} = amqp_connection:start(#amqp_params_network{}),
-    {ok, Ch} = amqp_connection:open_channel(Conn),
-    amqp_channel:cast(Ch, #'basic.publish'{ exchange    = <<"amq.topic">>,
-                                            routing_key = <<"key">> },
-                      #amqp_msg{props   = #'P_basic'{},
-                                payload = <<"Hello world">>}),
-
-    amqp_channel:close(Ch),
-    amqp_connection:close(Conn),
-
-    timer:sleep(100),
-
-    http_delete("/traces/%2f/test", ?NO_CONTENT),
-    [] = http_get("/traces/%2f/"),
-    assert_list([[{name, <<"test.log">>}]], http_get("/trace-files/")),
-    %% This is a bit cheeky as the log is actually one JSON doc per
-    %% line and we assume here it's only one line
-    assert_item([{type,         <<"published">>},
-                 {exchange,     <<"amq.topic">>},
-                 {routing_keys, [<<"key">>]},
-                 {payload,      base64:encode(<<"Hello world">>)}],
-                http_get("/trace-files/test.log")),
-    http_delete("/trace-files/test.log", ?NO_CONTENT),
-    ok.
-
-tracing_validation_test() ->
-    Path = "/traces/%2f/test",
-    http_put(Path, [{pattern,           <<"#">>}],    ?BAD_REQUEST),
-    http_put(Path, [{format,            <<"json">>}], ?BAD_REQUEST),
-    http_put(Path, [{format,            <<"ebcdic">>},
-                    {pattern,           <<"#">>}],    ?BAD_REQUEST),
-    http_put(Path, [{format,            <<"text">>},
-                    {pattern,           <<"#">>},
-                    {max_payload_bytes, <<"abc">>}],  ?BAD_REQUEST),
-    http_put(Path, [{format,            <<"json">>},
-                    {pattern,           <<"#">>},
-                    {max_payload_bytes, 1000}],       ?NO_CONTENT),
-    http_delete(Path, ?NO_CONTENT),
-    ok.
-
-%%---------------------------------------------------------------------------
-%% Below is copypasta from rabbit_mgmt_test_http, it's not obvious how
-%% to share that given the build system.
-
-http_get(Path) ->
-    http_get(Path, ?OK).
-
-http_get(Path, CodeExp) ->
-    http_get(Path, "guest", "guest", CodeExp).
-
-http_get(Path, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(get, Path, [auth_header(User, Pass)]),
-    assert_code(CodeExp, CodeAct, "GET", Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-http_put(Path, List, CodeExp) ->
-    http_put_raw(Path, format_for_upload(List), CodeExp).
-
-http_put(Path, List, User, Pass, CodeExp) ->
-    http_put_raw(Path, format_for_upload(List), User, Pass, CodeExp).
-
-http_post(Path, List, CodeExp) ->
-    http_post_raw(Path, format_for_upload(List), CodeExp).
-
-http_post(Path, List, User, Pass, CodeExp) ->
-    http_post_raw(Path, format_for_upload(List), User, Pass, CodeExp).
-
-format_for_upload(List) ->
-    iolist_to_binary(mochijson2:encode({struct, List})).
-
-http_put_raw(Path, Body, CodeExp) ->
-    http_upload_raw(put, Path, Body, "guest", "guest", CodeExp).
-
-http_put_raw(Path, Body, User, Pass, CodeExp) ->
-    http_upload_raw(put, Path, Body, User, Pass, CodeExp).
-
-http_post_raw(Path, Body, CodeExp) ->
-    http_upload_raw(post, Path, Body, "guest", "guest", CodeExp).
-
-http_post_raw(Path, Body, User, Pass, CodeExp) ->
-    http_upload_raw(post, Path, Body, User, Pass, CodeExp).
-
-http_upload_raw(Type, Path, Body, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(Type, Path, [auth_header(User, Pass)], Body),
-    assert_code(CodeExp, CodeAct, Type, Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-http_delete(Path, CodeExp) ->
-    http_delete(Path, "guest", "guest", CodeExp).
-
-http_delete(Path, User, Pass, CodeExp) ->
-    {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
-        req(delete, Path, [auth_header(User, Pass)]),
-    assert_code(CodeExp, CodeAct, "DELETE", Path, ResBody),
-    decode(CodeExp, Headers, ResBody).
-
-assert_code(CodeExp, CodeAct, Type, Path, Body) ->
-    case CodeExp of
-        CodeAct -> ok;
-        _       -> throw({expected, CodeExp, got, CodeAct, type, Type,
-                          path, Path, body, Body})
-    end.
-
-req(Type, Path, Headers) ->
-    httpc:request(Type, {?PREFIX ++ Path, Headers}, ?HTTPC_OPTS, []).
-
-req(Type, Path, Headers, Body) ->
-    httpc:request(Type, {?PREFIX ++ Path, Headers, "application/json", Body},
-                  ?HTTPC_OPTS, []).
-
-decode(?OK, _Headers,  ResBody) -> cleanup(mochijson2:decode(ResBody));
-decode(_,    Headers, _ResBody) -> Headers.
-
-cleanup(L) when is_list(L) ->
-    [cleanup(I) || I <- L];
-cleanup({struct, I}) ->
-    cleanup(I);
-cleanup({K, V}) when is_binary(K) ->
-    {list_to_atom(binary_to_list(K)), cleanup(V)};
-cleanup(I) ->
-    I.
-
-auth_header(Username, Password) ->
-    {"Authorization",
-     "Basic " ++ binary_to_list(base64:encode(Username ++ ":" ++ Password))}.
-
-%%---------------------------------------------------------------------------
-
-assert_list(Exp, Act) ->
-    case length(Exp) == length(Act) of
-        true  -> ok;
-        false -> throw({expected, Exp, actual, Act})
-    end,
-    [case length(lists:filter(fun(ActI) -> test_item(ExpI, ActI) end, Act)) of
-         1 -> ok;
-         N -> throw({found, N, ExpI, in, Act})
-     end || ExpI <- Exp].
-
-assert_item(Exp, Act) ->
-    case test_item0(Exp, Act) of
-        [] -> ok;
-        Or -> throw(Or)
-    end.
-
-test_item(Exp, Act) ->
-    case test_item0(Exp, Act) of
-        [] -> true;
-        _  -> false
-    end.
-
-test_item0(Exp, Act) ->
-    [{did_not_find, ExpI, in, Act} || ExpI <- Exp,
-                                      not lists:member(ExpI, Act)].
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/Makefile b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/package.mk b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/package.mk
deleted file mode 100644 (file)
index d5913ca..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-DEPS:=mochiweb-wrapper webmachine-wrapper
-WITH_BROKER_TEST_COMMANDS:=rabbit_web_dispatch_test:test()
-STANDALONE_TEST_COMMANDS:=rabbit_web_dispatch_test_unit:test()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/priv/www/index.html b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/priv/www/index.html
deleted file mode 100644 (file)
index b9f7cd4..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-<html>
-  <head>
-    <title>RabbitMQ HTTP Server Test Page</title>
-  </head>
-  <body>
-  </body>
-</html>
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/src/rabbit_web_dispatch_test.erl b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/src/rabbit_web_dispatch_test.erl
deleted file mode 100644 (file)
index 139dccc..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_web_dispatch_test).
-
--include_lib("eunit/include/eunit.hrl").
-
-query_static_resource_test() ->
-    %% TODO this is a fairly rubbish test, but not as bad as it was
-    rabbit_web_dispatch:register_static_context(test, [{port, 12345}],
-                                                "rabbit_web_dispatch_test",
-                                                ?MODULE, "priv/www", "Test"),
-    {ok, {_Status, _Headers, Body}} =
-        httpc:request("http://localhost:12345/rabbit_web_dispatch_test/index.html"),
-    ?assert(string:str(Body, "RabbitMQ HTTP Server Test Page") /= 0).
-
-add_idempotence_test() ->
-    F = fun(_Req) -> ok end,
-    L = {"/foo", "Foo"},
-    rabbit_web_dispatch_registry:add(foo, [{port, 12345}], F, F, L),
-    rabbit_web_dispatch_registry:add(foo, [{port, 12345}], F, F, L),
-    ?assertEqual(
-       1, length([ok || {"/foo", _, _} <-
-                            rabbit_web_dispatch_registry:list_all()])),
-    passed.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/src/rabbit_web_dispatch_test_unit.erl b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/test/src/rabbit_web_dispatch_test_unit.erl
deleted file mode 100644 (file)
index b90ed40..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2010-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_web_dispatch_test_unit).
-
--include_lib("eunit/include/eunit.hrl").
-
-relativise_test() ->
-    Rel = fun rabbit_web_dispatch_util:relativise/2,
-    ?assertEqual("baz",        Rel("/foo/bar/bash", "/foo/bar/baz")),
-    ?assertEqual("../bax/baz", Rel("/foo/bar/bash", "/foo/bax/baz")),
-    ?assertEqual("../bax/baz", Rel("/bar/bash",     "/bax/baz")),
-    ?assertEqual("..",         Rel("/foo/bar/bash", "/foo/bar")),
-    ?assertEqual("../..",      Rel("/foo/bar/bash", "/foo")),
-    ?assertEqual("bar/baz",    Rel("/foo/bar",      "/foo/bar/baz")),
-    ?assertEqual("foo",        Rel("/",             "/foo")).
-
-unrelativise_test() ->
-    Un = fun rabbit_web_dispatch_util:unrelativise/2,
-    ?assertEqual("/foo/bar", Un("/foo/foo", "bar")),
-    ?assertEqual("/foo/bar", Un("/foo/foo", "./bar")),
-    ?assertEqual("bar",      Un("foo", "bar")),
-    ?assertEqual("/baz/bar", Un("/foo/foo", "../baz/bar")).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/Makefile b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/package.mk b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/package.mk
deleted file mode 100644 (file)
index 01e3b9d..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-RELEASABLE:=true
-DEPS:=rabbitmq-web-dispatch rabbitmq-web-stomp rabbitmq-server
-
-define construct_app_commands
-       cp -r $(PACKAGE_DIR)/priv $(APP_DIR)
-endef
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/sockjs-0.3.js b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/sockjs-0.3.js
deleted file mode 100644 (file)
index 585215c..0000000
+++ /dev/null
@@ -1,2379 +0,0 @@
-/* SockJS client, version 0.3.4, http://sockjs.org, MIT License
-
-Copyright (c) 2011-2012 VMware, Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-*/
-
-// JSON2 by Douglas Crockford (minified).
-var JSON;JSON||(JSON={}),function(){function str(a,b){var c,d,e,f,g=gap,h,i=b[a];i&&typeof i=="object"&&typeof i.toJSON=="function"&&(i=i.toJSON(a)),typeof rep=="function"&&(i=rep.call(b,a,i));switch(typeof i){case"string":return quote(i);case"number":return isFinite(i)?String(i):"null";case"boolean":case"null":return String(i);case"object":if(!i)return"null";gap+=indent,h=[];if(Object.prototype.toString.apply(i)==="[object Array]"){f=i.length;for(c=0;c<f;c+=1)h[c]=str(c,i)||"null";e=h.length===0?"[]":gap?"[\n"+gap+h.join(",\n"+gap)+"\n"+g+"]":"["+h.join(",")+"]",gap=g;return e}if(rep&&typeof rep=="object"){f=rep.length;for(c=0;c<f;c+=1)typeof rep[c]=="string"&&(d=rep[c],e=str(d,i),e&&h.push(quote(d)+(gap?": ":":")+e))}else for(d in i)Object.prototype.hasOwnProperty.call(i,d)&&(e=str(d,i),e&&h.push(quote(d)+(gap?": ":":")+e));e=h.length===0?"{}":gap?"{\n"+gap+h.join(",\n"+gap)+"\n"+g+"}":"{"+h.join(",")+"}",gap=g;return e}}function quote(a){escapable.lastIndex=0;return escapable.test(a)?'"'+a.replace(escapable,function(a){var b=meta[a];return typeof b=="string"?b:"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})+'"':'"'+a+'"'}function f(a){return a<10?"0"+a:a}"use strict",typeof Date.prototype.toJSON!="function"&&(Date.prototype.toJSON=function(a){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null},String.prototype.toJSON=Number.prototype.toJSON=Boolean.prototype.toJSON=function(a){return this.valueOf()});var cx=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,escapable=/[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,gap,indent,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},rep;typeof JSON.stringify!="function"&&(JSON.stringify=function(a,b,c){var 
d;gap="",indent="";if(typeof c=="number")for(d=0;d<c;d+=1)indent+=" ";else typeof c=="string"&&(indent=c);rep=b;if(!b||typeof b=="function"||typeof b=="object"&&typeof b.length=="number")return str("",{"":a});throw new Error("JSON.stringify")}),typeof JSON.parse!="function"&&(JSON.parse=function(text,reviver){function walk(a,b){var c,d,e=a[b];if(e&&typeof e=="object")for(c in e)Object.prototype.hasOwnProperty.call(e,c)&&(d=walk(e,c),d!==undefined?e[c]=d:delete e[c]);return reviver.call(a,b,e)}var j;text=String(text),cx.lastIndex=0,cx.test(text)&&(text=text.replace(cx,function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)}));if(/^[\],:{}\s]*$/.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,""))){j=eval("("+text+")");return typeof reviver=="function"?walk({"":j},""):j}throw new SyntaxError("JSON.parse")})}()
-
-
-//     [*] Including lib/index.js
-// Public object
-SockJS = (function(){
-              var _document = document;
-              var _window = window;
-              var utils = {};
-
-
-//         [*] Including lib/reventtarget.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-/* Simplified implementation of DOM2 EventTarget.
- *   http://www.w3.org/TR/DOM-Level-2-Events/events.html#Events-EventTarget
- */
-var REventTarget = function() {};
-REventTarget.prototype.addEventListener = function (eventType, listener) {
-    if(!this._listeners) {
-         this._listeners = {};
-    }
-    if(!(eventType in this._listeners)) {
-        this._listeners[eventType] = [];
-    }
-    var arr = this._listeners[eventType];
-    if(utils.arrIndexOf(arr, listener) === -1) {
-        arr.push(listener);
-    }
-    return;
-};
-
-REventTarget.prototype.removeEventListener = function (eventType, listener) {
-    if(!(this._listeners && (eventType in this._listeners))) {
-        return;
-    }
-    var arr = this._listeners[eventType];
-    var idx = utils.arrIndexOf(arr, listener);
-    if (idx !== -1) {
-        if(arr.length > 1) {
-            this._listeners[eventType] = arr.slice(0, idx).concat( arr.slice(idx+1) );
-        } else {
-            delete this._listeners[eventType];
-        }
-        return;
-    }
-    return;
-};
-
-REventTarget.prototype.dispatchEvent = function (event) {
-    var t = event.type;
-    var args = Array.prototype.slice.call(arguments, 0);
-    if (this['on'+t]) {
-        this['on'+t].apply(this, args);
-    }
-    if (this._listeners && t in this._listeners) {
-        for(var i=0; i < this._listeners[t].length; i++) {
-            this._listeners[t][i].apply(this, args);
-        }
-    }
-};
-//         [*] End of lib/reventtarget.js
-
-
-//         [*] Including lib/simpleevent.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var SimpleEvent = function(type, obj) {
-    this.type = type;
-    if (typeof obj !== 'undefined') {
-        for(var k in obj) {
-            if (!obj.hasOwnProperty(k)) continue;
-            this[k] = obj[k];
-        }
-    }
-};
-
-SimpleEvent.prototype.toString = function() {
-    var r = [];
-    for(var k in this) {
-        if (!this.hasOwnProperty(k)) continue;
-        var v = this[k];
-        if (typeof v === 'function') v = '[function]';
-        r.push(k + '=' + v);
-    }
-    return 'SimpleEvent(' + r.join(', ') + ')';
-};
-//         [*] End of lib/simpleevent.js
-
-
-//         [*] Including lib/eventemitter.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var EventEmitter = function(events) {
-    var that = this;
-    that._events = events || [];
-    that._listeners = {};
-};
-EventEmitter.prototype.emit = function(type) {
-    var that = this;
-    that._verifyType(type);
-    if (that._nuked) return;
-
-    var args = Array.prototype.slice.call(arguments, 1);
-    if (that['on'+type]) {
-        that['on'+type].apply(that, args);
-    }
-    if (type in that._listeners) {
-        for(var i = 0; i < that._listeners[type].length; i++) {
-            that._listeners[type][i].apply(that, args);
-        }
-    }
-};
-
-EventEmitter.prototype.on = function(type, callback) {
-    var that = this;
-    that._verifyType(type);
-    if (that._nuked) return;
-
-    if (!(type in that._listeners)) {
-        that._listeners[type] = [];
-    }
-    that._listeners[type].push(callback);
-};
-
-EventEmitter.prototype._verifyType = function(type) {
-    var that = this;
-    if (utils.arrIndexOf(that._events, type) === -1) {
-        utils.log('Event ' + JSON.stringify(type) +
-                  ' not listed ' + JSON.stringify(that._events) +
-                  ' in ' + that);
-    }
-};
-
-EventEmitter.prototype.nuke = function() {
-    var that = this;
-    that._nuked = true;
-    for(var i=0; i<that._events.length; i++) {
-        delete that[that._events[i]];
-    }
-    that._listeners = {};
-};
-//         [*] End of lib/eventemitter.js
-
-
-//         [*] Including lib/utils.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var random_string_chars = 'abcdefghijklmnopqrstuvwxyz0123456789_';
-utils.random_string = function(length, max) {
-    max = max || random_string_chars.length;
-    var i, ret = [];
-    for(i=0; i < length; i++) {
-        ret.push( random_string_chars.substr(Math.floor(Math.random() * max),1) );
-    }
-    return ret.join('');
-};
-utils.random_number = function(max) {
-    return Math.floor(Math.random() * max);
-};
-utils.random_number_string = function(max) {
-    var t = (''+(max - 1)).length;
-    var p = Array(t+1).join('0');
-    return (p + utils.random_number(max)).slice(-t);
-};
-
-// Assuming that url looks like: http://asdasd:111/asd
-utils.getOrigin = function(url) {
-    url += '/';
-    var parts = url.split('/').slice(0, 3);
-    return parts.join('/');
-};
-
-utils.isSameOriginUrl = function(url_a, url_b) {
-    // location.origin would do, but it's not always available.
-    if (!url_b) url_b = _window.location.href;
-
-    return (url_a.split('/').slice(0,3).join('/')
-                ===
-            url_b.split('/').slice(0,3).join('/'));
-};
-
-utils.getParentDomain = function(url) {
-    // ipv4 ip address
-    if (/^[0-9.]*$/.test(url)) return url;
-    // ipv6 ip address
-    if (/^\[/.test(url)) return url;
-    // no dots
-    if (!(/[.]/.test(url))) return url;
-
-    var parts = url.split('.').slice(1);
-    return parts.join('.');
-};
-
-utils.objectExtend = function(dst, src) {
-    for(var k in src) {
-        if (src.hasOwnProperty(k)) {
-            dst[k] = src[k];
-        }
-    }
-    return dst;
-};
-
-var WPrefix = '_jp';
-
-utils.polluteGlobalNamespace = function() {
-    if (!(WPrefix in _window)) {
-        _window[WPrefix] = {};
-    }
-};
-
-utils.closeFrame = function (code, reason) {
-    return 'c'+JSON.stringify([code, reason]);
-};
-
-utils.userSetCode = function (code) {
-    return code === 1000 || (code >= 3000 && code <= 4999);
-};
-
-// See: http://www.erg.abdn.ac.uk/~gerrit/dccp/notes/ccid2/rto_estimator/
-// and RFC 2988.
-utils.countRTO = function (rtt) {
-    var rto;
-    if (rtt > 100) {
-        rto = 3 * rtt; // rto > 300msec
-    } else {
-        rto = rtt + 200; // 200msec < rto <= 300msec
-    }
-    return rto;
-}
-
-utils.log = function() {
-    if (_window.console && console.log && console.log.apply) {
-        console.log.apply(console, arguments);
-    }
-};
-
-utils.bind = function(fun, that) {
-    if (fun.bind) {
-        return fun.bind(that);
-    } else {
-        return function() {
-            return fun.apply(that, arguments);
-        };
-    }
-};
-
-utils.flatUrl = function(url) {
-    return url.indexOf('?') === -1 && url.indexOf('#') === -1;
-};
-
-utils.amendUrl = function(url) {
-    var dl = _document.location;
-    if (!url) {
-        throw new Error('Wrong url for SockJS');
-    }
-    if (!utils.flatUrl(url)) {
-        throw new Error('Only basic urls are supported in SockJS');
-    }
-
-    //  '//abc' --> 'http://abc'
-    if (url.indexOf('//') === 0) {
-        url = dl.protocol + url;
-    }
-    // '/abc' --> 'http://localhost:80/abc'
-    if (url.indexOf('/') === 0) {
-        url = dl.protocol + '//' + dl.host + url;
-    }
-    // strip trailing slashes
-    url = url.replace(/[/]+$/,'');
-    return url;
-};
-
-// IE doesn't support [].indexOf.
-utils.arrIndexOf = function(arr, obj){
-    for(var i=0; i < arr.length; i++){
-        if(arr[i] === obj){
-            return i;
-        }
-    }
-    return -1;
-};
-
-utils.arrSkip = function(arr, obj) {
-    var idx = utils.arrIndexOf(arr, obj);
-    if (idx === -1) {
-        return arr.slice();
-    } else {
-        var dst = arr.slice(0, idx);
-        return dst.concat(arr.slice(idx+1));
-    }
-};
-
-// Via: https://gist.github.com/1133122/2121c601c5549155483f50be3da5305e83b8c5df
-utils.isArray = Array.isArray || function(value) {
-    return {}.toString.call(value).indexOf('Array') >= 0
-};
-
-utils.delay = function(t, fun) {
-    if(typeof t === 'function') {
-        fun = t;
-        t = 0;
-    }
-    return setTimeout(fun, t);
-};
-
-
-// Chars worth escaping, as defined by Douglas Crockford:
-//   https://github.com/douglascrockford/JSON-js/blob/47a9882cddeb1e8529e07af9736218075372b8ac/json2.js#L196
-var json_escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
-    json_lookup = {
-"\u0000":"\\u0000","\u0001":"\\u0001","\u0002":"\\u0002","\u0003":"\\u0003",
-"\u0004":"\\u0004","\u0005":"\\u0005","\u0006":"\\u0006","\u0007":"\\u0007",
-"\b":"\\b","\t":"\\t","\n":"\\n","\u000b":"\\u000b","\f":"\\f","\r":"\\r",
-"\u000e":"\\u000e","\u000f":"\\u000f","\u0010":"\\u0010","\u0011":"\\u0011",
-"\u0012":"\\u0012","\u0013":"\\u0013","\u0014":"\\u0014","\u0015":"\\u0015",
-"\u0016":"\\u0016","\u0017":"\\u0017","\u0018":"\\u0018","\u0019":"\\u0019",
-"\u001a":"\\u001a","\u001b":"\\u001b","\u001c":"\\u001c","\u001d":"\\u001d",
-"\u001e":"\\u001e","\u001f":"\\u001f","\"":"\\\"","\\":"\\\\",
-"\u007f":"\\u007f","\u0080":"\\u0080","\u0081":"\\u0081","\u0082":"\\u0082",
-"\u0083":"\\u0083","\u0084":"\\u0084","\u0085":"\\u0085","\u0086":"\\u0086",
-"\u0087":"\\u0087","\u0088":"\\u0088","\u0089":"\\u0089","\u008a":"\\u008a",
-"\u008b":"\\u008b","\u008c":"\\u008c","\u008d":"\\u008d","\u008e":"\\u008e",
-"\u008f":"\\u008f","\u0090":"\\u0090","\u0091":"\\u0091","\u0092":"\\u0092",
-"\u0093":"\\u0093","\u0094":"\\u0094","\u0095":"\\u0095","\u0096":"\\u0096",
-"\u0097":"\\u0097","\u0098":"\\u0098","\u0099":"\\u0099","\u009a":"\\u009a",
-"\u009b":"\\u009b","\u009c":"\\u009c","\u009d":"\\u009d","\u009e":"\\u009e",
-"\u009f":"\\u009f","\u00ad":"\\u00ad","\u0600":"\\u0600","\u0601":"\\u0601",
-"\u0602":"\\u0602","\u0603":"\\u0603","\u0604":"\\u0604","\u070f":"\\u070f",
-"\u17b4":"\\u17b4","\u17b5":"\\u17b5","\u200c":"\\u200c","\u200d":"\\u200d",
-"\u200e":"\\u200e","\u200f":"\\u200f","\u2028":"\\u2028","\u2029":"\\u2029",
-"\u202a":"\\u202a","\u202b":"\\u202b","\u202c":"\\u202c","\u202d":"\\u202d",
-"\u202e":"\\u202e","\u202f":"\\u202f","\u2060":"\\u2060","\u2061":"\\u2061",
-"\u2062":"\\u2062","\u2063":"\\u2063","\u2064":"\\u2064","\u2065":"\\u2065",
-"\u2066":"\\u2066","\u2067":"\\u2067","\u2068":"\\u2068","\u2069":"\\u2069",
-"\u206a":"\\u206a","\u206b":"\\u206b","\u206c":"\\u206c","\u206d":"\\u206d",
-"\u206e":"\\u206e","\u206f":"\\u206f","\ufeff":"\\ufeff","\ufff0":"\\ufff0",
-"\ufff1":"\\ufff1","\ufff2":"\\ufff2","\ufff3":"\\ufff3","\ufff4":"\\ufff4",
-"\ufff5":"\\ufff5","\ufff6":"\\ufff6","\ufff7":"\\ufff7","\ufff8":"\\ufff8",
-"\ufff9":"\\ufff9","\ufffa":"\\ufffa","\ufffb":"\\ufffb","\ufffc":"\\ufffc",
-"\ufffd":"\\ufffd","\ufffe":"\\ufffe","\uffff":"\\uffff"};
-
-// Some extra characters that Chrome gets wrong, and substitutes with
-// something else on the wire.
-var extra_escapable = /[\x00-\x1f\ud800-\udfff\ufffe\uffff\u0300-\u0333\u033d-\u0346\u034a-\u034c\u0350-\u0352\u0357-\u0358\u035c-\u0362\u0374\u037e\u0387\u0591-\u05af\u05c4\u0610-\u0617\u0653-\u0654\u0657-\u065b\u065d-\u065e\u06df-\u06e2\u06eb-\u06ec\u0730\u0732-\u0733\u0735-\u0736\u073a\u073d\u073f-\u0741\u0743\u0745\u0747\u07eb-\u07f1\u0951\u0958-\u095f\u09dc-\u09dd\u09df\u0a33\u0a36\u0a59-\u0a5b\u0a5e\u0b5c-\u0b5d\u0e38-\u0e39\u0f43\u0f4d\u0f52\u0f57\u0f5c\u0f69\u0f72-\u0f76\u0f78\u0f80-\u0f83\u0f93\u0f9d\u0fa2\u0fa7\u0fac\u0fb9\u1939-\u193a\u1a17\u1b6b\u1cda-\u1cdb\u1dc0-\u1dcf\u1dfc\u1dfe\u1f71\u1f73\u1f75\u1f77\u1f79\u1f7b\u1f7d\u1fbb\u1fbe\u1fc9\u1fcb\u1fd3\u1fdb\u1fe3\u1feb\u1fee-\u1fef\u1ff9\u1ffb\u1ffd\u2000-\u2001\u20d0-\u20d1\u20d4-\u20d7\u20e7-\u20e9\u2126\u212a-\u212b\u2329-\u232a\u2adc\u302b-\u302c\uaab2-\uaab3\uf900-\ufa0d\ufa10\ufa12\ufa15-\ufa1e\ufa20\ufa22\ufa25-\ufa26\ufa2a-\ufa2d\ufa30-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4e\ufff0-\uffff]/g,
-    extra_lookup;
-
-// JSON Quote string. Use native implementation when possible.
-var JSONQuote = (JSON && JSON.stringify) || function(string) {
-    json_escapable.lastIndex = 0;
-    if (json_escapable.test(string)) {
-        string = string.replace(json_escapable, function(a) {
-            return json_lookup[a];
-        });
-    }
-    return '"' + string + '"';
-};
-
-// This may be quite slow, so let's delay until user actually uses bad
-// characters.
-var unroll_lookup = function(escapable) {
-    var i;
-    var unrolled = {}
-    var c = []
-    for(i=0; i<65536; i++) {
-        c.push( String.fromCharCode(i) );
-    }
-    escapable.lastIndex = 0;
-    c.join('').replace(escapable, function (a) {
-        unrolled[ a ] = '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
-        return '';
-    });
-    escapable.lastIndex = 0;
-    return unrolled;
-};
-
-// Quote string, also taking care of unicode characters that browsers
-// often break. Especially, take care of unicode surrogates:
-//    http://en.wikipedia.org/wiki/Mapping_of_Unicode_characters#Surrogates
-utils.quote = function(string) {
-    var quoted = JSONQuote(string);
-
-    // In most cases this should be very fast and good enough.
-    extra_escapable.lastIndex = 0;
-    if(!extra_escapable.test(quoted)) {
-        return quoted;
-    }
-
-    if(!extra_lookup) extra_lookup = unroll_lookup(extra_escapable);
-
-    return quoted.replace(extra_escapable, function(a) {
-        return extra_lookup[a];
-    });
-}
-
-var _all_protocols = ['websocket',
-                      'xdr-streaming',
-                      'xhr-streaming',
-                      'iframe-eventsource',
-                      'iframe-htmlfile',
-                      'xdr-polling',
-                      'xhr-polling',
-                      'iframe-xhr-polling',
-                      'jsonp-polling'];
-
-utils.probeProtocols = function() {
-    var probed = {};
-    for(var i=0; i<_all_protocols.length; i++) {
-        var protocol = _all_protocols[i];
-        // User can have a typo in protocol name.
-        probed[protocol] = SockJS[protocol] &&
-                           SockJS[protocol].enabled();
-    }
-    return probed;
-};
-
-utils.detectProtocols = function(probed, protocols_whitelist, info) {
-    var pe = {},
-        protocols = [];
-    if (!protocols_whitelist) protocols_whitelist = _all_protocols;
-    for(var i=0; i<protocols_whitelist.length; i++) {
-        var protocol = protocols_whitelist[i];
-        pe[protocol] = probed[protocol];
-    }
-    var maybe_push = function(protos) {
-        var proto = protos.shift();
-        if (pe[proto]) {
-            protocols.push(proto);
-        } else {
-            if (protos.length > 0) {
-                maybe_push(protos);
-            }
-        }
-    }
-
-    // 1. Websocket
-    if (info.websocket !== false) {
-        maybe_push(['websocket']);
-    }
-
-    // 2. Streaming
-    if (pe['xhr-streaming'] && !info.null_origin) {
-        protocols.push('xhr-streaming');
-    } else {
-        if (pe['xdr-streaming'] && !info.cookie_needed && !info.null_origin) {
-            protocols.push('xdr-streaming');
-        } else {
-            maybe_push(['iframe-eventsource',
-                        'iframe-htmlfile']);
-        }
-    }
-
-    // 3. Polling
-    if (pe['xhr-polling'] && !info.null_origin) {
-        protocols.push('xhr-polling');
-    } else {
-        if (pe['xdr-polling'] && !info.cookie_needed && !info.null_origin) {
-            protocols.push('xdr-polling');
-        } else {
-            maybe_push(['iframe-xhr-polling',
-                        'jsonp-polling']);
-        }
-    }
-    return protocols;
-}
-//         [*] End of lib/utils.js
-
-
-//         [*] Including lib/dom.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-// May be used by htmlfile jsonp and transports.
-var MPrefix = '_sockjs_global';
-utils.createHook = function() {
-    var window_id = 'a' + utils.random_string(8);
-    if (!(MPrefix in _window)) {
-        var map = {};
-        _window[MPrefix] = function(window_id) {
-            if (!(window_id in map)) {
-                map[window_id] = {
-                    id: window_id,
-                    del: function() {delete map[window_id];}
-                };
-            }
-            return map[window_id];
-        }
-    }
-    return _window[MPrefix](window_id);
-};
-
-
-
-utils.attachMessage = function(listener) {
-    utils.attachEvent('message', listener);
-};
-utils.attachEvent = function(event, listener) {
-    if (typeof _window.addEventListener !== 'undefined') {
-        _window.addEventListener(event, listener, false);
-    } else {
-        // IE quirks.
-        // According to: http://stevesouders.com/misc/test-postmessage.php
-        // the message gets delivered only to 'document', not 'window'.
-        _document.attachEvent("on" + event, listener);
-        // I get 'window' for ie8.
-        _window.attachEvent("on" + event, listener);
-    }
-};
-
-utils.detachMessage = function(listener) {
-    utils.detachEvent('message', listener);
-};
-utils.detachEvent = function(event, listener) {
-    if (typeof _window.addEventListener !== 'undefined') {
-        _window.removeEventListener(event, listener, false);
-    } else {
-        _document.detachEvent("on" + event, listener);
-        _window.detachEvent("on" + event, listener);
-    }
-};
-
-
-var on_unload = {};
-// Things registered after beforeunload are to be called immediately.
-var after_unload = false;
-
-var trigger_unload_callbacks = function() {
-    for(var ref in on_unload) {
-        on_unload[ref]();
-        delete on_unload[ref];
-    };
-};
-
-var unload_triggered = function() {
-    if(after_unload) return;
-    after_unload = true;
-    trigger_unload_callbacks();
-};
-
-// 'unload' alone is not reliable in opera within an iframe, but we
-// can't use `beforeunload` as IE fires it on javascript: links.
-utils.attachEvent('unload', unload_triggered);
-
-utils.unload_add = function(listener) {
-    var ref = utils.random_string(8);
-    on_unload[ref] = listener;
-    if (after_unload) {
-        utils.delay(trigger_unload_callbacks);
-    }
-    return ref;
-};
-utils.unload_del = function(ref) {
-    if (ref in on_unload)
-        delete on_unload[ref];
-};
-
-
-utils.createIframe = function (iframe_url, error_callback) {
-    var iframe = _document.createElement('iframe');
-    var tref, unload_ref;
-    var unattach = function() {
-        clearTimeout(tref);
-        // Explorer had problems with that.
-        try {iframe.onload = null;} catch (x) {}
-        iframe.onerror = null;
-    };
-    var cleanup = function() {
-        if (iframe) {
-            unattach();
-            // This timeout makes chrome fire onbeforeunload event
-            // within iframe. Without the timeout it goes straight to
-            // onunload.
-            setTimeout(function() {
-                if(iframe) {
-                    iframe.parentNode.removeChild(iframe);
-                }
-                iframe = null;
-            }, 0);
-            utils.unload_del(unload_ref);
-        }
-    };
-    var onerror = function(r) {
-        if (iframe) {
-            cleanup();
-            error_callback(r);
-        }
-    };
-    var post = function(msg, origin) {
-        try {
-            // When the iframe is not loaded, IE raises an exception
-            // on 'contentWindow'.
-            if (iframe && iframe.contentWindow) {
-                iframe.contentWindow.postMessage(msg, origin);
-            }
-        } catch (x) {};
-    };
-
-    iframe.src = iframe_url;
-    iframe.style.display = 'none';
-    iframe.style.position = 'absolute';
-    iframe.onerror = function(){onerror('onerror');};
-    iframe.onload = function() {
-        // `onload` is triggered before scripts on the iframe are
-        // executed. Give it few seconds to actually load stuff.
-        clearTimeout(tref);
-        tref = setTimeout(function(){onerror('onload timeout');}, 2000);
-    };
-    _document.body.appendChild(iframe);
-    tref = setTimeout(function(){onerror('timeout');}, 15000);
-    unload_ref = utils.unload_add(cleanup);
-    return {
-        post: post,
-        cleanup: cleanup,
-        loaded: unattach
-    };
-};
-
-utils.createHtmlfile = function (iframe_url, error_callback) {
-    var doc = new ActiveXObject('htmlfile');
-    var tref, unload_ref;
-    var iframe;
-    var unattach = function() {
-        clearTimeout(tref);
-    };
-    var cleanup = function() {
-        if (doc) {
-            unattach();
-            utils.unload_del(unload_ref);
-            iframe.parentNode.removeChild(iframe);
-            iframe = doc = null;
-            CollectGarbage();
-        }
-    };
-    var onerror = function(r)  {
-        if (doc) {
-            cleanup();
-            error_callback(r);
-        }
-    };
-    var post = function(msg, origin) {
-        try {
-            // When the iframe is not loaded, IE raises an exception
-            // on 'contentWindow'.
-            if (iframe && iframe.contentWindow) {
-                iframe.contentWindow.postMessage(msg, origin);
-            }
-        } catch (x) {};
-    };
-
-    doc.open();
-    doc.write('<html><s' + 'cript>' +
-              'document.domain="' + document.domain + '";' +
-              '</s' + 'cript></html>');
-    doc.close();
-    doc.parentWindow[WPrefix] = _window[WPrefix];
-    var c = doc.createElement('div');
-    doc.body.appendChild(c);
-    iframe = doc.createElement('iframe');
-    c.appendChild(iframe);
-    iframe.src = iframe_url;
-    tref = setTimeout(function(){onerror('timeout');}, 15000);
-    unload_ref = utils.unload_add(cleanup);
-    return {
-        post: post,
-        cleanup: cleanup,
-        loaded: unattach
-    };
-};
-//         [*] End of lib/dom.js
-
-
-//         [*] Including lib/dom2.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var AbstractXHRObject = function(){};
-AbstractXHRObject.prototype = new EventEmitter(['chunk', 'finish']);
-
-AbstractXHRObject.prototype._start = function(method, url, payload, opts) {
-    var that = this;
-
-    try {
-        that.xhr = new XMLHttpRequest();
-    } catch(x) {};
-
-    if (!that.xhr) {
-        try {
-            that.xhr = new _window.ActiveXObject('Microsoft.XMLHTTP');
-        } catch(x) {};
-    }
-    if (_window.ActiveXObject || _window.XDomainRequest) {
-        // IE8 caches even POSTs
-        url += ((url.indexOf('?') === -1) ? '?' : '&') + 't='+(+new Date);
-    }
-
-    // Explorer tends to keep connection open, even after the
-    // tab gets closed: http://bugs.jquery.com/ticket/5280
-    that.unload_ref = utils.unload_add(function(){that._cleanup(true);});
-    try {
-        that.xhr.open(method, url, true);
-    } catch(e) {
-        // IE raises an exception on wrong port.
-        that.emit('finish', 0, '');
-        that._cleanup();
-        return;
-    };
-
-    if (!opts || !opts.no_credentials) {
-        // Mozilla docs says https://developer.mozilla.org/en/XMLHttpRequest :
-        // "This never affects same-site requests."
-        that.xhr.withCredentials = 'true';
-    }
-    if (opts && opts.headers) {
-        for(var key in opts.headers) {
-            that.xhr.setRequestHeader(key, opts.headers[key]);
-        }
-    }
-
-    that.xhr.onreadystatechange = function() {
-        if (that.xhr) {
-            var x = that.xhr;
-            switch (x.readyState) {
-            case 3:
-                // IE doesn't like peeking into responseText or status
-                // on Microsoft.XMLHTTP and readystate=3
-                try {
-                    var status = x.status;
-                    var text = x.responseText;
-                } catch (x) {};
-                // IE returns 1223 for 204: http://bugs.jquery.com/ticket/1450
-                if (status === 1223) status = 204;
-
-                // IE does return readystate == 3 for 404 answers.
-                if (text && text.length > 0) {
-                    that.emit('chunk', status, text);
-                }
-                break;
-            case 4:
-                var status = x.status;
-                // IE returns 1223 for 204: http://bugs.jquery.com/ticket/1450
-                if (status === 1223) status = 204;
-
-                that.emit('finish', status, x.responseText);
-                that._cleanup(false);
-                break;
-            }
-        }
-    };
-    that.xhr.send(payload);
-};
-
-AbstractXHRObject.prototype._cleanup = function(abort) {
-    var that = this;
-    if (!that.xhr) return;
-    utils.unload_del(that.unload_ref);
-
-    // IE needs this field to be a function
-    that.xhr.onreadystatechange = function(){};
-
-    if (abort) {
-        try {
-            that.xhr.abort();
-        } catch(x) {};
-    }
-    that.unload_ref = that.xhr = null;
-};
-
-AbstractXHRObject.prototype.close = function() {
-    var that = this;
-    that.nuke();
-    that._cleanup(true);
-};
-
-var XHRCorsObject = utils.XHRCorsObject = function() {
-    var that = this, args = arguments;
-    utils.delay(function(){that._start.apply(that, args);});
-};
-XHRCorsObject.prototype = new AbstractXHRObject();
-
-var XHRLocalObject = utils.XHRLocalObject = function(method, url, payload) {
-    var that = this;
-    utils.delay(function(){
-        that._start(method, url, payload, {
-            no_credentials: true
-        });
-    });
-};
-XHRLocalObject.prototype = new AbstractXHRObject();
-
-
-
-// References:
-//   http://ajaxian.com/archives/100-line-ajax-wrapper
-//   http://msdn.microsoft.com/en-us/library/cc288060(v=VS.85).aspx
-var XDRObject = utils.XDRObject = function(method, url, payload) {
-    var that = this;
-    utils.delay(function(){that._start(method, url, payload);});
-};
-XDRObject.prototype = new EventEmitter(['chunk', 'finish']);
-XDRObject.prototype._start = function(method, url, payload) {
-    var that = this;
-    var xdr = new XDomainRequest();
-    // IE caches even POSTs
-    url += ((url.indexOf('?') === -1) ? '?' : '&') + 't='+(+new Date);
-
-    var onerror = xdr.ontimeout = xdr.onerror = function() {
-        that.emit('finish', 0, '');
-        that._cleanup(false);
-    };
-    xdr.onprogress = function() {
-        that.emit('chunk', 200, xdr.responseText);
-    };
-    xdr.onload = function() {
-        that.emit('finish', 200, xdr.responseText);
-        that._cleanup(false);
-    };
-    that.xdr = xdr;
-    that.unload_ref = utils.unload_add(function(){that._cleanup(true);});
-    try {
-        // Fails with AccessDenied if port number is bogus
-        that.xdr.open(method, url);
-        that.xdr.send(payload);
-    } catch(x) {
-        onerror();
-    }
-};
-
-XDRObject.prototype._cleanup = function(abort) {
-    var that = this;
-    if (!that.xdr) return;
-    utils.unload_del(that.unload_ref);
-
-    that.xdr.ontimeout = that.xdr.onerror = that.xdr.onprogress =
-        that.xdr.onload = null;
-    if (abort) {
-        try {
-            that.xdr.abort();
-        } catch(x) {};
-    }
-    that.unload_ref = that.xdr = null;
-};
-
-XDRObject.prototype.close = function() {
-    var that = this;
-    that.nuke();
-    that._cleanup(true);
-};
-
-// 1. Is natively via XHR
-// 2. Is natively via XDR
-// 3. Nope, but postMessage is there so it should work via the Iframe.
-// 4. Nope, sorry.
-utils.isXHRCorsCapable = function() {
-    if (_window.XMLHttpRequest && 'withCredentials' in new XMLHttpRequest()) {
-        return 1;
-    }
-    // XDomainRequest doesn't work if page is served from file://
-    if (_window.XDomainRequest && _document.domain) {
-        return 2;
-    }
-    if (IframeTransport.enabled()) {
-        return 3;
-    }
-    return 4;
-};
-//         [*] End of lib/dom2.js
-
-
-//         [*] Including lib/sockjs.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var SockJS = function(url, dep_protocols_whitelist, options) {
-    if (this === _window) {
-        // makes `new` optional
-        return new SockJS(url, dep_protocols_whitelist, options);
-    }
-    
-    var that = this, protocols_whitelist;
-    that._options = {devel: false, debug: false, protocols_whitelist: [],
-                     info: undefined, rtt: undefined};
-    if (options) {
-        utils.objectExtend(that._options, options);
-    }
-    that._base_url = utils.amendUrl(url);
-    that._server = that._options.server || utils.random_number_string(1000);
-    if (that._options.protocols_whitelist &&
-        that._options.protocols_whitelist.length) {
-        protocols_whitelist = that._options.protocols_whitelist;
-    } else {
-        // Deprecated API
-        if (typeof dep_protocols_whitelist === 'string' &&
-            dep_protocols_whitelist.length > 0) {
-            protocols_whitelist = [dep_protocols_whitelist];
-        } else if (utils.isArray(dep_protocols_whitelist)) {
-            protocols_whitelist = dep_protocols_whitelist
-        } else {
-            protocols_whitelist = null;
-        }
-        if (protocols_whitelist) {
-            that._debug('Deprecated API: Use "protocols_whitelist" option ' +
-                        'instead of supplying protocol list as a second ' +
-                        'parameter to SockJS constructor.');
-        }
-    }
-    that._protocols = [];
-    that.protocol = null;
-    that.readyState = SockJS.CONNECTING;
-    that._ir = createInfoReceiver(that._base_url);
-    that._ir.onfinish = function(info, rtt) {
-        that._ir = null;
-        if (info) {
-            if (that._options.info) {
-                // Override if user supplies the option
-                info = utils.objectExtend(info, that._options.info);
-            }
-            if (that._options.rtt) {
-                rtt = that._options.rtt;
-            }
-            that._applyInfo(info, rtt, protocols_whitelist);
-            that._didClose();
-        } else {
-            that._didClose(1002, 'Can\'t connect to server', true);
-        }
-    };
-};
-// Inheritance
-SockJS.prototype = new REventTarget();
-
-SockJS.version = "0.3.4";
-
-SockJS.CONNECTING = 0;
-SockJS.OPEN = 1;
-SockJS.CLOSING = 2;
-SockJS.CLOSED = 3;
-
-SockJS.prototype._debug = function() {
-    if (this._options.debug)
-        utils.log.apply(utils, arguments);
-};
-
-SockJS.prototype._dispatchOpen = function() {
-    var that = this;
-    if (that.readyState === SockJS.CONNECTING) {
-        if (that._transport_tref) {
-            clearTimeout(that._transport_tref);
-            that._transport_tref = null;
-        }
-        that.readyState = SockJS.OPEN;
-        that.dispatchEvent(new SimpleEvent("open"));
-    } else {
-        // The server might have been restarted, and lost track of our
-        // connection.
-        that._didClose(1006, "Server lost session");
-    }
-};
-
-SockJS.prototype._dispatchMessage = function(data) {
-    var that = this;
-    if (that.readyState !== SockJS.OPEN)
-            return;
-    that.dispatchEvent(new SimpleEvent("message", {data: data}));
-};
-
-SockJS.prototype._dispatchHeartbeat = function(data) {
-    var that = this;
-    if (that.readyState !== SockJS.OPEN)
-        return;
-    that.dispatchEvent(new SimpleEvent('heartbeat', {}));
-};
-
-SockJS.prototype._didClose = function(code, reason, force) {
-    var that = this;
-    if (that.readyState !== SockJS.CONNECTING &&
-        that.readyState !== SockJS.OPEN &&
-        that.readyState !== SockJS.CLOSING)
-            throw new Error('INVALID_STATE_ERR');
-    if (that._ir) {
-        that._ir.nuke();
-        that._ir = null;
-    }
-
-    if (that._transport) {
-        that._transport.doCleanup();
-        that._transport = null;
-    }
-
-    var close_event = new SimpleEvent("close", {
-        code: code,
-        reason: reason,
-        wasClean: utils.userSetCode(code)});
-
-    if (!utils.userSetCode(code) &&
-        that.readyState === SockJS.CONNECTING && !force) {
-        if (that._try_next_protocol(close_event)) {
-            return;
-        }
-        close_event = new SimpleEvent("close", {code: 2000,
-                                                reason: "All transports failed",
-                                                wasClean: false,
-                                                last_event: close_event});
-    }
-    that.readyState = SockJS.CLOSED;
-
-    utils.delay(function() {
-                   that.dispatchEvent(close_event);
-                });
-};
-
-SockJS.prototype._didMessage = function(data) {
-    var that = this;
-    var type = data.slice(0, 1);
-    switch(type) {
-    case 'o':
-        that._dispatchOpen();
-        break;
-    case 'a':
-        var payload = JSON.parse(data.slice(1) || '[]');
-        for(var i=0; i < payload.length; i++){
-            that._dispatchMessage(payload[i]);
-        }
-        break;
-    case 'm':
-        var payload = JSON.parse(data.slice(1) || 'null');
-        that._dispatchMessage(payload);
-        break;
-    case 'c':
-        var payload = JSON.parse(data.slice(1) || '[]');
-        that._didClose(payload[0], payload[1]);
-        break;
-    case 'h':
-        that._dispatchHeartbeat();
-        break;
-    }
-};
-
-SockJS.prototype._try_next_protocol = function(close_event) {
-    var that = this;
-    if (that.protocol) {
-        that._debug('Closed transport:', that.protocol, ''+close_event);
-        that.protocol = null;
-    }
-    if (that._transport_tref) {
-        clearTimeout(that._transport_tref);
-        that._transport_tref = null;
-    }
-
-    while(1) {
-        var protocol = that.protocol = that._protocols.shift();
-        if (!protocol) {
-            return false;
-        }
-        // Some protocols require access to `body`, what if were in
-        // the `head`?
-        if (SockJS[protocol] &&
-            SockJS[protocol].need_body === true &&
-            (!_document.body ||
-             (typeof _document.readyState !== 'undefined'
-              && _document.readyState !== 'complete'))) {
-            that._protocols.unshift(protocol);
-            that.protocol = 'waiting-for-load';
-            utils.attachEvent('load', function(){
-                that._try_next_protocol();
-            });
-            return true;
-        }
-
-        if (!SockJS[protocol] ||
-              !SockJS[protocol].enabled(that._options)) {
-            that._debug('Skipping transport:', protocol);
-        } else {
-            var roundTrips = SockJS[protocol].roundTrips || 1;
-            var to = ((that._options.rto || 0) * roundTrips) || 5000;
-            that._transport_tref = utils.delay(to, function() {
-                if (that.readyState === SockJS.CONNECTING) {
-                    // I can't understand how it is possible to run
-                    // this timer, when the state is CLOSED, but
-                    // apparently in IE everythin is possible.
-                    that._didClose(2007, "Transport timeouted");
-                }
-            });
-
-            var connid = utils.random_string(8);
-            var trans_url = that._base_url + '/' + that._server + '/' + connid;
-            that._debug('Opening transport:', protocol, ' url:'+trans_url,
-                        ' RTO:'+that._options.rto);
-            that._transport = new SockJS[protocol](that, trans_url,
-                                                   that._base_url);
-            return true;
-        }
-    }
-};
-
-SockJS.prototype.close = function(code, reason) {
-    var that = this;
-    if (code && !utils.userSetCode(code))
-        throw new Error("INVALID_ACCESS_ERR");
-    if(that.readyState !== SockJS.CONNECTING &&
-       that.readyState !== SockJS.OPEN) {
-        return false;
-    }
-    that.readyState = SockJS.CLOSING;
-    that._didClose(code || 1000, reason || "Normal closure");
-    return true;
-};
-
-SockJS.prototype.send = function(data) {
-    var that = this;
-    if (that.readyState === SockJS.CONNECTING)
-        throw new Error('INVALID_STATE_ERR');
-    if (that.readyState === SockJS.OPEN) {
-        that._transport.doSend(utils.quote('' + data));
-    }
-    return true;
-};
-
-SockJS.prototype._applyInfo = function(info, rtt, protocols_whitelist) {
-    var that = this;
-    that._options.info = info;
-    that._options.rtt = rtt;
-    that._options.rto = utils.countRTO(rtt);
-    that._options.info.null_origin = !_document.domain;
-    var probed = utils.probeProtocols();
-    that._protocols = utils.detectProtocols(probed, protocols_whitelist, info);
-};
-//         [*] End of lib/sockjs.js
-
-
-//         [*] Including lib/trans-websocket.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var WebSocketTransport = SockJS.websocket = function(ri, trans_url) {
-    var that = this;
-    var url = trans_url + '/websocket';
-    if (url.slice(0, 5) === 'https') {
-        url = 'wss' + url.slice(5);
-    } else {
-        url = 'ws' + url.slice(4);
-    }
-    that.ri = ri;
-    that.url = url;
-    var Constructor = _window.WebSocket || _window.MozWebSocket;
-
-    that.ws = new Constructor(that.url);
-    that.ws.onmessage = function(e) {
-        that.ri._didMessage(e.data);
-    };
-    // Firefox has an interesting bug. If a websocket connection is
-    // created after onunload, it stays alive even when user
-    // navigates away from the page. In such situation let's lie -
-    // let's not open the ws connection at all. See:
-    // https://github.com/sockjs/sockjs-client/issues/28
-    // https://bugzilla.mozilla.org/show_bug.cgi?id=696085
-    that.unload_ref = utils.unload_add(function(){that.ws.close()});
-    that.ws.onclose = function() {
-        that.ri._didMessage(utils.closeFrame(1006, "WebSocket connection broken"));
-    };
-};
-
-WebSocketTransport.prototype.doSend = function(data) {
-    this.ws.send('[' + data + ']');
-};
-
-WebSocketTransport.prototype.doCleanup = function() {
-    var that = this;
-    var ws = that.ws;
-    if (ws) {
-        ws.onmessage = ws.onclose = null;
-        ws.close();
-        utils.unload_del(that.unload_ref);
-        that.unload_ref = that.ri = that.ws = null;
-    }
-};
-
-WebSocketTransport.enabled = function() {
-    return !!(_window.WebSocket || _window.MozWebSocket);
-};
-
-// In theory, ws should require 1 round trip. But in chrome, this is
-// not very stable over SSL. Most likely a ws connection requires a
-// separate SSL connection, in which case 2 round trips are an
-// absolute minumum.
-WebSocketTransport.roundTrips = 2;
-//         [*] End of lib/trans-websocket.js
-
-
-//         [*] Including lib/trans-sender.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var BufferedSender = function() {};
-BufferedSender.prototype.send_constructor = function(sender) {
-    var that = this;
-    that.send_buffer = [];
-    that.sender = sender;
-};
-BufferedSender.prototype.doSend = function(message) {
-    var that = this;
-    that.send_buffer.push(message);
-    if (!that.send_stop) {
-        that.send_schedule();
-    }
-};
-
-// For polling transports in a situation when in the message callback,
-// new message is being send. If the sending connection was started
-// before receiving one, it is possible to saturate the network and
-// timeout due to the lack of receiving socket. To avoid that we delay
-// sending messages by some small time, in order to let receiving
-// connection be started beforehand. This is only a halfmeasure and
-// does not fix the big problem, but it does make the tests go more
-// stable on slow networks.
-BufferedSender.prototype.send_schedule_wait = function() {
-    var that = this;
-    var tref;
-    that.send_stop = function() {
-        that.send_stop = null;
-        clearTimeout(tref);
-    };
-    tref = utils.delay(25, function() {
-        that.send_stop = null;
-        that.send_schedule();
-    });
-};
-
-BufferedSender.prototype.send_schedule = function() {
-    var that = this;
-    if (that.send_buffer.length > 0) {
-        var payload = '[' + that.send_buffer.join(',') + ']';
-        that.send_stop = that.sender(that.trans_url, payload, function(success, abort_reason) {
-            that.send_stop = null;
-            if (success === false) {
-                that.ri._didClose(1006, 'Sending error ' + abort_reason);
-            } else {
-                that.send_schedule_wait();
-            }
-        });
-        that.send_buffer = [];
-    }
-};
-
-BufferedSender.prototype.send_destructor = function() {
-    var that = this;
-    if (that._send_stop) {
-        that._send_stop();
-    }
-    that._send_stop = null;
-};
-
-var jsonPGenericSender = function(url, payload, callback) {
-    var that = this;
-
-    if (!('_send_form' in that)) {
-        var form = that._send_form = _document.createElement('form');
-        var area = that._send_area = _document.createElement('textarea');
-        area.name = 'd';
-        form.style.display = 'none';
-        form.style.position = 'absolute';
-        form.method = 'POST';
-        form.enctype = 'application/x-www-form-urlencoded';
-        form.acceptCharset = "UTF-8";
-        form.appendChild(area);
-        _document.body.appendChild(form);
-    }
-    var form = that._send_form;
-    var area = that._send_area;
-    var id = 'a' + utils.random_string(8);
-    form.target = id;
-    form.action = url + '/jsonp_send?i=' + id;
-
-    var iframe;
-    try {
-        // ie6 dynamic iframes with target="" support (thanks Chris Lambacher)
-        iframe = _document.createElement('<iframe name="'+ id +'">');
-    } catch(x) {
-        iframe = _document.createElement('iframe');
-        iframe.name = id;
-    }
-    iframe.id = id;
-    form.appendChild(iframe);
-    iframe.style.display = 'none';
-
-    try {
-        area.value = payload;
-    } catch(e) {
-        utils.log('Your browser is seriously broken. Go home! ' + e.message);
-    }
-    form.submit();
-
-    var completed = function(e) {
-        if (!iframe.onerror) return;
-        iframe.onreadystatechange = iframe.onerror = iframe.onload = null;
-        // Opera mini doesn't like if we GC iframe
-        // immediately, thus this timeout.
-        utils.delay(500, function() {
-                       iframe.parentNode.removeChild(iframe);
-                       iframe = null;
-                   });
-        area.value = '';
-        // It is not possible to detect if the iframe succeeded or
-        // failed to submit our form.
-        callback(true);
-    };
-    iframe.onerror = iframe.onload = completed;
-    iframe.onreadystatechange = function(e) {
-        if (iframe.readyState == 'complete') completed();
-    };
-    return completed;
-};
-
-var createAjaxSender = function(AjaxObject) {
-    return function(url, payload, callback) {
-        var xo = new AjaxObject('POST', url + '/xhr_send', payload);
-        xo.onfinish = function(status, text) {
-            callback(status === 200 || status === 204,
-                     'http status ' + status);
-        };
-        return function(abort_reason) {
-            callback(false, abort_reason);
-        };
-    };
-};
-//         [*] End of lib/trans-sender.js
-
-
-//         [*] Including lib/trans-jsonp-receiver.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-// Parts derived from Socket.io:
-//    https://github.com/LearnBoost/socket.io/blob/0.6.17/lib/socket.io/transports/jsonp-polling.js
-// and jQuery-JSONP:
-//    https://code.google.com/p/jquery-jsonp/source/browse/trunk/core/jquery.jsonp.js
-var jsonPGenericReceiver = function(url, callback) {
-    var tref;
-    var script = _document.createElement('script');
-    var script2;  // Opera synchronous load trick.
-    var close_script = function(frame) {
-        if (script2) {
-            script2.parentNode.removeChild(script2);
-            script2 = null;
-        }
-        if (script) {
-            clearTimeout(tref);
-            // Unfortunately, you can't really abort script loading of
-            // the script.
-            script.parentNode.removeChild(script);
-            script.onreadystatechange = script.onerror =
-                script.onload = script.onclick = null;
-            script = null;
-            callback(frame);
-            callback = null;
-        }
-    };
-
-    // IE9 fires 'error' event after orsc or before, in random order.
-    var loaded_okay = false;
-    var error_timer = null;
-
-    script.id = 'a' + utils.random_string(8);
-    script.src = url;
-    script.type = 'text/javascript';
-    script.charset = 'UTF-8';
-    script.onerror = function(e) {
-        if (!error_timer) {
-            // Delay firing close_script.
-            error_timer = setTimeout(function() {
-                if (!loaded_okay) {
-                    close_script(utils.closeFrame(
-                        1006,
-                        "JSONP script loaded abnormally (onerror)"));
-                }
-            }, 1000);
-        }
-    };
-    script.onload = function(e) {
-        close_script(utils.closeFrame(1006, "JSONP script loaded abnormally (onload)"));
-    };
-
-    script.onreadystatechange = function(e) {
-        if (/loaded|closed/.test(script.readyState)) {
-            if (script && script.htmlFor && script.onclick) {
-                loaded_okay = true;
-                try {
-                    // In IE, actually execute the script.
-                    script.onclick();
-                } catch (x) {}
-            }
-            if (script) {
-                close_script(utils.closeFrame(1006, "JSONP script loaded abnormally (onreadystatechange)"));
-            }
-        }
-    };
-    // IE: event/htmlFor/onclick trick.
-    // One can't rely on proper order for onreadystatechange. In order to
-    // make sure, set a 'htmlFor' and 'event' properties, so that
-    // script code will be installed as 'onclick' handler for the
-    // script object. Later, onreadystatechange, manually execute this
-    // code. FF and Chrome doesn't work with 'event' and 'htmlFor'
-    // set. For reference see:
-    //   http://jaubourg.net/2010/07/loading-script-as-onclick-handler-of.html
-    // Also, read on that about script ordering:
-    //   http://wiki.whatwg.org/wiki/Dynamic_Script_Execution_Order
-    if (typeof script.async === 'undefined' && _document.attachEvent) {
-        // According to mozilla docs, in recent browsers script.async defaults
-        // to 'true', so we may use it to detect a good browser:
-        // https://developer.mozilla.org/en/HTML/Element/script
-        if (!/opera/i.test(navigator.userAgent)) {
-            // Naively assume we're in IE
-            try {
-                script.htmlFor = script.id;
-                script.event = "onclick";
-            } catch (x) {}
-            script.async = true;
-        } else {
-            // Opera, second sync script hack
-            script2 = _document.createElement('script');
-            script2.text = "try{var a = document.getElementById('"+script.id+"'); if(a)a.onerror();}catch(x){};";
-            script.async = script2.async = false;
-        }
-    }
-    if (typeof script.async !== 'undefined') {
-        script.async = true;
-    }
-
-    // Fallback mostly for Konqueror - stupid timer, 35 seconds shall be plenty.
-    tref = setTimeout(function() {
-                          close_script(utils.closeFrame(1006, "JSONP script loaded abnormally (timeout)"));
-                      }, 35000);
-
-    var head = _document.getElementsByTagName('head')[0];
-    head.insertBefore(script, head.firstChild);
-    if (script2) {
-        head.insertBefore(script2, head.firstChild);
-    }
-    return close_script;
-};
-//         [*] End of lib/trans-jsonp-receiver.js
-
-
-//         [*] Including lib/trans-jsonp-polling.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-// The simplest and most robust transport, using the well-know cross
-// domain hack - JSONP. This transport is quite inefficient - one
-// mssage could use up to one http request. But at least it works almost
-// everywhere.
-// Known limitations:
-//   o you will get a spinning cursor
-//   o for Konqueror a dumb timer is needed to detect errors
-
-
-var JsonPTransport = SockJS['jsonp-polling'] = function(ri, trans_url) {
-    utils.polluteGlobalNamespace();
-    var that = this;
-    that.ri = ri;
-    that.trans_url = trans_url;
-    that.send_constructor(jsonPGenericSender);
-    that._schedule_recv();
-};
-
-// Inheritnace
-JsonPTransport.prototype = new BufferedSender();
-
-JsonPTransport.prototype._schedule_recv = function() {
-    var that = this;
-    var callback = function(data) {
-        that._recv_stop = null;
-        if (data) {
-            // no data - heartbeat;
-            if (!that._is_closing) {
-                that.ri._didMessage(data);
-            }
-        }
-        // The message can be a close message, and change is_closing state.
-        if (!that._is_closing) {
-            that._schedule_recv();
-        }
-    };
-    that._recv_stop = jsonPReceiverWrapper(that.trans_url + '/jsonp',
-                                           jsonPGenericReceiver, callback);
-};
-
-JsonPTransport.enabled = function() {
-    return true;
-};
-
-JsonPTransport.need_body = true;
-
-
-JsonPTransport.prototype.doCleanup = function() {
-    var that = this;
-    that._is_closing = true;
-    if (that._recv_stop) {
-        that._recv_stop();
-    }
-    that.ri = that._recv_stop = null;
-    that.send_destructor();
-};
-
-
-// Abstract away code that handles global namespace pollution.
-var jsonPReceiverWrapper = function(url, constructReceiver, user_callback) {
-    var id = 'a' + utils.random_string(6);
-    var url_id = url + '?c=' + escape(WPrefix + '.' + id);
-
-    // Unfortunately it is not possible to abort loading of the
-    // script. We need to keep track of frake close frames.
-    var aborting = 0;
-
-    // Callback will be called exactly once.
-    var callback = function(frame) {
-        switch(aborting) {
-        case 0:
-            // Normal behaviour - delete hook _and_ emit message.
-            delete _window[WPrefix][id];
-            user_callback(frame);
-            break;
-        case 1:
-            // Fake close frame - emit but don't delete hook.
-            user_callback(frame);
-            aborting = 2;
-            break;
-        case 2:
-            // Got frame after connection was closed, delete hook, don't emit.
-            delete _window[WPrefix][id];
-            break;
-        }
-    };
-
-    var close_script = constructReceiver(url_id, callback);
-    _window[WPrefix][id] = close_script;
-    var stop = function() {
-        if (_window[WPrefix][id]) {
-            aborting = 1;
-            _window[WPrefix][id](utils.closeFrame(1000, "JSONP user aborted read"));
-        }
-    };
-    return stop;
-};
-//         [*] End of lib/trans-jsonp-polling.js
-
-
-//         [*] Including lib/trans-xhr.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var AjaxBasedTransport = function() {};
-AjaxBasedTransport.prototype = new BufferedSender();
-
-AjaxBasedTransport.prototype.run = function(ri, trans_url,
-                                            url_suffix, Receiver, AjaxObject) {
-    var that = this;
-    that.ri = ri;
-    that.trans_url = trans_url;
-    that.send_constructor(createAjaxSender(AjaxObject));
-    that.poll = new Polling(ri, Receiver,
-                            trans_url + url_suffix, AjaxObject);
-};
-
-AjaxBasedTransport.prototype.doCleanup = function() {
-    var that = this;
-    if (that.poll) {
-        that.poll.abort();
-        that.poll = null;
-    }
-};
-
-// xhr-streaming
-var XhrStreamingTransport = SockJS['xhr-streaming'] = function(ri, trans_url) {
-    this.run(ri, trans_url, '/xhr_streaming', XhrReceiver, utils.XHRCorsObject);
-};
-
-XhrStreamingTransport.prototype = new AjaxBasedTransport();
-
-XhrStreamingTransport.enabled = function() {
-    // Support for CORS Ajax aka Ajax2? Opera 12 claims CORS but
-    // doesn't do streaming.
-    return (_window.XMLHttpRequest &&
-            'withCredentials' in new XMLHttpRequest() &&
-            (!/opera/i.test(navigator.userAgent)));
-};
-XhrStreamingTransport.roundTrips = 2; // preflight, ajax
-
-// Safari gets confused when a streaming ajax request is started
-// before onload. This causes the load indicator to spin indefinetely.
-XhrStreamingTransport.need_body = true;
-
-
-// According to:
-//   http://stackoverflow.com/questions/1641507/detect-browser-support-for-cross-domain-xmlhttprequests
-//   http://hacks.mozilla.org/2009/07/cross-site-xmlhttprequest-with-cors/
-
-
-// xdr-streaming
-var XdrStreamingTransport = SockJS['xdr-streaming'] = function(ri, trans_url) {
-    this.run(ri, trans_url, '/xhr_streaming', XhrReceiver, utils.XDRObject);
-};
-
-XdrStreamingTransport.prototype = new AjaxBasedTransport();
-
-XdrStreamingTransport.enabled = function() {
-    return !!_window.XDomainRequest;
-};
-XdrStreamingTransport.roundTrips = 2; // preflight, ajax
-
-
-
-// xhr-polling
-var XhrPollingTransport = SockJS['xhr-polling'] = function(ri, trans_url) {
-    this.run(ri, trans_url, '/xhr', XhrReceiver, utils.XHRCorsObject);
-};
-
-XhrPollingTransport.prototype = new AjaxBasedTransport();
-
-XhrPollingTransport.enabled = XhrStreamingTransport.enabled;
-XhrPollingTransport.roundTrips = 2; // preflight, ajax
-
-
-// xdr-polling
-var XdrPollingTransport = SockJS['xdr-polling'] = function(ri, trans_url) {
-    this.run(ri, trans_url, '/xhr', XhrReceiver, utils.XDRObject);
-};
-
-XdrPollingTransport.prototype = new AjaxBasedTransport();
-
-XdrPollingTransport.enabled = XdrStreamingTransport.enabled;
-XdrPollingTransport.roundTrips = 2; // preflight, ajax
-//         [*] End of lib/trans-xhr.js
-
-
-//         [*] Including lib/trans-iframe.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-// Few cool transports do work only for same-origin. In order to make
-// them working cross-domain we shall use iframe, served form the
-// remote domain. New browsers, have capabilities to communicate with
-// cross domain iframe, using postMessage(). In IE it was implemented
-// from IE 8+, but of course, IE got some details wrong:
-//    http://msdn.microsoft.com/en-us/library/cc197015(v=VS.85).aspx
-//    http://stevesouders.com/misc/test-postmessage.php
-
-var IframeTransport = function() {};
-
-IframeTransport.prototype.i_constructor = function(ri, trans_url, base_url) {
-    var that = this;
-    that.ri = ri;
-    that.origin = utils.getOrigin(base_url);
-    that.base_url = base_url;
-    that.trans_url = trans_url;
-
-    var iframe_url = base_url + '/iframe.html';
-    if (that.ri._options.devel) {
-        iframe_url += '?t=' + (+new Date);
-    }
-    that.window_id = utils.random_string(8);
-    iframe_url += '#' + that.window_id;
-
-    that.iframeObj = utils.createIframe(iframe_url, function(r) {
-                                            that.ri._didClose(1006, "Unable to load an iframe (" + r + ")");
-                                        });
-
-    that.onmessage_cb = utils.bind(that.onmessage, that);
-    utils.attachMessage(that.onmessage_cb);
-};
-
-IframeTransport.prototype.doCleanup = function() {
-    var that = this;
-    if (that.iframeObj) {
-        utils.detachMessage(that.onmessage_cb);
-        try {
-            // When the iframe is not loaded, IE raises an exception
-            // on 'contentWindow'.
-            if (that.iframeObj.iframe.contentWindow) {
-                that.postMessage('c');
-            }
-        } catch (x) {}
-        that.iframeObj.cleanup();
-        that.iframeObj = null;
-        that.onmessage_cb = that.iframeObj = null;
-    }
-};
-
-IframeTransport.prototype.onmessage = function(e) {
-    var that = this;
-    if (e.origin !== that.origin) return;
-    var window_id = e.data.slice(0, 8);
-    var type = e.data.slice(8, 9);
-    var data = e.data.slice(9);
-
-    if (window_id !== that.window_id) return;
-
-    switch(type) {
-    case 's':
-        that.iframeObj.loaded();
-        that.postMessage('s', JSON.stringify([SockJS.version, that.protocol, that.trans_url, that.base_url]));
-        break;
-    case 't':
-        that.ri._didMessage(data);
-        break;
-    }
-};
-
-IframeTransport.prototype.postMessage = function(type, data) {
-    var that = this;
-    that.iframeObj.post(that.window_id + type + (data || ''), that.origin);
-};
-
-IframeTransport.prototype.doSend = function (message) {
-    this.postMessage('m', message);
-};
-
-IframeTransport.enabled = function() {
-    // postMessage misbehaves in konqueror 4.6.5 - the messages are delivered with
-    // huge delay, or not at all.
-    var konqueror = navigator && navigator.userAgent && navigator.userAgent.indexOf('Konqueror') !== -1;
-    return ((typeof _window.postMessage === 'function' ||
-            typeof _window.postMessage === 'object') && (!konqueror));
-};
-//         [*] End of lib/trans-iframe.js
-
-
-//         [*] Including lib/trans-iframe-within.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var curr_window_id;
-
-var postMessage = function (type, data) {
-    if(parent !== _window) {
-        parent.postMessage(curr_window_id + type + (data || ''), '*');
-    } else {
-        utils.log("Can't postMessage, no parent window.", type, data);
-    }
-};
-
-var FacadeJS = function() {};
-FacadeJS.prototype._didClose = function (code, reason) {
-    postMessage('t', utils.closeFrame(code, reason));
-};
-FacadeJS.prototype._didMessage = function (frame) {
-    postMessage('t', frame);
-};
-FacadeJS.prototype._doSend = function (data) {
-    this._transport.doSend(data);
-};
-FacadeJS.prototype._doCleanup = function () {
-    this._transport.doCleanup();
-};
-
-utils.parent_origin = undefined;
-
-SockJS.bootstrap_iframe = function() {
-    var facade;
-    curr_window_id = _document.location.hash.slice(1);
-    var onMessage = function(e) {
-        if(e.source !== parent) return;
-        if(typeof utils.parent_origin === 'undefined')
-            utils.parent_origin = e.origin;
-        if (e.origin !== utils.parent_origin) return;
-
-        var window_id = e.data.slice(0, 8);
-        var type = e.data.slice(8, 9);
-        var data = e.data.slice(9);
-        if (window_id !== curr_window_id) return;
-        switch(type) {
-        case 's':
-            var p = JSON.parse(data);
-            var version = p[0];
-            var protocol = p[1];
-            var trans_url = p[2];
-            var base_url = p[3];
-            if (version !== SockJS.version) {
-                utils.log("Incompatibile SockJS! Main site uses:" +
-                          " \"" + version + "\", the iframe:" +
-                          " \"" + SockJS.version + "\".");
-            }
-            if (!utils.flatUrl(trans_url) || !utils.flatUrl(base_url)) {
-                utils.log("Only basic urls are supported in SockJS");
-                return;
-            }
-
-            if (!utils.isSameOriginUrl(trans_url) ||
-                !utils.isSameOriginUrl(base_url)) {
-                utils.log("Can't connect to different domain from within an " +
-                          "iframe. (" + JSON.stringify([_window.location.href, trans_url, base_url]) +
-                          ")");
-                return;
-            }
-            facade = new FacadeJS();
-            facade._transport = new FacadeJS[protocol](facade, trans_url, base_url);
-            break;
-        case 'm':
-            facade._doSend(data);
-            break;
-        case 'c':
-            if (facade)
-                facade._doCleanup();
-            facade = null;
-            break;
-        }
-    };
-
-    // alert('test ticker');
-    // facade = new FacadeJS();
-    // facade._transport = new FacadeJS['w-iframe-xhr-polling'](facade, 'http://host.com:9999/ticker/12/basd');
-
-    utils.attachMessage(onMessage);
-
-    // Start
-    postMessage('s');
-};
-//         [*] End of lib/trans-iframe-within.js
-
-
-//         [*] Including lib/info.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var InfoReceiver = function(base_url, AjaxObject) {
-    var that = this;
-    utils.delay(function(){that.doXhr(base_url, AjaxObject);});
-};
-
-InfoReceiver.prototype = new EventEmitter(['finish']);
-
-InfoReceiver.prototype.doXhr = function(base_url, AjaxObject) {
-    var that = this;
-    var t0 = (new Date()).getTime();
-    var xo = new AjaxObject('GET', base_url + '/info');
-
-    var tref = utils.delay(8000,
-                           function(){xo.ontimeout();});
-
-    xo.onfinish = function(status, text) {
-        clearTimeout(tref);
-        tref = null;
-        if (status === 200) {
-            var rtt = (new Date()).getTime() - t0;
-            var info = JSON.parse(text);
-            if (typeof info !== 'object') info = {};
-            that.emit('finish', info, rtt);
-        } else {
-            that.emit('finish');
-        }
-    };
-    xo.ontimeout = function() {
-        xo.close();
-        that.emit('finish');
-    };
-};
-
-var InfoReceiverIframe = function(base_url) {
-    var that = this;
-    var go = function() {
-        var ifr = new IframeTransport();
-        ifr.protocol = 'w-iframe-info-receiver';
-        var fun = function(r) {
-            if (typeof r === 'string' && r.substr(0,1) === 'm') {
-                var d = JSON.parse(r.substr(1));
-                var info = d[0], rtt = d[1];
-                that.emit('finish', info, rtt);
-            } else {
-                that.emit('finish');
-            }
-            ifr.doCleanup();
-            ifr = null;
-        };
-        var mock_ri = {
-            _options: {},
-            _didClose: fun,
-            _didMessage: fun
-        };
-        ifr.i_constructor(mock_ri, base_url, base_url);
-    }
-    if(!_document.body) {
-        utils.attachEvent('load', go);
-    } else {
-        go();
-    }
-};
-InfoReceiverIframe.prototype = new EventEmitter(['finish']);
-
-
-var InfoReceiverFake = function() {
-    // It may not be possible to do cross domain AJAX to get the info
-    // data, for example for IE7. But we want to run JSONP, so let's
-    // fake the response, with rtt=2s (rto=6s).
-    var that = this;
-    utils.delay(function() {
-        that.emit('finish', {}, 2000);
-    });
-};
-InfoReceiverFake.prototype = new EventEmitter(['finish']);
-
-var createInfoReceiver = function(base_url) {
-    if (utils.isSameOriginUrl(base_url)) {
-        // If, for some reason, we have SockJS locally - there's no
-        // need to start up the complex machinery. Just use ajax.
-        return new InfoReceiver(base_url, utils.XHRLocalObject);
-    }
-    switch (utils.isXHRCorsCapable()) {
-    case 1:
-        // XHRLocalObject -> no_credentials=true
-        return new InfoReceiver(base_url, utils.XHRLocalObject);
-    case 2:
-        return new InfoReceiver(base_url, utils.XDRObject);
-    case 3:
-        // Opera
-        return new InfoReceiverIframe(base_url);
-    default:
-        // IE 7
-        return new InfoReceiverFake();
-    };
-};
-
-
-var WInfoReceiverIframe = FacadeJS['w-iframe-info-receiver'] = function(ri, _trans_url, base_url) {
-    var ir = new InfoReceiver(base_url, utils.XHRLocalObject);
-    ir.onfinish = function(info, rtt) {
-        ri._didMessage('m'+JSON.stringify([info, rtt]));
-        ri._didClose();
-    }
-};
-WInfoReceiverIframe.prototype.doCleanup = function() {};
-//         [*] End of lib/info.js
-
-
-//         [*] Including lib/trans-iframe-eventsource.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var EventSourceIframeTransport = SockJS['iframe-eventsource'] = function () {
-    var that = this;
-    that.protocol = 'w-iframe-eventsource';
-    that.i_constructor.apply(that, arguments);
-};
-
-EventSourceIframeTransport.prototype = new IframeTransport();
-
-EventSourceIframeTransport.enabled = function () {
-    return ('EventSource' in _window) && IframeTransport.enabled();
-};
-
-EventSourceIframeTransport.need_body = true;
-EventSourceIframeTransport.roundTrips = 3; // html, javascript, eventsource
-
-
-// w-iframe-eventsource
-var EventSourceTransport = FacadeJS['w-iframe-eventsource'] = function(ri, trans_url) {
-    this.run(ri, trans_url, '/eventsource', EventSourceReceiver, utils.XHRLocalObject);
-}
-EventSourceTransport.prototype = new AjaxBasedTransport();
-//         [*] End of lib/trans-iframe-eventsource.js
-
-
-//         [*] Including lib/trans-iframe-xhr-polling.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var XhrPollingIframeTransport = SockJS['iframe-xhr-polling'] = function () {
-    var that = this;
-    that.protocol = 'w-iframe-xhr-polling';
-    that.i_constructor.apply(that, arguments);
-};
-
-XhrPollingIframeTransport.prototype = new IframeTransport();
-
-XhrPollingIframeTransport.enabled = function () {
-    return _window.XMLHttpRequest && IframeTransport.enabled();
-};
-
-XhrPollingIframeTransport.need_body = true;
-XhrPollingIframeTransport.roundTrips = 3; // html, javascript, xhr
-
-
-// w-iframe-xhr-polling
-var XhrPollingITransport = FacadeJS['w-iframe-xhr-polling'] = function(ri, trans_url) {
-    this.run(ri, trans_url, '/xhr', XhrReceiver, utils.XHRLocalObject);
-};
-
-XhrPollingITransport.prototype = new AjaxBasedTransport();
-//         [*] End of lib/trans-iframe-xhr-polling.js
-
-
-//         [*] Including lib/trans-iframe-htmlfile.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-// This transport generally works in any browser, but will cause a
-// spinning cursor to appear in any browser other than IE.
-// We may test this transport in all browsers - why not, but in
-// production it should be only run in IE.
-
-var HtmlFileIframeTransport = SockJS['iframe-htmlfile'] = function () {
-    var that = this;
-    that.protocol = 'w-iframe-htmlfile';
-    that.i_constructor.apply(that, arguments);
-};
-
-// Inheritance.
-HtmlFileIframeTransport.prototype = new IframeTransport();
-
-HtmlFileIframeTransport.enabled = function() {
-    return IframeTransport.enabled();
-};
-
-HtmlFileIframeTransport.need_body = true;
-HtmlFileIframeTransport.roundTrips = 3; // html, javascript, htmlfile
-
-
-// w-iframe-htmlfile
-var HtmlFileTransport = FacadeJS['w-iframe-htmlfile'] = function(ri, trans_url) {
-    this.run(ri, trans_url, '/htmlfile', HtmlfileReceiver, utils.XHRLocalObject);
-};
-HtmlFileTransport.prototype = new AjaxBasedTransport();
-//         [*] End of lib/trans-iframe-htmlfile.js
-
-
-//         [*] Including lib/trans-polling.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var Polling = function(ri, Receiver, recv_url, AjaxObject) {
-    var that = this;
-    that.ri = ri;
-    that.Receiver = Receiver;
-    that.recv_url = recv_url;
-    that.AjaxObject = AjaxObject;
-    that._scheduleRecv();
-};
-
-Polling.prototype._scheduleRecv = function() {
-    var that = this;
-    var poll = that.poll = new that.Receiver(that.recv_url, that.AjaxObject);
-    var msg_counter = 0;
-    poll.onmessage = function(e) {
-        msg_counter += 1;
-        that.ri._didMessage(e.data);
-    };
-    poll.onclose = function(e) {
-        that.poll = poll = poll.onmessage = poll.onclose = null;
-        if (!that.poll_is_closing) {
-            if (e.reason === 'permanent') {
-                that.ri._didClose(1006, 'Polling error (' + e.reason + ')');
-            } else {
-                that._scheduleRecv();
-            }
-        }
-    };
-};
-
-Polling.prototype.abort = function() {
-    var that = this;
-    that.poll_is_closing = true;
-    if (that.poll) {
-        that.poll.abort();
-    }
-};
-//         [*] End of lib/trans-polling.js
-
-
-//         [*] Including lib/trans-receiver-eventsource.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var EventSourceReceiver = function(url) {
-    var that = this;
-    var es = new EventSource(url);
-    es.onmessage = function(e) {
-        that.dispatchEvent(new SimpleEvent('message',
-                                           {'data': unescape(e.data)}));
-    };
-    that.es_close = es.onerror = function(e, abort_reason) {
-        // ES on reconnection has readyState = 0 or 1.
-        // on network error it's CLOSED = 2
-        var reason = abort_reason ? 'user' :
-            (es.readyState !== 2 ? 'network' : 'permanent');
-        that.es_close = es.onmessage = es.onerror = null;
-        // EventSource reconnects automatically.
-        es.close();
-        es = null;
-        // Safari and chrome < 15 crash if we close window before
-        // waiting for ES cleanup. See:
-        //   https://code.google.com/p/chromium/issues/detail?id=89155
-        utils.delay(200, function() {
-                        that.dispatchEvent(new SimpleEvent('close', {reason: reason}));
-                    });
-    };
-};
-
-EventSourceReceiver.prototype = new REventTarget();
-
-EventSourceReceiver.prototype.abort = function() {
-    var that = this;
-    if (that.es_close) {
-        that.es_close({}, true);
-    }
-};
-//         [*] End of lib/trans-receiver-eventsource.js
-
-
-//         [*] Including lib/trans-receiver-htmlfile.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var _is_ie_htmlfile_capable;
-var isIeHtmlfileCapable = function() {
-    if (_is_ie_htmlfile_capable === undefined) {
-        if ('ActiveXObject' in _window) {
-            try {
-                _is_ie_htmlfile_capable = !!new ActiveXObject('htmlfile');
-            } catch (x) {}
-        } else {
-            _is_ie_htmlfile_capable = false;
-        }
-    }
-    return _is_ie_htmlfile_capable;
-};
-
-
-var HtmlfileReceiver = function(url) {
-    var that = this;
-    utils.polluteGlobalNamespace();
-
-    that.id = 'a' + utils.random_string(6, 26);
-    url += ((url.indexOf('?') === -1) ? '?' : '&') +
-        'c=' + escape(WPrefix + '.' + that.id);
-
-    var constructor = isIeHtmlfileCapable() ?
-        utils.createHtmlfile : utils.createIframe;
-
-    var iframeObj;
-    _window[WPrefix][that.id] = {
-        start: function () {
-            iframeObj.loaded();
-        },
-        message: function (data) {
-            that.dispatchEvent(new SimpleEvent('message', {'data': data}));
-        },
-        stop: function () {
-            that.iframe_close({}, 'network');
-        }
-    };
-    that.iframe_close = function(e, abort_reason) {
-        iframeObj.cleanup();
-        that.iframe_close = iframeObj = null;
-        delete _window[WPrefix][that.id];
-        that.dispatchEvent(new SimpleEvent('close', {reason: abort_reason}));
-    };
-    iframeObj = constructor(url, function(e) {
-                                that.iframe_close({}, 'permanent');
-                            });
-};
-
-HtmlfileReceiver.prototype = new REventTarget();
-
-HtmlfileReceiver.prototype.abort = function() {
-    var that = this;
-    if (that.iframe_close) {
-        that.iframe_close({}, 'user');
-    }
-};
-//         [*] End of lib/trans-receiver-htmlfile.js
-
-
-//         [*] Including lib/trans-receiver-xhr.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-var XhrReceiver = function(url, AjaxObject) {
-    var that = this;
-    var buf_pos = 0;
-
-    that.xo = new AjaxObject('POST', url, null);
-    that.xo.onchunk = function(status, text) {
-        if (status !== 200) return;
-        while (1) {
-            var buf = text.slice(buf_pos);
-            var p = buf.indexOf('\n');
-            if (p === -1) break;
-            buf_pos += p+1;
-            var msg = buf.slice(0, p);
-            that.dispatchEvent(new SimpleEvent('message', {data: msg}));
-        }
-    };
-    that.xo.onfinish = function(status, text) {
-        that.xo.onchunk(status, text);
-        that.xo = null;
-        var reason = status === 200 ? 'network' : 'permanent';
-        that.dispatchEvent(new SimpleEvent('close', {reason: reason}));
-    }
-};
-
-XhrReceiver.prototype = new REventTarget();
-
-XhrReceiver.prototype.abort = function() {
-    var that = this;
-    if (that.xo) {
-        that.xo.close();
-        that.dispatchEvent(new SimpleEvent('close', {reason: 'user'}));
-        that.xo = null;
-    }
-};
-//         [*] End of lib/trans-receiver-xhr.js
-
-
-//         [*] Including lib/test-hooks.js
-/*
- * ***** BEGIN LICENSE BLOCK *****
- * Copyright (c) 2011-2012 VMware, Inc.
- *
- * For the license see COPYING.
- * ***** END LICENSE BLOCK *****
- */
-
-// For testing
-SockJS.getUtils = function(){
-    return utils;
-};
-
-SockJS.getIframeTransport = function(){
-    return IframeTransport;
-};
-//         [*] End of lib/test-hooks.js
-
-                  return SockJS;
-          })();
-if ('_sockjs_onload' in window) setTimeout(_sockjs_onload, 1);
-
-// AMD compliance
-if (typeof define === 'function' && define.amd) {
-    define('sockjs', [], function(){return SockJS;});
-}
-//     [*] End of lib/index.js
-
-// [*] End of lib/all.js
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/stomp.js b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/stomp.js
deleted file mode 100644 (file)
index ff9c327..0000000
+++ /dev/null
@@ -1,396 +0,0 @@
-// Generated by CoffeeScript 1.6.3
-/*
-   Stomp Over WebSocket http://www.jmesnil.net/stomp-websocket/doc/ | Apache License V2.0
-*/
-
-
-(function() {
-  var Byte, Client, Frame, Stomp,
-    __hasProp = {}.hasOwnProperty;
-
-  Byte = {
-    LF: '\x0A',
-    NULL: '\x00'
-  };
-
-  Frame = (function() {
-    function Frame(command, headers, body) {
-      this.command = command;
-      this.headers = headers != null ? headers : {};
-      this.body = body != null ? body : '';
-    }
-
-    Frame.prototype.toString = function() {
-      var lines, name, value, _ref;
-      lines = [this.command];
-      _ref = this.headers;
-      for (name in _ref) {
-        if (!__hasProp.call(_ref, name)) continue;
-        value = _ref[name];
-        lines.push("" + name + ":" + value);
-      }
-      if (this.body) {
-        lines.push("content-length:" + ('' + this.body).length);
-      }
-      lines.push(Byte.LF + this.body);
-      return lines.join(Byte.LF);
-    };
-
-    Frame._unmarshallSingle = function(data) {
-      var body, chr, command, divider, headerLines, headers, i, idx, len, line, start, trim, _i, _j, _ref, _ref1;
-      divider = data.search(RegExp("" + Byte.LF + Byte.LF));
-      headerLines = data.substring(0, divider).split(Byte.LF);
-      command = headerLines.shift();
-      headers = {};
-      trim = function(str) {
-        return str.replace(/^\s+|\s+$/g, '');
-      };
-      line = idx = null;
-      for (i = _i = 0, _ref = headerLines.length; 0 <= _ref ? _i < _ref : _i > _ref; i = 0 <= _ref ? ++_i : --_i) {
-        line = headerLines[i];
-        idx = line.indexOf(':');
-        headers[trim(line.substring(0, idx))] = trim(line.substring(idx + 1));
-      }
-      body = '';
-      start = divider + 2;
-      if (headers['content-length']) {
-        len = parseInt(headers['content-length']);
-        body = ('' + data).substring(start, start + len);
-      } else {
-        chr = null;
-        for (i = _j = start, _ref1 = data.length; start <= _ref1 ? _j < _ref1 : _j > _ref1; i = start <= _ref1 ? ++_j : --_j) {
-          chr = data.charAt(i);
-          if (chr === Byte.NULL) {
-            break;
-          }
-          body += chr;
-        }
-      }
-      return new Frame(command, headers, body);
-    };
-
-    Frame.unmarshall = function(datas) {
-      var data;
-      return (function() {
-        var _i, _len, _ref, _results;
-        _ref = datas.split(RegExp("" + Byte.NULL + Byte.LF + "*"));
-        _results = [];
-        for (_i = 0, _len = _ref.length; _i < _len; _i++) {
-          data = _ref[_i];
-          if ((data != null ? data.length : void 0) > 0) {
-            _results.push(Frame._unmarshallSingle(data));
-          }
-        }
-        return _results;
-      })();
-    };
-
-    Frame.marshall = function(command, headers, body) {
-      var frame;
-      frame = new Frame(command, headers, body);
-      return frame.toString() + Byte.NULL;
-    };
-
-    return Frame;
-
-  })();
-
-  Client = (function() {
-    function Client(ws) {
-      this.ws = ws;
-      this.ws.binaryType = "arraybuffer";
-      this.counter = 0;
-      this.connected = false;
-      this.heartbeat = {
-        outgoing: 10000,
-        incoming: 10000
-      };
-      this.maxWebSocketFrameSize = 16 * 1024;
-      this.subscriptions = {};
-    }
-
-    Client.prototype.debug = function(message) {
-      var _ref;
-      return typeof window !== "undefined" && window !== null ? (_ref = window.console) != null ? _ref.log(message) : void 0 : void 0;
-    };
-
-    Client.prototype._transmit = function(command, headers, body) {
-      var out;
-      out = Frame.marshall(command, headers, body);
-      if (typeof this.debug === "function") {
-        this.debug(">>> " + out);
-      }
-      while (true) {
-        if (out.length > this.maxWebSocketFrameSize) {
-          this.ws.send(out.substring(0, this.maxWebSocketFrameSize));
-          out = out.substring(this.maxWebSocketFrameSize);
-          if (typeof this.debug === "function") {
-            this.debug("remaining = " + out.length);
-          }
-        } else {
-          return this.ws.send(out);
-        }
-      }
-    };
-
-    Client.prototype._setupHeartbeat = function(headers) {
-      var serverIncoming, serverOutgoing, ttl, v, _ref, _ref1,
-        _this = this;
-      if ((_ref = headers.version) !== Stomp.VERSIONS.V1_1 && _ref !== Stomp.VERSIONS.V1_2) {
-        return;
-      }
-      _ref1 = (function() {
-        var _i, _len, _ref1, _results;
-        _ref1 = headers['heart-beat'].split(",");
-        _results = [];
-        for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
-          v = _ref1[_i];
-          _results.push(parseInt(v));
-        }
-        return _results;
-      })(), serverOutgoing = _ref1[0], serverIncoming = _ref1[1];
-      if (!(this.heartbeat.outgoing === 0 || serverIncoming === 0)) {
-        ttl = Math.max(this.heartbeat.outgoing, serverIncoming);
-        if (typeof this.debug === "function") {
-          this.debug("send PING every " + ttl + "ms");
-        }
-        this.pinger = typeof window !== "undefined" && window !== null ? window.setInterval(function() {
-          _this.ws.send(Byte.LF);
-          return typeof _this.debug === "function" ? _this.debug(">>> PING") : void 0;
-        }, ttl) : void 0;
-      }
-      if (!(this.heartbeat.incoming === 0 || serverOutgoing === 0)) {
-        ttl = Math.max(this.heartbeat.incoming, serverOutgoing);
-        if (typeof this.debug === "function") {
-          this.debug("check PONG every " + ttl + "ms");
-        }
-        return this.ponger = typeof window !== "undefined" && window !== null ? window.setInterval(function() {
-          var delta;
-          delta = Date.now() - _this.serverActivity;
-          if (delta > ttl * 2) {
-            if (typeof _this.debug === "function") {
-              _this.debug("did not receive server activity for the last " + delta + "ms");
-            }
-            return _this.ws.close();
-          }
-        }, ttl) : void 0;
-      }
-    };
-
-    Client.prototype.connect = function(login, passcode, connectCallback, errorCallback, vhost) {
-      var _this = this;
-      this.connectCallback = connectCallback;
-      if (typeof this.debug === "function") {
-        this.debug("Opening Web Socket...");
-      }
-      this.ws.onmessage = function(evt) {
-        var arr, c, data, frame, onreceive, _i, _len, _ref, _results;
-        data = typeof ArrayBuffer !== 'undefined' && evt.data instanceof ArrayBuffer ? (arr = new Uint8Array(evt.data), typeof _this.debug === "function" ? _this.debug("--- got data length: " + arr.length) : void 0, ((function() {
-          var _i, _len, _results;
-          _results = [];
-          for (_i = 0, _len = arr.length; _i < _len; _i++) {
-            c = arr[_i];
-            _results.push(String.fromCharCode(c));
-          }
-          return _results;
-        })()).join('')) : evt.data;
-        _this.serverActivity = Date.now();
-        if (data === Byte.LF) {
-          if (typeof _this.debug === "function") {
-            _this.debug("<<< PONG");
-          }
-          return;
-        }
-        if (typeof _this.debug === "function") {
-          _this.debug("<<< " + data);
-        }
-        _ref = Frame.unmarshall(data);
-        _results = [];
-        for (_i = 0, _len = _ref.length; _i < _len; _i++) {
-          frame = _ref[_i];
-          switch (frame.command) {
-            case "CONNECTED":
-              if (typeof _this.debug === "function") {
-                _this.debug("connected to server " + frame.headers.server);
-              }
-              _this.connected = true;
-              _this._setupHeartbeat(frame.headers);
-              _results.push(typeof _this.connectCallback === "function" ? _this.connectCallback(frame) : void 0);
-              break;
-            case "MESSAGE":
-              onreceive = _this.subscriptions[frame.headers.subscription] || _this.onreceive;
-              if (onreceive) {
-                _results.push(onreceive(frame));
-              } else {
-                _results.push(typeof _this.debug === "function" ? _this.debug("Unhandled received MESSAGE: " + frame) : void 0);
-              }
-              break;
-            case "RECEIPT":
-              _results.push(typeof _this.onreceipt === "function" ? _this.onreceipt(frame) : void 0);
-              break;
-            case "ERROR":
-              _results.push(typeof errorCallback === "function" ? errorCallback(frame) : void 0);
-              break;
-            default:
-              _results.push(typeof _this.debug === "function" ? _this.debug("Unhandled frame: " + frame) : void 0);
-          }
-        }
-        return _results;
-      };
-      this.ws.onclose = function() {
-        var msg;
-        msg = "Whoops! Lost connection to " + _this.ws.url;
-        if (typeof _this.debug === "function") {
-          _this.debug(msg);
-        }
-        _this._cleanUp();
-        return typeof errorCallback === "function" ? errorCallback(msg) : void 0;
-      };
-      return this.ws.onopen = function() {
-        var headers;
-        if (typeof _this.debug === "function") {
-          _this.debug('Web Socket Opened...');
-        }
-        headers = {
-          "accept-version": Stomp.VERSIONS.supportedVersions(),
-          "heart-beat": [_this.heartbeat.outgoing, _this.heartbeat.incoming].join(',')
-        };
-        if (vhost) {
-          headers.host = vhost;
-        }
-        if (login) {
-          headers.login = login;
-        }
-        if (passcode) {
-          headers.passcode = passcode;
-        }
-        return _this._transmit("CONNECT", headers);
-      };
-    };
-
-    Client.prototype.disconnect = function(disconnectCallback) {
-      this._transmit("DISCONNECT");
-      this.ws.onclose = null;
-      this.ws.close();
-      this._cleanUp();
-      return typeof disconnectCallback === "function" ? disconnectCallback() : void 0;
-    };
-
-    Client.prototype._cleanUp = function() {
-      this.connected = false;
-      if (this.pinger) {
-        if (typeof window !== "undefined" && window !== null) {
-          window.clearInterval(this.pinger);
-        }
-      }
-      if (this.ponger) {
-        return typeof window !== "undefined" && window !== null ? window.clearInterval(this.ponger) : void 0;
-      }
-    };
-
-    Client.prototype.send = function(destination, headers, body) {
-      if (headers == null) {
-        headers = {};
-      }
-      if (body == null) {
-        body = '';
-      }
-      headers.destination = destination;
-      return this._transmit("SEND", headers, body);
-    };
-
-    Client.prototype.subscribe = function(destination, callback, headers) {
-      if (headers == null) {
-        headers = {};
-      }
-      if (!headers.id) {
-        headers.id = "sub-" + this.counter++;
-      }
-      headers.destination = destination;
-      this.subscriptions[headers.id] = callback;
-      this._transmit("SUBSCRIBE", headers);
-      return headers.id;
-    };
-
-    Client.prototype.unsubscribe = function(id) {
-      delete this.subscriptions[id];
-      return this._transmit("UNSUBSCRIBE", {
-        id: id
-      });
-    };
-
-    Client.prototype.begin = function(transaction) {
-      return this._transmit("BEGIN", {
-        transaction: transaction
-      });
-    };
-
-    Client.prototype.commit = function(transaction) {
-      return this._transmit("COMMIT", {
-        transaction: transaction
-      });
-    };
-
-    Client.prototype.abort = function(transaction) {
-      return this._transmit("ABORT", {
-        transaction: transaction
-      });
-    };
-
-    Client.prototype.ack = function(messageID, subscription, headers) {
-      if (headers == null) {
-        headers = {};
-      }
-      headers["message-id"] = messageID;
-      headers.subscription = subscription;
-      return this._transmit("ACK", headers);
-    };
-
-    Client.prototype.nack = function(messageID, subscription, headers) {
-      if (headers == null) {
-        headers = {};
-      }
-      headers["message-id"] = messageID;
-      headers.subscription = subscription;
-      return this._transmit("NACK", headers);
-    };
-
-    return Client;
-
-  })();
-
-  Stomp = {
-    libVersion: "2.0.0-next",
-    VERSIONS: {
-      V1_0: '1.0',
-      V1_1: '1.1',
-      V1_2: '1.2',
-      supportedVersions: function() {
-        return '1.1,1.0';
-      }
-    },
-    client: function(url, protocols) {
-      var klass, ws;
-      if (protocols == null) {
-        protocols = ['v10.stomp', 'v11.stomp'];
-      }
-      klass = Stomp.WebSocketClass || WebSocket;
-      ws = new klass(url, protocols);
-      return new Client(ws);
-    },
-    over: function(ws) {
-      return new Client(ws);
-    },
-    Frame: Frame
-  };
-
-  if (typeof window !== "undefined" && window !== null) {
-    window.Stomp = Stomp;
-  } else if (typeof exports !== "undefined" && exports !== null) {
-    exports.Stomp = Stomp;
-    Stomp.WebSocketClass = require('./test/server.mock.js').StompServerMock;
-  } else {
-    self.Stomp = Stomp;
-  }
-
-}).call(this);
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/Makefile b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/package.mk b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/package.mk
deleted file mode 100644 (file)
index bec87f2..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-RELEASABLE:=true
-DEPS:=cowboy-wrapper sockjs-erlang-wrapper rabbitmq-stomp
-
-WITH_BROKER_TEST_COMMANDS:=rabbit_ws_test_all:all_tests()
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client.erl b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client.erl
deleted file mode 100644 (file)
index 7241f24..0000000
+++ /dev/null
@@ -1,97 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_ws_client).
--behaviour(gen_server).
-
--export([start_link/1]).
--export([sockjs_msg/2, sockjs_closed/1]).
-
--export([init/1, handle_call/3, handle_info/2, terminate/2,
-         code_change/3, handle_cast/2]).
-
--record(state, {conn, processor, parse_state}).
-
-%%----------------------------------------------------------------------------
-
-start_link(Params) ->
-    gen_server:start_link(?MODULE, Params, []).
-
-sockjs_msg(Pid, Data) ->
-    gen_server:cast(Pid, {sockjs_msg, Data}).
-
-sockjs_closed(Pid) ->
-    gen_server:cast(Pid, sockjs_closed).
-
-%%----------------------------------------------------------------------------
-
-init({Processor, Conn}) ->
-    ok = file_handle_cache:obtain(),
-    process_flag(trap_exit, true),
-    {ok, #state{conn        = Conn,
-                processor   = Processor,
-                parse_state = rabbit_stomp_frame:initial_state()}}.
-
-handle_cast({sockjs_msg, Data}, State = #state{processor   = Processor,
-                                               parse_state = ParseState}) ->
-    ParseState1 = process_received_bytes(Data, Processor, ParseState),
-    {noreply, State#state{parse_state = ParseState1}};
-
-handle_cast(sockjs_closed, State) ->
-    {stop, normal, State};
-
-handle_cast(Cast, State) ->
-    {stop, {odd_cast, Cast}, State}.
-
-%% TODO this is a bit rubbish - after the preview release we should
-%% make the credit_flow:send/1 invocation in
-%% rabbit_stomp_processor:process_frame/2 optional.
-handle_info({bump_credit, {_, _}}, State) ->
-    {noreply, State};
-
-handle_info(Info, State) ->
-    {stop, {odd_info, Info}, State}.
-
-handle_call(Request, _From, State) ->
-    {stop, {odd_request, Request}, State}.
-
-terminate(Reason, #state{conn = Conn, processor = Processor}) ->
-    ok = file_handle_cache:release(),
-    _ = case Reason of
-            normal -> % SockJS initiated exit
-                rabbit_stomp_processor:flush_and_die(Processor);
-            shutdown -> % STOMP died
-                Conn:close(1000, "STOMP died")
-        end,
-    ok.
-
-code_change(_OldVsn, State, _Extra) ->
-    {ok, State}.
-
-
-%%----------------------------------------------------------------------------
-
-process_received_bytes(Bytes, Processor, ParseState) ->
-    case rabbit_stomp_frame:parse(Bytes, ParseState) of
-        {ok, Frame, Rest} ->
-            rabbit_stomp_processor:process_frame(Processor, Frame),
-            ParseState1 = rabbit_stomp_frame:initial_state(),
-            process_received_bytes(Rest, Processor, ParseState1);
-        {more, ParseState1} ->
-            ParseState1
-    end.
-
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client_sup.erl b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client_sup.erl
deleted file mode 100644 (file)
index 146b92b..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_ws_client_sup).
--behaviour(supervisor2).
-
--export([start_client/1]).
--export([init/1]).
-
--include_lib("amqp_client/include/amqp_client.hrl").
--include_lib("rabbitmq_stomp/include/rabbit_stomp.hrl").
-
-%% --------------------------------------------------------------------------
-
-start_client({Conn}) ->
-    {ok, SupPid} = supervisor2:start_link(?MODULE, []),
-    {ok, Processor} = start_proc(SupPid, Conn),
-    {ok, Client} = supervisor2:start_child(
-                     SupPid, client_spec(Processor, Conn)),
-    {ok, SupPid, Client}.
-
-start_proc(SupPid, Conn) ->
-    StompConfig = #stomp_configuration{implicit_connect = false},
-
-    SendFun = fun (_Sync, Data) ->
-                      Conn:send(Data),
-                      ok
-              end,
-    Info = Conn:info(),
-    {PeerAddr, PeerPort} = proplists:get_value(peername, Info),
-    {SockAddr, SockPort} = proplists:get_value(sockname, Info),
-    Name = rabbit_misc:format("~s:~b -> ~s:~b",
-                              [rabbit_misc:ntoa(PeerAddr), PeerPort,
-                               rabbit_misc:ntoa(SockAddr), SockPort]),
-    AdapterInfo = #amqp_adapter_info{protocol        = {'Web STOMP', 0},
-                                     host            = SockAddr,
-                                     port            = SockPort,
-                                     peer_host       = PeerAddr,
-                                     peer_port       = PeerPort,
-                                     name            = list_to_binary(Name),
-                                     additional_info = [{ssl, false}]},
-
-    {ok, Processor} =
-        supervisor2:start_child(
-          SupPid, {rabbit_stomp_processor,
-                   {rabbit_stomp_processor, start_link, [StompConfig]},
-                   intrinsic, ?MAX_WAIT, worker,
-                   [rabbit_stomp_processor]}),
-    rabbit_stomp_processor:init_arg(
-      Processor, [SendFun, AdapterInfo, fun (_, _, _, _) -> ok end, none,
-                  PeerAddr]),
-    {ok, Processor}.
-
-client_spec(Processor, Conn) ->
-    {rabbit_ws_client, {rabbit_ws_client, start_link, [{Processor, Conn}]},
-     intrinsic, ?MAX_WAIT, worker, [rabbit_ws_client]}.
-
-init(_Any) ->
-    {ok, {{one_for_all, 0, 1}, []}}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbitmq_web_stomp.app.src b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbitmq_web_stomp.app.src
deleted file mode 100644 (file)
index 246922a..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-{application, rabbitmq_web_stomp,
- [
-  {description, "Rabbit WEB-STOMP - WebSockets to Stomp adapter"},
-  {vsn, "%%VSN%%"},
-  {modules, []},
-  {registered, []},
-  {mod, {rabbit_ws_app, []}},
-  {env, [{port, 15674},
-         {ssl_config, []}]},
-  {applications, [kernel, stdlib, rabbit, rabbitmq_stomp, cowboy, sockjs]}
- ]}.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_raw_websocket.erl b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_raw_websocket.erl
deleted file mode 100644 (file)
index d7b8af6..0000000
+++ /dev/null
@@ -1,71 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_ws_test_raw_websocket).
-
--include_lib("eunit/include/eunit.hrl").
-
-connection_test() ->
-    WS = rfc6455_client:new("ws://127.0.0.1:15674/stomp/websocket", self()),
-    {ok, _} = rfc6455_client:open(WS),
-    {close, _} = rfc6455_client:close(WS),
-    ok.
-
-
-raw_send(WS, Command, Headers) ->
-    raw_send(WS, Command, Headers, <<>>).
-raw_send(WS, Command, Headers, Body) ->
-    Frame = stomp:marshal(Command, Headers, Body),
-    rfc6455_client:send(WS, Frame).
-
-raw_recv(WS) ->
-    {ok, P} = rfc6455_client:recv(WS),
-    stomp:unmarshal(P).
-
-
-pubsub_test() ->
-    WS = rfc6455_client:new("ws://127.0.0.1:15674/stomp/websocket", self()),
-    {ok, _} = rfc6455_client:open(WS),
-    ok = raw_send(WS, "CONNECT", [{"login","guest"}, {"passcode", "guest"}]),
-
-    {<<"CONNECTED">>, _, <<>>} = raw_recv(WS),
-
-    Dst = "/topic/test-" ++ stomp:list_to_hex(binary_to_list(crypto:rand_bytes(8))),
-
-    ok = raw_send(WS, "SUBSCRIBE", [{"destination", Dst},
-                                    {"id", "s0"}]),
-
-    ok = raw_send(WS, "SEND", [{"destination", Dst},
-                              {"content-length", "3"}], <<"a\x00a">>),
-
-    {<<"MESSAGE">>, H, <<"a\x00a">>} = raw_recv(WS),
-    Dst = binary_to_list(proplists:get_value(<<"destination">>, H)),
-
-    {close, _} = rfc6455_client:close(WS),
-    ok.
-
-
-disconnect_test() ->
-    WS = rfc6455_client:new("ws://127.0.0.1:15674/stomp/websocket", self()),
-    {ok, _} = rfc6455_client:open(WS),
-    ok = raw_send(WS, "CONNECT", [{"login","guest"}, {"passcode", "guest"}]),
-
-    {<<"CONNECTED">>, _, <<>>} = raw_recv(WS),
-
-    ok = raw_send(WS, "DISCONNECT", []),
-    {close, {1005, _}} = rfc6455_client:recv(WS),
-
-    ok.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_sockjs_websocket.erl b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rabbit_ws_test_sockjs_websocket.erl
deleted file mode 100644 (file)
index 6aefcf2..0000000
+++ /dev/null
@@ -1,85 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rabbit_ws_test_sockjs_websocket).
-
--include_lib("eunit/include/eunit.hrl").
-
-connection_test() ->
-    WS = rfc6455_client:new("ws://127.0.0.1:15674/stomp/0/0/websocket", self()),
-    {ok, _} = rfc6455_client:open(WS),
-    {ok, <<"o">>} = rfc6455_client:recv(WS),
-
-    {close, _} = rfc6455_client:close(WS),
-    ok.
-
-
-sjs_send(WS, Command, Headers) ->
-    sjs_send(WS, Command, Headers, <<>>).
-sjs_send(WS, Command, Headers, Body) ->
-    StompFrame = stomp:marshal(Command, Headers, Body),
-    SockJSFrame = sockjs_json:encode([StompFrame]),
-    rfc6455_client:send(WS, SockJSFrame).
-
-sjs_recv(WS) ->
-    {ok, P} = rfc6455_client:recv(WS),
-    case P of
-        <<"a", JsonArr/binary>> ->
-            {ok, [StompFrame]} = sockjs_json:decode(JsonArr),
-            {ok, stomp:unmarshal(StompFrame)};
-        <<"c", JsonArr/binary>> ->
-            {ok, CloseReason} = sockjs_json:decode(JsonArr),
-            {close, CloseReason}
-    end.
-
-pubsub_test() ->
-    WS = rfc6455_client:new("ws://127.0.0.1:15674/stomp/0/0/websocket", self()),
-    {ok, _} = rfc6455_client:open(WS),
-    {ok, <<"o">>} = rfc6455_client:recv(WS),
-
-    ok = sjs_send(WS, "CONNECT", [{"login","guest"}, {"passcode", "guest"}]),
-
-    {ok, {<<"CONNECTED">>, _, <<>>}} = sjs_recv(WS),
-
-    Dst = "/topic/test-" ++ stomp:list_to_hex(binary_to_list(crypto:rand_bytes(8))),
-
-    ok = sjs_send(WS, "SUBSCRIBE", [{"destination", Dst},
-                                    {"id", "s0"}]),
-
-    ok = sjs_send(WS, "SEND", [{"destination", Dst},
-                               {"content-length", "3"}], <<"a\x00a">>),
-
-    {ok, {<<"MESSAGE">>, H, <<"a\x00a">>}} = sjs_recv(WS),
-    Dst = binary_to_list(proplists:get_value(<<"destination">>, H)),
-
-    {close, _} = rfc6455_client:close(WS),
-
-    ok.
-
-
-disconnect_test() ->
-    WS = rfc6455_client:new("ws://127.0.0.1:15674/stomp/0/0/websocket", self()),
-    {ok, _} = rfc6455_client:open(WS),
-    {ok, <<"o">>} = rfc6455_client:recv(WS),
-
-    ok = sjs_send(WS, "CONNECT", [{"login","guest"}, {"passcode", "guest"}]),
-    {ok, {<<"CONNECTED">>, _, <<>>}} = sjs_recv(WS),
-
-    ok = sjs_send(WS, "DISCONNECT", []),
-    {close, [1000, _]} = sjs_recv(WS),
-
-    ok.
-
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rfc6455_client.erl b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/rfc6455_client.erl
deleted file mode 100644 (file)
index 6394f10..0000000
+++ /dev/null
@@ -1,236 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(rfc6455_client).
-
--export([new/2, open/1, recv/1, send/2, close/1, close/2]).
-
--record(state, {host, port, addr, path, ppid, socket, data, phase}).
-
-%% --------------------------------------------------------------------------
-
-new(WsUrl, PPid) ->
-    crypto:start(),
-    "ws://" ++ Rest = WsUrl,
-    [Addr, Path] = split("/", Rest, 1),
-    [Host, MaybePort] = split(":", Addr, 1, empty),
-    Port = case MaybePort of
-               empty -> 80;
-               V     -> {I, ""} = string:to_integer(V), I
-           end,
-    State = #state{host = Host,
-                   port = Port,
-                   addr = Addr,
-                   path = "/" ++ Path,
-                   ppid = PPid},
-    spawn(fun () ->
-                  start_conn(State)
-          end).
-
-open(WS) ->
-    receive
-        {rfc6455, open, WS, Opts} ->
-            {ok, Opts};
-        {rfc6455, close, WS, R} ->
-            {close, R}
-    end.
-
-recv(WS) ->
-    receive
-        {rfc6455, recv, WS, Payload} ->
-            {ok, Payload};
-        {rfc6455, close, WS, R} ->
-            {close, R}
-    end.
-
-send(WS, IoData) ->
-    WS ! {send, IoData},
-    ok.
-
-close(WS) ->
-    close(WS, {1000, ""}).
-
-close(WS, WsReason) ->
-    WS ! {close, WsReason},
-    receive
-        {rfc6455, close, WS, R} ->
-            {close, R}
-    end.
-
-
-%% --------------------------------------------------------------------------
-
-start_conn(State) ->
-    {ok, Socket} = gen_tcp:connect(State#state.host, State#state.port,
-                                   [binary,
-                                    {packet, 0}]),
-    Key = base64:encode_to_string(crypto:rand_bytes(16)),
-    gen_tcp:send(Socket,
-        "GET " ++ State#state.path ++ " HTTP/1.1\r\n" ++
-        "Host: " ++ State#state.addr ++ "\r\n" ++
-        "Upgrade: websocket\r\n" ++
-        "Connection: Upgrade\r\n" ++
-        "Sec-WebSocket-Key: " ++ Key ++ "\r\n" ++
-        "Origin: null\r\n" ++
-        "Sec-WebSocket-Version: 13\r\n\r\n"),
-
-    loop(State#state{socket = Socket,
-                     data   = <<>>,
-                     phase = opening}).
-
-do_recv(State = #state{phase = opening, ppid = PPid, data = Data}) ->
-    case split("\r\n\r\n", binary_to_list(Data), 1, empty) of
-        [_Http, empty] -> State;
-        [Http, Data1]   ->
-            %% TODO: don't ignore http response data, verify key
-            PPid ! {rfc6455, open, self(), [{http_response, Http}]},
-            State#state{phase = open,
-                        data = Data1}
-    end;
-do_recv(State = #state{phase = Phase, data = Data, socket = Socket, ppid = PPid})
-  when Phase =:= open orelse Phase =:= closing ->
-    R = case Data of
-            <<F:1, _:3, O:4, 0:1, L:7, Payload:L/binary, Rest/binary>>
-              when L < 126 ->
-                {F, O, Payload, Rest};
-
-            <<F:1, _:3, O:4, 0:1, 126:7, L2:16, Payload:L2/binary, Rest/binary>> ->
-                {F, O, Payload, Rest};
-
-            <<F:1, _:3, O:4, 0:1, 127:7, L2:64, Payload:L2/binary, Rest/binary>> ->
-                {F, O, Payload, Rest};
-
-            <<_:1, _:3, _:4, 1:1, _/binary>> ->
-                %% According o rfc6455 5.1 the server must not mask any frames.
-                die(Socket, PPid, {1006, "Protocol error"}, normal);
-            _ ->
-                moredata
-        end,
-    case R of
-        moredata ->
-            State;
-        _ -> do_recv2(State, R)
-    end.
-
-do_recv2(State = #state{phase = Phase, socket = Socket, ppid = PPid}, R) ->
-    case R of
-        {1, 1, Payload, Rest} ->
-            PPid ! {rfc6455, recv, self(), Payload},
-            State#state{data = Rest};
-        {1, 8, Payload, _Rest} ->
-            WsReason = case Payload of
-                           <<WC:16, WR/binary>> -> {WC, WR};
-                           <<>> -> {1005, "No status received"}
-                       end,
-            case Phase of
-                open -> %% echo
-                    do_close(State, WsReason),
-                    gen_tcp:close(Socket);
-                closing ->
-                    ok
-            end,
-            die(Socket, PPid, WsReason, normal);
-        {_, _, _, Rest2} ->
-            io:format("Unknown frame type~n"),
-            die(Socket, PPid, {1006, "Unknown frame type"}, normal)
-    end.
-
-encode_frame(F, O, Payload) ->
-    Mask = crypto:rand_bytes(4),
-    MaskedPayload = apply_mask(Mask, iolist_to_binary(Payload)),
-
-    L = byte_size(MaskedPayload),
-    IoData = case L of
-                 _ when L < 126 ->
-                     [<<F:1, 0:3, O:4, 1:1, L:7>>, Mask, MaskedPayload];
-                 _ when L < 65536 ->
-                     [<<F:1, 0:3, O:4, 1:1, 126:7, L:16>>, Mask, MaskedPayload];
-                 _ ->
-                     [<<F:1, 0:3, O:4, 1:1, 127:7, L:64>>, Mask, MaskedPayload]
-           end,
-    iolist_to_binary(IoData).
-
-do_send(State = #state{socket = Socket}, Payload) ->
-    gen_tcp:send(Socket, encode_frame(1, 1, Payload)),
-    State.
-
-do_close(State = #state{socket = Socket}, {Code, Reason}) ->
-    Payload = iolist_to_binary([<<Code:16>>, Reason]),
-    gen_tcp:send(Socket, encode_frame(1, 8, Payload)),
-    State#state{phase = closing}.
-
-
-loop(State = #state{socket = Socket, ppid = PPid, data = Data,
-                    phase = Phase}) ->
-    receive
-        {tcp, Socket, Bin} ->
-            State1 = State#state{data = iolist_to_binary([Data, Bin])},
-            loop(do_recv(State1));
-        {send, Payload} when Phase == open ->
-            loop(do_send(State, Payload));
-        {tcp_closed, Socket} ->
-            die(Socket, PPid, {1006, "Connection closed abnormally"}, normal);
-        {close, WsReason} when Phase == open ->
-            loop(do_close(State, WsReason))
-    end.
-
-
-die(Socket, PPid, WsReason, Reason) ->
-    gen_tcp:shutdown(Socket, read_write),
-    PPid ! {rfc6455, close, self(), WsReason},
-    exit(Reason).
-
-
-%% --------------------------------------------------------------------------
-
-split(SubStr, Str, Limit) ->
-    split(SubStr, Str, Limit, "").
-
-split(SubStr, Str, Limit, Default) ->
-    Acc = split(SubStr, Str, Limit, [], Default),
-    lists:reverse(Acc).
-split(_SubStr, Str, 0, Acc, _Default) -> [Str | Acc];
-split(SubStr, Str, Limit, Acc, Default) ->
-    {L, R} = case string:str(Str, SubStr) of
-                 0 -> {Str, Default};
-                 I -> {string:substr(Str, 1, I-1),
-                       string:substr(Str, I+length(SubStr))}
-             end,
-    split(SubStr, R, Limit-1, [L | Acc], Default).
-
-
-apply_mask(Mask, Data) when is_number(Mask) ->
-    apply_mask(<<Mask:32>>, Data);
-
-apply_mask(<<0:32>>, Data) ->
-    Data;
-apply_mask(Mask, Data) ->
-    iolist_to_binary(lists:reverse(apply_mask2(Mask, Data, []))).
-
-apply_mask2(M = <<Mask:32>>, <<Data:32, Rest/binary>>, Acc) ->
-    T = Data bxor Mask,
-    apply_mask2(M, Rest, [<<T:32>> | Acc]);
-apply_mask2(<<Mask:24, _:8>>, <<Data:24>>, Acc) ->
-    T = Data bxor Mask,
-    [<<T:24>> | Acc];
-apply_mask2(<<Mask:16, _:16>>, <<Data:16>>, Acc) ->
-    T = Data bxor Mask,
-    [<<T:16>> | Acc];
-apply_mask2(<<Mask:8, _:24>>, <<Data:8>>, Acc) ->
-    T = Data bxor Mask,
-    [<<T:8>> | Acc];
-apply_mask2(_, <<>>, Acc) ->
-    Acc.
diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/stomp.erl b/rabbitmq-server/plugins-src/rabbitmq-web-stomp/test/src/stomp.erl
deleted file mode 100644 (file)
index 710a7f6..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-%%   The contents of this file are subject to the Mozilla Public License
-%%   Version 1.1 (the "License"); you may not use this file except in
-%%   compliance with the License. You may obtain a copy of the License at
-%%   http://www.mozilla.org/MPL/
-%%
-%%   Software distributed under the License is distributed on an "AS IS"
-%%   basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%%   License for the specific language governing rights and limitations
-%%   under the License.
-%%
-%%   The Original Code is RabbitMQ Management Console.
-%%
-%%   The Initial Developer of the Original Code is GoPivotal, Inc.
-%%   Copyright (c) 2012-2014 GoPivotal, Inc.  All rights reserved.
-%%
-
--module(stomp).
-
--export([marshal/2, marshal/3, unmarshal/1]).
-
--export([list_to_hex/1]).
-
-marshal(Command, Headers) ->
-    marshal(Command, Headers, <<>>).
-marshal(Command, Headers, Body) ->
-    Lines = [Command] ++ [[K, ":", V] || {K, V} <- Headers] ++ [["\n", Body]],
-    iolist_to_binary([iolist_join(Lines, "\n"), "\x00"]).
-
-unmarshal(Frame) ->
-    [Head, Body] = binary:split(Frame, <<"\n\n">>),
-    [Command | HeaderLines] = binary:split(Head, <<"\n">>, [global]),
-    Headers = [list_to_tuple(binary:split(Line, <<":">>)) || Line <- HeaderLines],
-    [Body1, <<>>] = binary:split(Body, [<<0, 10>>],[{scope,{byte_size(Body)-2, 2}}]),
-    {Command, Headers, Body1}.
-
-%% ----------
-
-iolist_join(List, Separator) ->
-    lists:reverse(iolist_join2(List, Separator, [])).
-
-iolist_join2([], _Separator, Acc) ->
-    Acc;
-iolist_join2([E | List], Separator, Acc) ->
-    iolist_join2(List, Separator, [E, Separator | Acc]).
-
-
-list_to_hex(L) ->
-    lists:flatten(lists:map(fun(X) -> int_to_hex(X) end, L)).
-int_to_hex(N) when N < 256 ->
-    [hex(N div 16), hex(N rem 16)].
-hex(N) when N < 10 ->
-    $0+N;
-hex(N) when N >= 10, N < 16 ->
-    $a + (N-10).
diff --git a/rabbitmq-server/plugins-src/release.mk b/rabbitmq-server/plugins-src/release.mk
deleted file mode 100644 (file)
index df72761..0000000
+++ /dev/null
@@ -1,273 +0,0 @@
-# This is a TEMPORARY umbrella makefile, that will likely not survive
-# the repo split.
-
-VERSION=0.0.0
-VDIR=v$(VERSION)
-TAG=rabbitmq_$(subst .,_,$(VDIR))
-BRANCH=default
-
-SIGNING_KEY=056E8E56
-SIGNING_USER_EMAIL=info@rabbitmq.com
-SIGNING_USER_ID=RabbitMQ Release Signing Key <info@rabbitmq.com>
-
-# Misc options to pass to git commands
-GIT_OPTS=
-
-# Misc options to pass to ssh commands
-SSH_OPTS=
-
-PACKAGES_DIR=packages
-TMP_DIR=tmp
-
-SERVER_PACKAGES_DIR=$(PACKAGES_DIR)/rabbitmq-server/$(VDIR)
-MANPAGES_DIR=$(SERVER_PACKAGES_DIR)/man
-JAVA_CLIENT_PACKAGES_DIR=$(PACKAGES_DIR)/rabbitmq-java-client/$(VDIR)
-DOTNET_CLIENT_PACKAGES_DIR=$(PACKAGES_DIR)/rabbitmq-dotnet-client/$(VDIR)
-ERLANG_CLIENT_PACKAGES_DIR=$(PACKAGES_DIR)/rabbitmq-erlang-client/$(VDIR)
-PLUGINS_DIR=$(PACKAGES_DIR)/plugins/$(VDIR)
-PLUGINS_SRC_DIR=$(TMP_DIR)/plugins-src/$(VDIR)
-ABSOLUTE_PLUGINS_DIR=$(CURDIR)/$(PLUGINS_DIR)
-ABSOLUTE_PLUGINS_SRC_DIR=$(CURDIR)/$(PLUGINS_SRC_DIR)
-
-REQUIRED_EMULATOR_VERSION=R13B03
-ACTUAL_EMULATOR_VERSION=$(shell erl -noshell -eval 'io:format("~s",[erlang:system_info(otp_release)]),init:stop().')
-SKIP_EMULATOR_VERSION_CHECK=
-
-REPOS:=rabbitmq-codegen rabbitmq-server rabbitmq-java-client rabbitmq-dotnet-client rabbitmq-test
-
-GITREPOBASE:=$(shell dirname `git remote -v 2>/dev/null | awk '/^origin\t.+ \(fetch\)$$/ { print $$2; }'` 2>/dev/null)
-
-ifeq ($(GITREPOBASE),)
-GITREPOBASE=https://github.com/rabbitmq
-endif
-
-.PHONY: all
-all:
-       @echo Please choose a target from the Makefile.
-
-.PHONY: dist
-ifeq "$(UNOFFICIAL_RELEASE)$(GNUPG_PATH)" ""
-dist:
-       @echo "You must specify one of UNOFFICIAL_RELEASE (to true, if you don't want to sign packages) or GNUPG_PATH (to the location of the RabbitMQ keyring) when making dist."
-       @false
-else
-dist: rabbitmq-server-artifacts
-dist: rabbitmq-java-artifacts
-ifeq ($(SKIP_DOTNET_CLIENT),)
-dist: rabbitmq-dotnet-artifacts
-endif
-dist: rabbitmq-erlang-client-artifacts
-dist: rabbitmq-plugins-srcdist
-dist: rabbitmq-plugins-artifacts
-endif
-
-.PHONY: clean
-clean: clean-packaging
-       $(MAKE) -C . clean
-
-.PHONY: clean-packaging
-       rm -rf $(PACKAGES_DIR) $(TMP_DIR) .*.checkout
-
-.PHONY: prepare
-prepare:
-ifeq "$(SKIP_EMULATOR_VERSION_CHECK)" ""
-       @[ "$(REQUIRED_EMULATOR_VERSION)" = "$(ACTUAL_EMULATOR_VERSION)" ] || \
-               (echo "You are trying to compile with the wrong Erlang/OTP release."; \
-               echo "Please use emulator version $(REQUIRED_EMULATOR_VERSION)."; \
-               echo "Or skip the version check by setting the variable SKIP_EMULATOR_VERSION_CHECK."; \
-               [ -n "$(UNOFFICIAL_RELEASE)" ] )
-endif
-       @echo Checking the presence of the tools necessary to build a release on a Debian based OS.
-       [ -f "/etc/debian_version" ] && dpkg -L cdbs elinks fakeroot findutils gnupg gzip perl python python-simplejson rpm rsync wget reprepro tar tofrodos zip python-pexpect openssl xmlto xsltproc git-core nsis > /dev/null || echo Not a Debian system
-       @echo All required tools are installed, great!
-
-.PHONY: rabbitmq-server-clean
-rabbitmq-server-clean:
-       $(MAKE) -C rabbitmq-server distclean
-       $(MAKE) -C rabbitmq-server/packaging/generic-unix clean
-       $(MAKE) -C rabbitmq-server/packaging/windows clean
-       $(MAKE) -C rabbitmq-server/packaging/windows-exe clean
-       $(MAKE) -C rabbitmq-server/packaging/debs/Debian clean
-       $(MAKE) -C rabbitmq-server/packaging/debs/apt-repository clean
-       $(MAKE) -C rabbitmq-server/packaging/RPMS/Fedora clean
-       $(MAKE) -C rabbitmq-server/packaging/macports clean
-
-.PHONY: rabbitmq-server-artifacts
-rabbitmq-server-artifacts: rabbitmq-server-srcdist
-rabbitmq-server-artifacts: rabbitmq-server-website-manpages
-rabbitmq-server-artifacts: rabbitmq-server-generic-unix-packaging
-rabbitmq-server-artifacts: rabbitmq-server-windows-packaging
-rabbitmq-server-artifacts: rabbitmq-server-windows-exe-packaging
-rabbitmq-server-artifacts: rabbitmq-server-debian-packaging
-rabbitmq-server-artifacts: rabbitmq-server-rpm-packaging
-
-.PHONY: rabbitmq-server-srcdist
-rabbitmq-server-srcdist: prepare rabbitmq-plugins-srcdist
-       $(MAKE) -C rabbitmq-server srcdist VERSION=$(VERSION) PLUGINS_SRC_DIR=$(ABSOLUTE_PLUGINS_SRC_DIR)
-       mkdir -p $(SERVER_PACKAGES_DIR)
-       cp rabbitmq-server/dist/rabbitmq-server-*.tar.gz rabbitmq-server/dist/rabbitmq-server-*.zip $(SERVER_PACKAGES_DIR)
-
-.PHONY: rabbitmq-server-website-manpages
-rabbitmq-server-website-manpages: rabbitmq-server-srcdist
-       $(MAKE) -C rabbitmq-server docs_all VERSION=$(VERSION)
-       mkdir -p $(MANPAGES_DIR)
-       cp rabbitmq-server/docs/*.man.xml $(MANPAGES_DIR)
-
-.PHONY: rabbitmq-server-generic-unix-packaging
-rabbitmq-server-generic-unix-packaging: rabbitmq-server-srcdist
-       $(MAKE) -C rabbitmq-server/packaging/generic-unix dist VERSION=$(VERSION)
-       cp rabbitmq-server/packaging/generic-unix/rabbitmq-server-generic-unix-*.tar.gz $(SERVER_PACKAGES_DIR)
-
-.PHONY: rabbitmq-server-mac-standalone-packaging
-rabbitmq-server-mac-standalone-packaging: rabbitmq-server-srcdist
-       $(MAKE) -C rabbitmq-server/packaging/standalone dist VERSION=$(VERSION) OS=mac
-       cp rabbitmq-server/packaging/standalone/rabbitmq-server-mac-standalone-*.tar.gz $(SERVER_PACKAGES_DIR)
-
-.PHONY: rabbitmq-server-windows-packaging
-rabbitmq-server-windows-packaging: rabbitmq-server-srcdist
-       $(MAKE) -C rabbitmq-server/packaging/windows dist VERSION=$(VERSION)
-       cp rabbitmq-server/packaging/windows/rabbitmq-server-windows-*.zip $(SERVER_PACKAGES_DIR)
-
-.PHONY: rabbitmq-server-windows-exe-packaging
-rabbitmq-server-windows-exe-packaging: rabbitmq-server-windows-packaging
-       $(MAKE) -C rabbitmq-server/packaging/windows-exe clean
-       $(MAKE) -C rabbitmq-server/packaging/windows-exe dist VERSION=$(VERSION)
-       cp rabbitmq-server/packaging/windows-exe/rabbitmq-server-*.exe $(SERVER_PACKAGES_DIR)
-
-.PHONY: rabbitmq-server-debian-packaging
-rabbitmq-server-debian-packaging: rabbitmq-server-srcdist
-       $(MAKE) -C rabbitmq-server/packaging/debs/Debian package \
-               UNOFFICIAL_RELEASE=$(UNOFFICIAL_RELEASE) \
-               GNUPG_PATH=$(GNUPG_PATH) \
-               VERSION=$(VERSION) \
-               SIGNING_KEY_ID=$(SIGNING_KEY)
-       cp rabbitmq-server/packaging/debs/Debian/rabbitmq-server*$(VERSION)*.deb $(SERVER_PACKAGES_DIR)
-       cp rabbitmq-server/packaging/debs/Debian/rabbitmq-server*$(VERSION)*.diff.gz $(SERVER_PACKAGES_DIR)
-       cp rabbitmq-server/packaging/debs/Debian/rabbitmq-server*$(VERSION)*.orig.tar.gz $(SERVER_PACKAGES_DIR)
-       cp rabbitmq-server/packaging/debs/Debian/rabbitmq-server*$(VERSION)*.dsc $(SERVER_PACKAGES_DIR)
-       cp rabbitmq-server/packaging/debs/Debian/rabbitmq-server*$(VERSION)*.changes $(SERVER_PACKAGES_DIR)
-       $(MAKE) -C rabbitmq-server/packaging/debs/apt-repository all \
-               UNOFFICIAL_RELEASE=$(UNOFFICIAL_RELEASE) \
-               GNUPG_PATH=$(GNUPG_PATH) \
-               SIGNING_USER_EMAIL=$(SIGNING_USER_EMAIL)
-       cp -r rabbitmq-server/packaging/debs/apt-repository/debian $(PACKAGES_DIR)
-
-.PHONY: rabbitmq-server-rpm-packaging
-rabbitmq-server-rpm-packaging: rabbitmq-server-srcdist
-       for distro in fedora suse ; do \
-         $(MAKE) -C rabbitmq-server/packaging/RPMS/Fedora rpms VERSION=$(VERSION) RPM_OS=$$distro && \
-         find rabbitmq-server/packaging/RPMS/Fedora -name "*.rpm" -exec cp '{}' $(SERVER_PACKAGES_DIR) ';' ; \
-       done
-
-# This target ssh's into the OSX host in order to finalize the
-# macports repo, so it is not invoked by rabbitmq-server-artifacts.
-# Note that the "clean" below is significant: Because the REAL_WEB_URL
-# environment variable might change, we need to rebuild the macports
-# artifacts at each deploy.
-.PHONY: rabbitmq-server-macports-packaging
-rabbitmq-server-macports-packaging:
-       $(MAKE) -C rabbitmq-server/packaging/macports clean macports VERSION=$(VERSION)
-       cp -r rabbitmq-server/packaging/macports/macports $(PACKAGES_DIR)
-
-
-.PHONY: rabbitmq-java-artifacts
-rabbitmq-java-artifacts: prepare
-       $(MAKE) -C rabbitmq-java-client dist VERSION=$(VERSION)
-       mkdir -p $(JAVA_CLIENT_PACKAGES_DIR)
-       cp rabbitmq-java-client/build/*.tar.gz $(JAVA_CLIENT_PACKAGES_DIR)
-       cp rabbitmq-java-client/build/*.zip $(JAVA_CLIENT_PACKAGES_DIR)
-       cd $(JAVA_CLIENT_PACKAGES_DIR); unzip -q rabbitmq-java-client-javadoc-$(VERSION).zip
-
-
-.PHONY: rabbitmq-dotnet-artifacts
-rabbitmq-dotnet-artifacts: prepare
-       $(MAKE) -C rabbitmq-dotnet-client dist RABBIT_VSN=$(VERSION)
-       mkdir -p $(DOTNET_CLIENT_PACKAGES_DIR)
-       cp -a rabbitmq-dotnet-client/release/* $(DOTNET_CLIENT_PACKAGES_DIR)
-
-
-.PHONY: rabbitmq-erlang-client-artifacts
-rabbitmq-erlang-client-artifacts: prepare
-       $(MAKE) -C rabbitmq-erlang-client distribution VERSION=$(VERSION)
-       mkdir -p $(ERLANG_CLIENT_PACKAGES_DIR)
-       cp rabbitmq-erlang-client/dist/*.ez $(ERLANG_CLIENT_PACKAGES_DIR)
-       cp rabbitmq-erlang-client/dist/*.tar.gz $(ERLANG_CLIENT_PACKAGES_DIR)
-       cp -r rabbitmq-erlang-client/doc/ $(ERLANG_CLIENT_PACKAGES_DIR)
-
-
-.PHONY: rabbitmq-plugins-artifacts
-rabbitmq-plugins-artifacts:
-       $(MAKE) -C . plugins-dist PLUGINS_DIST_DIR=$(ABSOLUTE_PLUGINS_DIR) VERSION=$(VERSION)
-
-.PHONY: rabbitmq-plugins-srcdist
-rabbitmq-plugins-srcdist:
-       $(MAKE) -C . plugins-srcdist PLUGINS_SRC_DIST_DIR=$(ABSOLUTE_PLUGINS_SRC_DIR) VERSION=$(VERSION)
-
-.PHONY: sign-artifacts
-ifneq "$(UNOFFICIAL_RELEASE)" ""
-sign-artifacts:
-       true
-else
-sign-artifacts:
-       python util/nopassphrase.py \
-            rpm --addsign \
-               --define '_signature gpg' \
-               --define '_gpg_path $(GNUPG_PATH)/.gnupg/' \
-               --define '_gpg_name $(SIGNING_USER_ID)' \
-               $(PACKAGES_DIR)/*/*/*.rpm
-       for p in \
-               $(SERVER_PACKAGES_DIR)/* \
-               $(JAVA_CLIENT_PACKAGES_DIR)/* \
-               $(ERLANG_CLIENT_PACKAGES_DIR)/* \
-       ; do \
-               [ -f $$p ] && \
-                       HOME=$(GNUPG_PATH) gpg --default-key $(SIGNING_KEY) -abs -o $$p.asc $$p ; \
-       done
-endif
-
-###########################################################################
-
-DEPLOY_HOST=localhost
-DEPLOY_PATH=/tmp/rabbitmq/extras/releases
-DEPLOY_DEST=$(DEPLOY_HOST):$(DEPLOY_PATH)
-
-RSYNC_CMD=rsync -rpl --delete-after
-
-DEPLOYMENT_SUBDIRECTORIES=rabbitmq-server rabbitmq-java-client rabbitmq-dotnet-client rabbitmq-erlang-client
-
-DEPLOY_RSYNC_CMDS=\
-       set -x -e; \
-       for subdirectory in $(DEPLOYMENT_SUBDIRECTORIES) ; do \
-               ssh $(SSH_OPTS) $(DEPLOY_HOST) "(cd $(DEPLOY_PATH); if [ ! -d $$subdirectory ] ; then mkdir -p $$subdirectory; chmod g+w $$subdirectory; fi)"; \
-               $(RSYNC_CMD) $(PACKAGES_DIR)/$$subdirectory/* \
-                   $(DEPLOY_DEST)/$$subdirectory ; \
-       done; \
-       for subdirectory in debian ; do \
-               $(RSYNC_CMD) $(PACKAGES_DIR)/$$subdirectory \
-                   $(DEPLOY_DEST); \
-       done; \
-       unpacked_javadoc_dir=`(cd packages/rabbitmq-java-client; ls -td */rabbitmq-java-client-javadoc-*/ | head -1)`; \
-       ssh $(SSH_OPTS) $(DEPLOY_HOST) "(cd $(DEPLOY_PATH)/rabbitmq-java-client; rm -f current-javadoc; ln -s $$unpacked_javadoc_dir current-javadoc)"; \
-       ssh $(SSH_OPTS) $(DEPLOY_HOST) "(cd $(DEPLOY_PATH)/rabbitmq-server; rm -f current; ln -s $(VDIR) current)"; \
-
-deploy: verify-signatures fixup-permissions-for-deploy
-       $(DEPLOY_RSYNC_CMDS)
-
-deploy-live: verify-signatures deploy-maven fixup-permissions-for-deploy
-       $(DEPLOY_RSYNC_CMDS)
-
-fixup-permissions-for-deploy:
-       chmod -R g+w $(PACKAGES_DIR)
-       chmod g+s `find $(PACKAGES_DIR) -type d`
-
-verify-signatures:
-       for file in `find $(PACKAGES_DIR) -type f -name "*.asc"`; do \
-           echo "Checking $$file" ; \
-           if ! HOME=$(GNUPG_PATH) gpg --verify $$file `echo $$file | sed -e 's/\.asc$$//'`; then \
-               bad_signature=1 ; \
-           fi ; \
-       done ; \
-       [ -z "$$bad_signature" ]
-
-deploy-maven: verify-signatures
-       $(MAKE) -C rabbitmq-java-client stage-and-promote-maven-bundle SIGNING_KEY=$(SIGNING_KEY) VERSION=$(VERSION) GNUPG_PATH=$(GNUPG_PATH)
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0000-remove-spec-patch.diff b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0000-remove-spec-patch.diff
deleted file mode 100644 (file)
index a7f6261..0000000
+++ /dev/null
@@ -1,816 +0,0 @@
-diff --git a/src/mochijson2_fork.erl b/src/mochijson2_fork.erl
-index 355f068..a088d9d 100644
---- a/src/mochijson2_fork.erl
-+++ b/src/mochijson2_fork.erl
-@@ -91,17 +91,17 @@
- -define(IS_WHITESPACE(C),
-         (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
---type(decoder_option() :: any()).
---type(handler_option() :: any()).
--
---type(json_string() :: atom | binary()).
---type(json_number() :: integer() | float()).
---type(json_array() :: [json_term()]).
---type(json_object() :: {struct, [{json_string(), json_term()}]}).
---type(json_eep18_object() :: {[{json_string(), json_term()}]}).
---type(json_iolist() :: {json, iolist()}).
---type(json_term() :: json_string() | json_number() | json_array() |
--                    json_object() | json_eep18_object() | json_iolist()).
-+%% -type(decoder_option() :: any()).
-+%% -type(handler_option() :: any()).
-+
-+%% -type(json_string() :: atom | binary()).
-+%% -type(json_number() :: integer() | float()).
-+%% -type(json_array() :: [json_term()]).
-+%% -type(json_object() :: {struct, [{json_string(), json_term()}]}).
-+%% -type(json_eep18_object() :: {[{json_string(), json_term()}]}).
-+%% -type(json_iolist() :: {json, iolist()}).
-+%% -type(json_term() :: json_string() | json_number() | json_array() |
-+%%                    json_object() | json_eep18_object() | json_iolist()).
- -record(encoder, {handler=null,
-                   utf8=false}).
-@@ -112,27 +112,27 @@
-                   column=1,
-                   state=null}).
---type(utf8_option() :: boolean()).
---type(encoder_option() :: handler_option() | utf8_option()).
---spec encoder([encoder_option()]) -> function().
-+%% -type(utf8_option() :: boolean()).
-+%% -type(encoder_option() :: handler_option() | utf8_option()).
-+%% -spec encoder([encoder_option()]) -> function().
- %% @doc Create an encoder/1 with the given options.
- %% Emit unicode as utf8 (default - false)
- encoder(Options) ->
-     State = parse_encoder_options(Options, #encoder{}),
-     fun (O) -> json_encode(O, State) end.
---spec encode(json_term()) -> iolist().
-+%% -spec encode(json_term()) -> iolist().
- %% @doc Encode the given as JSON to an iolist.
- encode(Any) ->
-     json_encode(Any, #encoder{}).
---spec decoder([decoder_option()]) -> function().
-+%% -spec decoder([decoder_option()]) -> function().
- %% @doc Create a decoder/1 with the given options.
- decoder(Options) ->
-     State = parse_decoder_options(Options, #decoder{}),
-     fun (O) -> json_decode(O, State) end.
---spec decode(iolist(), [{format, proplist | eep18 | struct}]) -> json_term().
-+%% -spec decode(iolist(), [{format, proplist | eep18 | struct}]) -> json_term().
- %% @doc Decode the given iolist to Erlang terms using the given object format
- %%      for decoding, where proplist returns JSON objects as [{binary(), json_term()}]
- %%      proplists, eep18 returns JSON objects as {[binary(), json_term()]}, and struct
-@@ -140,7 +140,7 @@ decoder(Options) ->
- decode(S, Options) ->
-     json_decode(S, parse_decoder_options(Options, #decoder{})).
---spec decode(iolist()) -> json_term().
-+%% -spec decode(iolist()) -> json_term().
- %% @doc Decode the given iolist to Erlang terms.
- decode(S) ->
-     json_decode(S, #decoder{}).
-diff --git a/src/sockjs.erl b/src/sockjs.erl
-index 68163ca..98b1173 100644
---- a/src/sockjs.erl
-+++ b/src/sockjs.erl
-@@ -2,23 +2,23 @@
- -export([send/2, close/1, close/3, info/1]).
---type(conn() :: {sockjs_session, any()}).
-+%% -type(conn() :: {sockjs_session, any()}).
- %% Send data over a connection.
---spec send(iodata(), conn()) -> ok.
-+%% -spec send(iodata(), conn()) -> ok.
- send(Data, Conn = {sockjs_session, _}) ->
-     sockjs_session:send(Data, Conn).
- %% Initiate a close of a connection.
---spec close(conn()) -> ok.
-+%% -spec close(conn()) -> ok.
- close(Conn) ->
-     close(1000, "Normal closure", Conn).
---spec close(non_neg_integer(), string(), conn()) -> ok.
-+%% -spec close(non_neg_integer(), string(), conn()) -> ok.
- close(Code, Reason, Conn = {sockjs_session, _}) ->
-     sockjs_session:close(Code, Reason, Conn).
---spec info(conn()) -> [{atom(), any()}].
-+%% -spec info(conn()) -> [{atom(), any()}].
- info(Conn = {sockjs_session, _}) ->
-     sockjs_session:info(Conn).
-diff --git a/src/sockjs_action.erl b/src/sockjs_action.erl
-index 3f13beb..4310963 100644
---- a/src/sockjs_action.erl
-+++ b/src/sockjs_action.erl
-@@ -45,17 +45,17 @@
- %% --------------------------------------------------------------------------
---spec welcome_screen(req(), headers(), service()) -> req().
-+%% -spec welcome_screen(req(), headers(), service()) -> req().
- welcome_screen(Req, Headers, _Service) ->
-     H = [{"Content-Type", "text/plain; charset=UTF-8"}],
-     sockjs_http:reply(200, H ++ Headers,
-           "Welcome to SockJS!\n", Req).
---spec options(req(), headers(), service()) -> req().
-+%% -spec options(req(), headers(), service()) -> req().
- options(Req, Headers, _Service) ->
-     sockjs_http:reply(204, Headers, "", Req).
---spec iframe(req(), headers(), service()) -> req().
-+%% -spec iframe(req(), headers(), service()) -> req().
- iframe(Req, Headers, #service{sockjs_url = SockjsUrl}) ->
-     IFrame = io_lib:format(?IFRAME, [SockjsUrl]),
-     MD5 = "\"" ++ binary_to_list(base64:encode(erlang:md5(IFrame))) ++ "\"",
-@@ -68,7 +68,7 @@ iframe(Req, Headers, #service{sockjs_url = SockjsUrl}) ->
-     end.
---spec info_test(req(), headers(), service()) -> req().
-+%% -spec info_test(req(), headers(), service()) -> req().
- info_test(Req, Headers, #service{websocket = Websocket,
-                                  cookie_needed = CookieNeeded}) ->
-     I = [{websocket, Websocket},
-@@ -81,12 +81,12 @@ info_test(Req, Headers, #service{websocket = Websocket,
- %% --------------------------------------------------------------------------
---spec xhr_polling(req(), headers(), service(), session()) -> req().
-+%% -spec xhr_polling(req(), headers(), service(), session()) -> req().
- xhr_polling(Req, Headers, Service, Session) ->
-     Req1 = chunk_start(Req, Headers),
-     reply_loop(Req1, Session, 1, fun fmt_xhr/1, Service).
---spec xhr_streaming(req(), headers(), service(), session()) -> req().
-+%% -spec xhr_streaming(req(), headers(), service(), session()) -> req().
- xhr_streaming(Req, Headers, Service = #service{response_limit = ResponseLimit},
-               Session) ->
-     Req1 = chunk_start(Req, Headers),
-@@ -96,7 +96,7 @@ xhr_streaming(Req, Headers, Service = #service{response_limit = ResponseLimit},
-                  fun fmt_xhr/1),
-     reply_loop(Req2, Session, ResponseLimit, fun fmt_xhr/1, Service).
---spec eventsource(req(), headers(), service(), session()) -> req().
-+%% -spec eventsource(req(), headers(), service(), session()) -> req().
- eventsource(Req, Headers, Service = #service{response_limit = ResponseLimit},
-             SessionId) ->
-     Req1 = chunk_start(Req, Headers, "text/event-stream; charset=UTF-8"),
-@@ -104,7 +104,7 @@ eventsource(Req, Headers, Service = #service{response_limit = ResponseLimit},
-     reply_loop(Req2, SessionId, ResponseLimit, fun fmt_eventsource/1, Service).
---spec htmlfile(req(), headers(), service(), session()) -> req().
-+%% -spec htmlfile(req(), headers(), service(), session()) -> req().
- htmlfile(Req, Headers, Service = #service{response_limit = ResponseLimit},
-          SessionId) ->
-     S = fun (Req1, CB) ->
-@@ -119,7 +119,7 @@ htmlfile(Req, Headers, Service = #service{response_limit = ResponseLimit},
-         end,
-     verify_callback(Req, S).
---spec jsonp(req(), headers(), service(), session()) -> req().
-+%% -spec jsonp(req(), headers(), service(), session()) -> req().
- jsonp(Req, Headers, Service, SessionId) ->
-     S = fun (Req1, CB) ->
-                 Req2 = chunk_start(Req1, Headers),
-@@ -139,7 +139,7 @@ verify_callback(Req, Success) ->
- %% --------------------------------------------------------------------------
---spec xhr_send(req(), headers(), service(), session()) -> req().
-+%% -spec xhr_send(req(), headers(), service(), session()) -> req().
- xhr_send(Req, Headers, _Service, Session) ->
-     {Body, Req1} = sockjs_http:body(Req),
-     case handle_recv(Req1, Body, Session) of
-@@ -150,7 +150,7 @@ xhr_send(Req, Headers, _Service, Session) ->
-             sockjs_http:reply(204, H ++ Headers, "", Req1)
-     end.
---spec jsonp_send(req(), headers(), service(), session()) -> req().
-+%% -spec jsonp_send(req(), headers(), service(), session()) -> req().
- jsonp_send(Req, Headers, _Service, Session) ->
-     {Body, Req1} = sockjs_http:body_qs(Req),
-     case handle_recv(Req1, Body, Session) of
-@@ -236,21 +236,21 @@ chunk_end(Req) -> sockjs_http:chunk_end(Req).
- chunk_end(Req, Body, Fmt) -> Req1 = chunk(Req, Body, Fmt),
-                              chunk_end(Req1).
---spec fmt_xhr(iodata()) -> iodata().
-+%% -spec fmt_xhr(iodata()) -> iodata().
- fmt_xhr(Body) -> [Body, "\n"].
---spec fmt_eventsource(iodata()) -> iodata().
-+%% -spec fmt_eventsource(iodata()) -> iodata().
- fmt_eventsource(Body) ->
-     Escaped = sockjs_util:url_escape(binary_to_list(iolist_to_binary(Body)),
-                                      "%\r\n\0"), %% $% must be first!
-     [<<"data: ">>, Escaped, <<"\r\n\r\n">>].
---spec fmt_htmlfile(iodata()) -> iodata().
-+%% -spec fmt_htmlfile(iodata()) -> iodata().
- fmt_htmlfile(Body) ->
-     Double = sockjs_json:encode(iolist_to_binary(Body)),
-     [<<"<script>\np(">>, Double, <<");\n</script>\r\n">>].
---spec fmt_jsonp(iodata(), iodata()) -> iodata().
-+%% -spec fmt_jsonp(iodata(), iodata()) -> iodata().
- fmt_jsonp(Body, Callback) ->
-     %% Yes, JSONed twice, there isn't a a better way, we must pass
-     %% a string back, and the script, will be evaled() by the
-@@ -259,7 +259,7 @@ fmt_jsonp(Body, Callback) ->
- %% --------------------------------------------------------------------------
---spec websocket(req(), headers(), service()) -> req().
-+%% -spec websocket(req(), headers(), service()) -> req().
- websocket(Req, Headers, Service) ->
-     {_Any, Req1, {R1, R2}} = sockjs_handler:is_valid_ws(Service, Req),
-     case {R1, R2} of
-@@ -274,6 +274,6 @@ websocket(Req, Headers, Service) ->
-                               "This WebSocket request can't be handled.", Req1)
-     end.
---spec rawwebsocket(req(), headers(), service()) -> req().
-+%% -spec rawwebsocket(req(), headers(), service()) -> req().
- rawwebsocket(Req, Headers, Service) ->
-     websocket(Req, Headers, Service).
-diff --git a/src/sockjs_app.erl b/src/sockjs_app.erl
-index 1b8e77c..54aceb6 100644
---- a/src/sockjs_app.erl
-+++ b/src/sockjs_app.erl
-@@ -4,11 +4,11 @@
- -export([start/2, stop/1]).
---spec start(_, _) -> {ok, pid()}.
-+%% -spec start(_, _) -> {ok, pid()}.
- start(_StartType, _StartArgs) ->
-     sockjs_session:init(),
-     sockjs_session_sup:start_link().
---spec stop(_) -> ok.
-+%% -spec stop(_) -> ok.
- stop(_State) ->
-     ok.
-diff --git a/src/sockjs_filters.erl b/src/sockjs_filters.erl
-index 15aa8e3..fba43cc 100644
---- a/src/sockjs_filters.erl
-+++ b/src/sockjs_filters.erl
-@@ -9,7 +9,7 @@
- %% --------------------------------------------------------------------------
---spec cache_for(req(), headers()) -> {headers(), req()}.
-+%% -spec cache_for(req(), headers()) -> {headers(), req()}.
- cache_for(Req, Headers) ->
-     Expires = calendar:gregorian_seconds_to_datetime(
-                 calendar:datetime_to_gregorian_seconds(
-@@ -18,7 +18,7 @@ cache_for(Req, Headers) ->
-          {"Expires",       httpd_util:rfc1123_date(Expires)}],
-     {H ++ Headers, Req}.
---spec h_sid(req(), headers()) -> {headers(), req()}.
-+%% -spec h_sid(req(), headers()) -> {headers(), req()}.
- h_sid(Req, Headers) ->
-     %% Some load balancers do sticky sessions, but only if there is
-     %% a JSESSIONID cookie. If this cookie isn't yet set, we shall
-@@ -31,12 +31,12 @@ h_sid(Req, Headers) ->
-     end,
-     {H ++ Headers, Req2}.
---spec h_no_cache(req(), headers()) -> {headers(), req()}.
-+%% -spec h_no_cache(req(), headers()) -> {headers(), req()}.
- h_no_cache(Req, Headers) ->
-     H = [{"Cache-Control", "no-store, no-cache, must-revalidate, max-age=0"}],
-     {H ++ Headers, Req}.
---spec xhr_cors(req(), headers()) -> {headers(), req()}.
-+%% -spec xhr_cors(req(), headers()) -> {headers(), req()}.
- xhr_cors(Req, Headers) ->
-     {OriginH, Req1} = sockjs_http:header('Origin', Req),
-      Origin = case OriginH of
-@@ -54,15 +54,15 @@ xhr_cors(Req, Headers) ->
-          {"Access-Control-Allow-Credentials", "true"}],
-     {H ++ AllowHeaders ++ Headers, Req2}.
---spec xhr_options_post(req(), headers()) -> {headers(), req()}.
-+%% -spec xhr_options_post(req(), headers()) -> {headers(), req()}.
- xhr_options_post(Req, Headers) ->
-     xhr_options(Req, Headers, ["OPTIONS", "POST"]).
---spec xhr_options_get(req(), headers()) -> {headers(), req()}.
-+%% -spec xhr_options_get(req(), headers()) -> {headers(), req()}.
- xhr_options_get(Req, Headers) ->
-     xhr_options(Req, Headers, ["OPTIONS", "GET"]).
---spec xhr_options(req(), headers(), list(string())) -> {headers(), req()}.
-+%% -spec xhr_options(req(), headers(), list(string())) -> {headers(), req()}.
- xhr_options(Req, Headers, Methods) ->
-     H = [{"Access-Control-Allow-Methods", string:join(Methods, ", ")},
-          {"Access-Control-Max-Age", integer_to_list(?YEAR)}],
-diff --git a/src/sockjs_handler.erl b/src/sockjs_handler.erl
-index ebb3982..b706453 100644
---- a/src/sockjs_handler.erl
-+++ b/src/sockjs_handler.erl
-@@ -11,7 +11,7 @@
- %% --------------------------------------------------------------------------
---spec init_state(binary(), callback(), any(), list(tuple())) -> service().
-+%% -spec init_state(binary(), callback(), any(), list(tuple())) -> service().
- init_state(Prefix, Callback, State, Options) ->
-     #service{prefix = binary_to_list(Prefix),
-              callback = Callback,
-@@ -34,7 +34,7 @@ init_state(Prefix, Callback, State, Options) ->
- %% --------------------------------------------------------------------------
---spec is_valid_ws(service(), req()) -> {boolean(), req(), tuple()}.
-+%% -spec is_valid_ws(service(), req()) -> {boolean(), req(), tuple()}.
- is_valid_ws(Service, Req) ->
-     case get_action(Service, Req) of
-         {{match, WS}, Req1} when WS =:= websocket orelse
-@@ -44,7 +44,7 @@ is_valid_ws(Service, Req) ->
-             {false, Req1, {}}
-     end.
---spec valid_ws_request(service(), req()) -> {boolean(), req(), tuple()}.
-+%% -spec valid_ws_request(service(), req()) -> {boolean(), req(), tuple()}.
- valid_ws_request(_Service, Req) ->
-     {R1, Req1} = valid_ws_upgrade(Req),
-     {R2, Req2} = valid_ws_connection(Req1),
-@@ -73,7 +73,7 @@ valid_ws_connection(Req) ->
-             {lists:member("upgrade", Vs), Req2}
-     end.
---spec get_action(service(), req()) -> {nomatch | {match, atom()}, req()}.
-+%% -spec get_action(service(), req()) -> {nomatch | {match, atom()}, req()}.
- get_action(Service, Req) ->
-     {Dispatch, Req1} = dispatch_req(Service, Req),
-     case Dispatch of
-@@ -93,20 +93,20 @@ strip_prefix(LongPath, Prefix) ->
-     end.
---type(dispatch_result() ::
--        nomatch |
--        {match, {send | recv | none , atom(),
--                 server(), session(), list(atom())}} |
--        {bad_method, list(atom())}).
-+%% -type(dispatch_result() ::
-+%%        nomatch |
-+%%        {match, {send | recv | none , atom(),
-+%%                 server(), session(), list(atom())}} |
-+%%        {bad_method, list(atom())}).
---spec dispatch_req(service(), req()) -> {dispatch_result(), req()}.
-+%% -spec dispatch_req(service(), req()) -> {dispatch_result(), req()}.
- dispatch_req(#service{prefix = Prefix}, Req) ->
-     {Method, Req1} = sockjs_http:method(Req),
-     {LongPath, Req2} = sockjs_http:path(Req1),
-     {ok, PathRemainder} = strip_prefix(LongPath, Prefix),
-     {dispatch(Method, PathRemainder), Req2}.
---spec dispatch(atom(), nonempty_string()) -> dispatch_result().
-+%% -spec dispatch(atom(), nonempty_string()) -> dispatch_result().
- dispatch(Method, Path) ->
-     lists:foldl(
-       fun ({Match, MethodFilters}, nomatch) ->
-@@ -163,7 +163,7 @@ re(Path, S) ->
- %% --------------------------------------------------------------------------
---spec handle_req(service(), req()) -> req().
-+%% -spec handle_req(service(), req()) -> req().
- handle_req(Service = #service{logger = Logger}, Req) ->
-     Req0 = Logger(Service, Req, http),
-@@ -202,14 +202,14 @@ handle({match, {Type, Action, _Server, Session, Filters}}, Service, Req) ->
- %% --------------------------------------------------------------------------
---spec default_logger(service(), req(), websocket | http) -> req().
-+%% -spec default_logger(service(), req(), websocket | http) -> req().
- default_logger(_Service, Req, _Type) ->
-     {LongPath, Req1} = sockjs_http:path(Req),
-     {Method, Req2}   = sockjs_http:method(Req1),
-     io:format("~s ~s~n", [Method, LongPath]),
-     Req2.
---spec extract_info(req()) -> {info(), req()}.
-+%% -spec extract_info(req()) -> {info(), req()}.
- extract_info(Req) ->
-     {Peer, Req0}    = sockjs_http:peername(Req),
-     {Sock, Req1}    = sockjs_http:sockname(Req0),
-diff --git a/src/sockjs_http.erl b/src/sockjs_http.erl
-index 9754119..5cdf431 100644
---- a/src/sockjs_http.erl
-+++ b/src/sockjs_http.erl
-@@ -8,22 +8,22 @@
- %% --------------------------------------------------------------------------
---spec path(req()) -> {string(), req()}.
-+%% -spec path(req()) -> {string(), req()}.
- path({cowboy, Req})       -> {Path, Req1} = cowboy_http_req:raw_path(Req),
-                              {binary_to_list(Path), {cowboy, Req1}}.
---spec method(req()) -> {atom(), req()}.
-+%% -spec method(req()) -> {atom(), req()}.
- method({cowboy, Req})       -> {Method, Req1} = cowboy_http_req:method(Req),
-                                case is_binary(Method) of
-                                    true  -> {binary_to_atom(Method, utf8), {cowboy, Req1}};
-                                    false -> {Method, {cowboy, Req1}}
-                                end.
---spec body(req()) -> {binary(), req()}.
-+%% -spec body(req()) -> {binary(), req()}.
- body({cowboy, Req})       -> {ok, Body, Req1} = cowboy_http_req:body(Req),
-                              {Body, {cowboy, Req1}}.
---spec body_qs(req()) -> {binary(), req()}.
-+%% -spec body_qs(req()) -> {binary(), req()}.
- body_qs(Req) ->
-     {H, Req1} =  header('Content-Type', Req),
-     case H of
-@@ -42,7 +42,7 @@ body_qs2({cowboy, Req}) ->
-             {V, {cowboy, Req1}}
-     end.
---spec header(atom(), req()) -> {nonempty_string() | undefined, req()}.
-+%% -spec header(atom(), req()) -> {nonempty_string() | undefined, req()}.
- header(K, {cowboy, Req})->
-     {H, Req2} = cowboy_http_req:header(K, Req),
-     {V, Req3} = case H of
-@@ -55,7 +55,7 @@ header(K, {cowboy, Req})->
-         _         -> {binary_to_list(V), {cowboy, Req3}}
-     end.
---spec jsessionid(req()) -> {nonempty_string() | undefined, req()}.
-+%% -spec jsessionid(req()) -> {nonempty_string() | undefined, req()}.
- jsessionid({cowboy, Req}) ->
-     {C, Req2} = cowboy_http_req:cookie(<<"JSESSIONID">>, Req),
-     case C of
-@@ -65,7 +65,7 @@ jsessionid({cowboy, Req}) ->
-             {undefined, {cowboy, Req2}}
-     end.
---spec callback(req()) -> {nonempty_string() | undefined, req()}.
-+%% -spec callback(req()) -> {nonempty_string() | undefined, req()}.
- callback({cowboy, Req}) ->
-     {CB, Req1} = cowboy_http_req:qs_val(<<"c">>, Req),
-     case CB of
-@@ -73,12 +73,12 @@ callback({cowboy, Req}) ->
-         _         -> {binary_to_list(CB), {cowboy, Req1}}
-     end.
---spec peername(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
-+%% -spec peername(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
- peername({cowboy, Req}) ->
-     {P, Req1} = cowboy_http_req:peer(Req),
-     {P, {cowboy, Req1}}.
---spec sockname(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
-+%% -spec sockname(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
- sockname({cowboy, Req} = R) ->
-     {ok, _T, S} = cowboy_http_req:transport(Req),
-     %% Cowboy has peername(), but doesn't have sockname() equivalent.
-@@ -92,18 +92,18 @@ sockname({cowboy, Req} = R) ->
- %% --------------------------------------------------------------------------
---spec reply(non_neg_integer(), headers(), iodata(), req()) -> req().
-+%% -spec reply(non_neg_integer(), headers(), iodata(), req()) -> req().
- reply(Code, Headers, Body, {cowboy, Req}) ->
-     Body1 = iolist_to_binary(Body),
-     {ok, Req1} = cowboy_http_req:reply(Code, enbinary(Headers), Body1, Req),
-     {cowboy, Req1}.
---spec chunk_start(non_neg_integer(), headers(), req()) -> req().
-+%% -spec chunk_start(non_neg_integer(), headers(), req()) -> req().
- chunk_start(Code, Headers, {cowboy, Req}) ->
-     {ok, Req1} = cowboy_http_req:chunked_reply(Code, enbinary(Headers), Req),
-     {cowboy, Req1}.
---spec chunk(iodata(), req()) -> {ok | error, req()}.
-+%% -spec chunk(iodata(), req()) -> {ok | error, req()}.
- chunk(Chunk, {cowboy, Req} = R) ->
-     case cowboy_http_req:chunk(Chunk, Req) of
-         ok          -> {ok, R};
-@@ -112,25 +112,25 @@ chunk(Chunk, {cowboy, Req} = R) ->
-                       %% should catch tco socket closure before.
-     end.
---spec chunk_end(req()) -> req().
-+%% -spec chunk_end(req()) -> req().
- chunk_end({cowboy, _Req} = R)  -> R.
- enbinary(L) -> [{list_to_binary(K), list_to_binary(V)} || {K, V} <- L].
---spec hook_tcp_close(req()) -> req().
-+%% -spec hook_tcp_close(req()) -> req().
- hook_tcp_close(R = {cowboy, Req}) ->
-     {ok, T, S} = cowboy_http_req:transport(Req),
-     T:setopts(S,[{active,once}]),
-     R.
---spec unhook_tcp_close(req()) -> req().
-+%% -spec unhook_tcp_close(req()) -> req().
- unhook_tcp_close(R = {cowboy, Req}) ->
-     {ok, T, S} = cowboy_http_req:transport(Req),
-     T:setopts(S,[{active,false}]),
-     R.
---spec abruptly_kill(req()) -> req().
-+%% -spec abruptly_kill(req()) -> req().
- abruptly_kill(R = {cowboy, Req}) ->
-     {ok, T, S} = cowboy_http_req:transport(Req),
-     T:close(S),
-diff --git a/src/sockjs_internal.hrl b/src/sockjs_internal.hrl
-index 4f696d8..629b2fe 100644
---- a/src/sockjs_internal.hrl
-+++ b/src/sockjs_internal.hrl
-@@ -1,32 +1,32 @@
---type(req()          :: {cowboy, any()}).
-+%% -type(req()          :: {cowboy, any()}).
---type(user_session() :: nonempty_string()).
---type(emittable()    :: init|closed|{recv, binary()}).
---type(callback()     :: fun((user_session(), emittable(), any()) -> ok)).
---type(logger()       :: fun((any(), req(), websocket|http) -> req())).
-+%% -type(user_session() :: nonempty_string()).
-+%% -type(emittable()    :: init|closed|{recv, binary()}).
-+%% -type(callback()     :: fun((user_session(), emittable(), any()) -> ok)).
-+%% -type(logger()       :: fun((any(), req(), websocket|http) -> req())).
---record(service, {prefix           :: nonempty_string(),
--                  callback         :: callback(),
--                  state            :: any(),
--                  sockjs_url       :: nonempty_string(),
--                  cookie_needed    :: boolean(),
--                  websocket        :: boolean(),
--                  disconnect_delay :: non_neg_integer(),
--                  heartbeat_delay  :: non_neg_integer(),
--                  response_limit   :: non_neg_integer(),
--                  logger           :: logger()
-+-record(service, {prefix           , %%  nonempty_string(),
-+                  callback         , %%  callback()
-+                  state            , %%  any()
-+                  sockjs_url       , %%  nonempty_string()
-+                  cookie_needed    , %%  boolean()
-+                  websocket        , %%  boolean()
-+                  disconnect_delay , %%  non_neg_integer()
-+                  heartbeat_delay  , %%  non_neg_integer()
-+                  response_limit   , %%  non_neg_integer()
-+                  logger            %%  logger()
-                   }).
---type(service() :: #service{}).
-+%% -type(service() :: #service{}).
---type(headers() :: list({nonempty_string(), nonempty_string()})).
---type(server()  :: nonempty_string()).
---type(session() :: nonempty_string()).
-+%% -type(headers() :: list({nonempty_string(), nonempty_string()})).
-+%% -type(server()  :: nonempty_string()).
-+%% -type(session() :: nonempty_string()).
---type(frame()   :: {open, nil} |
--                   {close, {non_neg_integer(), string()}} |
--                   {data, list(iodata())} |
--                   {heartbeat, nil} ).
-+%% -type(frame()   :: {open, nil} |
-+%%                   {close, {non_neg_integer(), string()}} |
-+%%                   {data, list(iodata())} |
-+%%                   {heartbeat, nil} ).
---type(info()    :: [{atom(), any()}]).
-+%% -type(info()    :: [{atom(), any()}]).
-diff --git a/src/sockjs_json.erl b/src/sockjs_json.erl
-index e61f4b9..d3dae20 100644
---- a/src/sockjs_json.erl
-+++ b/src/sockjs_json.erl
-@@ -4,11 +4,11 @@
- %% --------------------------------------------------------------------------
---spec encode(any()) -> iodata().
-+%% -spec encode(any()) -> iodata().
- encode(Thing) ->
-     mochijson2_fork:encode(Thing).
---spec decode(iodata()) -> {ok, any()} | {error, any()}.
-+%% -spec decode(iodata()) -> {ok, any()} | {error, any()}.
- decode(Encoded) ->
-     try mochijson2_fork:decode(Encoded) of
-         V -> {ok, V}
-diff --git a/src/sockjs_session.erl b/src/sockjs_session.erl
-index 66c5df0..7e4ae00 100644
---- a/src/sockjs_session.erl
-+++ b/src/sockjs_session.erl
-@@ -11,39 +11,39 @@
-          handle_cast/2]).
- -include("sockjs_internal.hrl").
---type(handle() :: {?MODULE, {pid(), info()}}).
--
---record(session, {id                           :: session(),
--                  outbound_queue = queue:new() :: queue(),
--                  response_pid                 :: pid(),
--                  disconnect_tref              :: reference(),
--                  disconnect_delay = 5000      :: non_neg_integer(),
--                  heartbeat_tref               :: reference() | triggered,
--                  heartbeat_delay = 25000      :: non_neg_integer(),
--                  ready_state = connecting     :: connecting | open | closed,
--                  close_msg                    :: {non_neg_integer(), string()},
-+%% -type(handle() :: {?MODULE, {pid(), info()}}).
-+
-+-record(session, {id                           , %%  session(),
-+                  outbound_queue = queue:new() , %%  queue()
-+                  response_pid                 , %%  pid()
-+                  disconnect_tref              , %%  reference()
-+                  disconnect_delay = 5000      , %%  non_neg_integer()
-+                  heartbeat_tref               , %%  reference() | triggered
-+                  heartbeat_delay = 25000      , %%  non_neg_integer()
-+                  ready_state = connecting     , %%  connecting | open | closed
-+                  close_msg                    , %%  {non_neg_integer(), string()}
-                   callback,
-                   state,
--                  handle                       :: handle()
-+                  handle                        %%  handle()
-                  }).
- -define(ETS, sockjs_table).
---type(session_or_undefined() :: session() | undefined).
---type(session_or_pid() :: session() | pid()).
-+%% -type(session_or_undefined() :: session() | undefined).
-+%% -type(session_or_pid() :: session() | pid()).
- %% --------------------------------------------------------------------------
---spec init() -> ok.
-+%% -spec init() -> ok.
- init() ->
-     _ = ets:new(?ETS, [public, named_table]),
-     ok.
---spec start_link(session_or_undefined(), service(), info()) -> {ok, pid()}.
-+%% -spec start_link(session_or_undefined(), service(), info()) -> {ok, pid()}.
- start_link(SessionId, Service, Info) ->
-     gen_server:start_link(?MODULE, {SessionId, Service, Info}, []).
---spec maybe_create(session_or_undefined(), service(), info()) -> pid().
-+%% -spec maybe_create(session_or_undefined(), service(), info()) -> pid().
- maybe_create(SessionId, Service, Info) ->
-     case ets:lookup(?ETS, SessionId) of
-         []          -> {ok, SPid} = sockjs_session_sup:start_child(
-@@ -53,7 +53,7 @@ maybe_create(SessionId, Service, Info) ->
-     end.
---spec received(list(iodata()), session_or_pid()) -> ok.
-+%% -spec received(list(iodata()), session_or_pid()) -> ok.
- received(Messages, SessionPid) when is_pid(SessionPid) ->
-     case gen_server:call(SessionPid, {received, Messages}, infinity) of
-         ok    -> ok;
-@@ -63,27 +63,27 @@ received(Messages, SessionPid) when is_pid(SessionPid) ->
- received(Messages, SessionId) ->
-     received(Messages, spid(SessionId)).
---spec send(iodata(), handle()) -> ok.
-+%% -spec send(iodata(), handle()) -> ok.
- send(Data, {?MODULE, {SPid, _}}) ->
-     gen_server:cast(SPid, {send, Data}),
-     ok.
---spec close(non_neg_integer(), string(), handle()) -> ok.
-+%% -spec close(non_neg_integer(), string(), handle()) -> ok.
- close(Code, Reason, {?MODULE, {SPid, _}}) ->
-     gen_server:cast(SPid, {close, Code, Reason}),
-     ok.
---spec info(handle()) -> info().
-+%% -spec info(handle()) -> info().
- info({?MODULE, {_SPid, Info}}) ->
-     Info.
---spec reply(session_or_pid()) ->
--                   wait | session_in_use | {ok | close, frame()}.
-+%% -spec reply(session_or_pid()) ->
-+%%                   wait | session_in_use | {ok | close, frame()}.
- reply(Session) ->
-     reply(Session, true).
---spec reply(session_or_pid(), boolean()) ->
--                   wait | session_in_use | {ok | close, frame()}.
-+%% -spec reply(session_or_pid(), boolean()) ->
-+%%                   wait | session_in_use | {ok | close, frame()}.
- reply(SessionPid, Multiple) when is_pid(SessionPid) ->
-     gen_server:call(SessionPid, {reply, self(), Multiple}, infinity);
- reply(SessionId, Multiple) ->
-@@ -154,7 +154,7 @@ unmark_waiting(RPid, State = #session{response_pid    = Pid,
-   when Pid =/= undefined andalso Pid =/= RPid ->
-     State.
---spec emit(emittable(), #session{}) -> #session{}.
-+%% -spec emit(emittable(), #session{}) -> #session{}.
- emit(What, State = #session{callback = Callback,
-                             state    = UserState,
-                             handle   = Handle}) ->
-@@ -175,7 +175,7 @@ emit(What, State = #session{callback = Callback,
- %% --------------------------------------------------------------------------
---spec init({session_or_undefined(), service(), info()}) -> {ok, #session{}}.
-+%% -spec init({session_or_undefined(), service(), info()}) -> {ok, #session{}}.
- init({SessionId, #service{callback         = Callback,
-                           state            = UserState,
-                           disconnect_delay = DisconnectDelay,
-diff --git a/src/sockjs_session_sup.erl b/src/sockjs_session_sup.erl
-index 4197ce3..71c7ff4 100644
---- a/src/sockjs_session_sup.erl
-+++ b/src/sockjs_session_sup.erl
-@@ -7,7 +7,7 @@
- %% --------------------------------------------------------------------------
---spec start_link() -> ignore | {'ok', pid()} | {'error', any()}.
-+%% -spec start_link() -> ignore | {'ok', pid()} | {'error', any()}.
- start_link() ->
-      supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-diff --git a/src/sockjs_util.erl b/src/sockjs_util.erl
-index be3f972..9b9969d 100644
---- a/src/sockjs_util.erl
-+++ b/src/sockjs_util.erl
-@@ -8,7 +8,7 @@
- %% --------------------------------------------------------------------------
---spec rand32() -> non_neg_integer().
-+%% -spec rand32() -> non_neg_integer().
- rand32() ->
-     case get(random_seeded) of
-         undefined ->
-@@ -21,7 +21,7 @@ rand32() ->
-     random:uniform(erlang:trunc(math:pow(2,32)))-1.
---spec encode_frame(frame()) -> iodata().
-+%% -spec encode_frame(frame()) -> iodata().
- encode_frame({open, nil}) ->
-     <<"o">>;
- encode_frame({close, {Code, Reason}}) ->
-@@ -34,7 +34,7 @@ encode_frame({heartbeat, nil}) ->
-     <<"h">>.
---spec url_escape(string(), string()) -> iolist().
-+%% -spec url_escape(string(), string()) -> iolist().
- url_escape(Str, Chars) ->
-     [case lists:member(Char, Chars) of
-          true  -> hex(Char);
-diff --git a/src/sockjs_ws_handler.erl b/src/sockjs_ws_handler.erl
-index bcf463d..c011c89 100644
---- a/src/sockjs_ws_handler.erl
-+++ b/src/sockjs_ws_handler.erl
-@@ -6,7 +6,7 @@
- %% --------------------------------------------------------------------------
---spec received(websocket|rawwebsocket, pid(), binary()) -> ok | shutdown.
-+%% -spec received(websocket|rawwebsocket, pid(), binary()) -> ok | shutdown.
- %% Ignore empty
- received(_RawWebsocket, _SessionPid, <<>>) ->
-     ok;
-@@ -30,7 +30,7 @@ session_received(Messages, SessionPid) ->
-         no_session -> shutdown
-     end.
---spec reply(websocket|rawwebsocket, pid()) -> {close|open, binary()} | wait.
-+%% -spec reply(websocket|rawwebsocket, pid()) -> {close|open, binary()} | wait.
- reply(websocket, SessionPid) ->
-     case sockjs_session:reply(SessionPid) of
-         {W, Frame} when W =:= ok orelse W =:= close->
-@@ -52,7 +52,7 @@ reply(rawwebsocket, SessionPid) ->
-             wait
-     end.
---spec close(websocket|rawwebsocket, pid()) -> ok.
-+%% -spec close(websocket|rawwebsocket, pid()) -> ok.
- close(_RawWebsocket, SessionPid) ->
-     SessionPid ! force_shutdown,
-     ok.
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0001-a2b-b2a.diff b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0001-a2b-b2a.diff
deleted file mode 100644 (file)
index b9a4d3e..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-diff --git a/src/sockjs_http.erl b/src/sockjs_http.erl
-index 5cdf431..837b64f 100644
---- a/src/sockjs_http.erl
-+++ b/src/sockjs_http.erl
-@@ -15,7 +15,7 @@ path({cowboy, Req})       -> {Path, Req1} = cowboy_http_req:raw_path(Req),
- %% -spec method(req()) -> {atom(), req()}.
- method({cowboy, Req})       -> {Method, Req1} = cowboy_http_req:method(Req),
-                                case is_binary(Method) of
--                                   true  -> {binary_to_atom(Method, utf8), {cowboy, Req1}};
-+                                   true  -> {list_to_atom(binary_to_list(Method)), {cowboy, Req1}};
-                                    false -> {Method, {cowboy, Req1}}
-                                end.
-@@ -47,7 +47,7 @@ header(K, {cowboy, Req})->
-     {H, Req2} = cowboy_http_req:header(K, Req),
-     {V, Req3} = case H of
-                     undefined ->
--                        cowboy_http_req:header(atom_to_binary(K, utf8), Req2);
-+                        cowboy_http_req:header(list_to_binary(atom_to_list(K)), Req2);
-                     _ -> {H, Req2}
-                 end,
-     case V of
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0002-parameterised-modules-r16a.diff b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0002-parameterised-modules-r16a.diff
deleted file mode 100644 (file)
index 29ba8a2..0000000
+++ /dev/null
@@ -1,477 +0,0 @@
-diff --git a/src/pmod_pt.erl b/src/pmod_pt.erl
-new file mode 100644
-index 0000000..db21974
---- /dev/null
-+++ b/src/pmod_pt.erl
-@@ -0,0 +1,461 @@
-+%%
-+%% %CopyrightBegin%
-+%%
-+%% Copyright Ericsson AB 2013. All Rights Reserved.
-+%%
-+%% The contents of this file are subject to the Erlang Public License,
-+%% Version 1.1, (the "License"); you may not use this file except in
-+%% compliance with the License. You should have received a copy of the
-+%% Erlang Public License along with this software. If not, it can be
-+%% retrieved online at http://www.erlang.org/.
-+%%
-+%% Software distributed under the License is distributed on an "AS IS"
-+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-+%% the License for the specific language governing rights and limitations
-+%% under the License.
-+%%
-+%% %CopyrightEnd%
-+%%
-+
-+-module(pmod_pt).
-+-export([parse_transform/2,
-+       format_error/1]).
-+
-+%% Expand function definition forms of parameterized module.
-+%% The code is based on the code in sys_expand_pmod which used to be
-+%% included in the compiler, but details are different because
-+%% sys_pre_expand has not been run. In particular:
-+%%
-+%% * Record definitions are still present and must be handled.
-+%%
-+%% * (Syntatic) local calls may actually be calls to an imported
-+%%   funtion or a BIF. It is a local call if and only if there
-+%%   is a definition for the function in the module.
-+%%
-+%% * When we introduce the module parameters and 'THIS' in each
-+%%   function, we must artificially use it to avoid a warning for
-+%%   unused variables.
-+%%
-+%% * On the other hand, we don't have to worry about module_info/0,1
-+%%   because they have not been added yet.
-+
-+-record(pmod, {parameters,
-+             defined
-+            }).
-+
-+parse_transform(Forms0, _Options) ->
-+    put(?MODULE, []),
-+    Forms = transform(Forms0),
-+    case erase(?MODULE) of
-+      [] ->
-+          Forms;
-+      [_|_]=Errors ->
-+          File = get_file(Forms),
-+          {error,[{File,Errors}],[]}
-+    end.
-+  
-+format_error(extends_self) ->
-+    "cannot extend from self";
-+format_error(define_instance) ->
-+    "defining instance function not allowed in parameterized module".
-+
-+add_error(Line, Error) ->
-+    put(?MODULE, get(?MODULE) ++ [{Line,?MODULE,Error}]).
-+
-+get_file([{attribute,_,file,{File,_}}|_]) -> File;
-+get_file([_|T]) -> get_file(T).
-+    
-+transform(Forms0) ->
-+    Def = collect_defined(Forms0),
-+    {Base,ModAs,Forms1} = attribs(Forms0, [], undefined, []),
-+    {Mod,Ps0} = case ModAs of
-+                  {M0,P0} -> {M0,P0};
-+                  M0 -> {M0,undefined}
-+              end,
-+    Forms2 = case Ps0 of
-+               undefined ->
-+                   Forms1;
-+               _ ->
-+                   pmod_expand(Forms1, Mod, Base, Ps0, Def)
-+           end,
-+
-+    %% Add new functions.
-+    NewFs0 = maybe_extend(Base, Mod, Ps0),
-+    NewExps = collect_defined(NewFs0),
-+    Forms3 = add_attributes(Forms2, [{attribute,0,export,NewExps}]),
-+    add_new_funcs(Forms3, NewFs0).
-+
-+pmod_expand(Forms0, Mod, Base, Ps0, Def) ->
-+    Ps = if is_atom(Base) ->
-+               ['BASE' | Ps0];
-+          true ->
-+               Ps0
-+       end,
-+    St0 = #pmod{parameters=Ps,defined=gb_sets:from_list(Def)},
-+    {Forms1,_} = forms(Forms0, St0),
-+    Forms2 = update_exps(Forms1),
-+    Forms3 = update_forms(Forms2),
-+    NewFs0 = add_instance(Mod, Ps, []),
-+    NewFs = ensure_new(Base, Ps0, NewFs0),
-+    Forms = add_new_funcs(Forms3, NewFs),
-+    NewExps = collect_defined(NewFs),
-+    add_attributes(Forms, [{attribute,0,export,NewExps}]).
-+
-+add_attributes([{attribute,_,module,_}=F|Fs], Attrs) ->
-+    [F|Attrs++Fs];
-+add_attributes([F|Fs], Attrs) ->
-+    [F|add_attributes(Fs, Attrs)].
-+
-+add_new_funcs([{eof,_}|_]=Fs, NewFs) ->
-+    NewFs ++ Fs;
-+add_new_funcs([F|Fs], Es) ->
-+    [F|add_new_funcs(Fs, Es)].
-+
-+maybe_extend([], _, _) ->
-+    %% No 'extends' attribute.
-+    [];
-+maybe_extend(Base, _Mod, undefined) ->
-+    %% There is a an 'extends' attribute; the module is not parameterized.
-+    Name = '$handle_undefined_function',
-+    Args = [{var,0,'Func'},{var,0,'Args'}],
-+    Body = [make_apply({atom,0,Base}, {var,0,'Func'}, {var,0,'Args'})],
-+    F = {function,0,Name,2,[{clause,0,Args,[],Body}]},
-+    [F];
-+maybe_extend(Base, Mod, Ps) ->
-+    %% There is a an 'extends' attribute; the module is parameterized.
-+    Name = '$handle_undefined_function',
-+    Args = [{var,0,'Func'},{var,0,'Args'}],
-+    DontCares = [{var,0,'_'} || _ <- Ps],
-+    TuplePs = {tuple,0,[{atom,0,Mod},{var,0,'BaseVars'}|DontCares]},
-+    G = [{call,0,{atom,0,is_atom},
-+        [{call,0,{atom,0,element},
-+          [{integer,0,1},{var,0,'BaseVars'}]}]}],
-+    FixedArgs = make_lists_rev([{var,0,'Rs'},
-+                              {cons,0,{var,0,'BaseVars'},{nil,0}}]),
-+    Body = [{'case',0,make_lists_rev([{var,0,'Args'}]),
-+           [{clause,0,[{cons,0,TuplePs,{var,0,'Rs'}}],[G],
-+             [make_apply({atom,0,Base}, {var,0,'Func'}, FixedArgs)]},
-+            {clause,0,[{var,0,'_'}],[],
-+             [make_apply({atom,0,Base}, {var,0,'Func'}, {var,0,'Args'})]}
-+           ]}],
-+    F = {function,0,Name,2,[{clause,0,Args,[],Body}]},
-+    [F].
-+
-+make_apply(M, F, A) ->
-+    {call,0,{remote,0,{atom,0,erlang},{atom,0,apply}},[M,F,A]}.
-+
-+make_lists_rev(As) ->
-+    {call,0,{remote,0,{atom,0,lists},{atom,0,reverse}},As}.
-+
-+ensure_new(Base, Ps, Fs) ->
-+    case has_new(Fs) of
-+      true ->
-+          Fs;
-+      false ->
-+          add_new(Base, Ps, Fs)
-+    end.
-+
-+has_new([{function,_L,new,_A,_Cs} | _Fs]) ->
-+    true;
-+has_new([_ | Fs]) ->
-+    has_new(Fs);
-+has_new([]) ->
-+    false.
-+
-+add_new(Base, Ps, Fs) ->
-+    Vs = [{var,0,V} || V <- Ps],
-+    As = if is_atom(Base) ->
-+               [{call,0,{remote,0,{atom,0,Base},{atom,0,new}},Vs} | Vs];
-+          true ->
-+               Vs
-+       end,
-+    Body = [{call,0,{atom,0,instance},As}],
-+    add_func(new, Vs, Body, Fs).
-+
-+add_instance(Mod, Ps, Fs) ->
-+    Vs = [{var,0,V} || V <- Ps],
-+    AbsMod = [{tuple,0,[{atom,0,Mod}|Vs]}],
-+    add_func(instance, Vs, AbsMod, Fs).
-+
-+add_func(Name, Args, Body, Fs) ->
-+    A = length(Args),
-+    F = {function,0,Name,A,[{clause,0,Args,[],Body}]},
-+    [F|Fs].
-+
-+collect_defined(Fs) ->
-+    [{N,A} || {function,_,N,A,_} <- Fs].
-+
-+attribs([{attribute,Line,module,{Mod,_}=ModAs}|T], Base, _, Acc) ->
-+    attribs(T, Base, ModAs, [{attribute,Line,module,Mod}|Acc]);
-+attribs([{attribute,_,module,Mod}=H|T], Base, _, Acc) ->
-+    attribs(T, Base, Mod, [H|Acc]);
-+attribs([{attribute,Line,extends,Base}|T], Base0, Ps, Acc) when is_atom(Base) ->
-+    Mod = case Ps of
-+            {Mod0,_} -> Mod0;
-+            Mod0 -> Mod0
-+        end,
-+    case Mod of
-+      Base ->
-+          add_error(Line, extends_self),
-+          attribs(T, Base0, Ps, Acc);
-+      _ ->
-+          attribs(T, Base, Ps, Acc)
-+    end;
-+attribs([H|T], Base, Ps, Acc) ->
-+    attribs(T, Base, Ps, [H|Acc]);
-+attribs([], Base, Ps, Acc) ->
-+    {Base,Ps,lists:reverse(Acc)}.
-+
-+%% This is extremely simplistic for now; all functions get an extra
-+%% parameter, whether they need it or not, except for static functions.
-+
-+update_function_name({F,A}) when F =/= new ->
-+    {F,A+1};
-+update_function_name(E) ->
-+    E.
-+
-+update_forms([{function,L,N,A,Cs}|Fs]) when N =/= new ->
-+    [{function,L,N,A+1,Cs}|update_forms(Fs)];
-+update_forms([F|Fs]) ->
-+    [F|update_forms(Fs)];
-+update_forms([]) ->
-+    [].
-+
-+update_exps([{attribute,Line,export,Es0}|T]) ->
-+    Es = [update_function_name(E) || E <- Es0],
-+    [{attribute,Line,export,Es}|update_exps(T)];
-+update_exps([H|T]) ->
-+    [H|update_exps(T)];
-+update_exps([]) ->
-+    [].
-+
-+%% Process the program forms.
-+
-+forms([F0|Fs0],St0) ->
-+    {F1,St1} = form(F0,St0),
-+    {Fs1,St2} = forms(Fs0,St1),
-+    {[F1|Fs1],St2};
-+forms([], St0) ->
-+    {[], St0}.
-+
-+%% Only function definitions are of interest here. State is not updated.
-+form({function,Line,instance,_Arity,_Clauses}=F,St) ->
-+    add_error(Line, define_instance),
-+    {F,St};
-+form({function,Line,Name0,Arity0,Clauses0},St) when Name0 =/= new ->
-+    {Name,Arity,Clauses} = function(Name0, Arity0, Clauses0, St),
-+    {{function,Line,Name,Arity,Clauses},St};
-+%% Pass anything else through
-+form(F,St) -> {F,St}.
-+
-+function(Name, Arity, Clauses0, St) ->
-+    Clauses1 = clauses(Clauses0,St),
-+    {Name,Arity,Clauses1}.
-+
-+clauses([C|Cs],#pmod{parameters=Ps}=St) ->
-+    {clause,L,H,G,B0} = clause(C,St),
-+    T = {tuple,L,[{var,L,V} || V <- ['_'|Ps]]},
-+    B = [{match,L,{var,L,'_'},{var,L,V}} || V <- ['THIS'|Ps]] ++ B0,
-+    [{clause,L,H++[{match,L,T,{var,L,'THIS'}}],G,B}|clauses(Cs,St)];
-+clauses([],_St) -> [].
-+
-+clause({clause,Line,H,G,B0},St) ->
-+    %% We never update H and G, so we will just copy them.
-+    B1 = exprs(B0,St),
-+    {clause,Line,H,G,B1}.
-+
-+pattern_grp([{bin_element,L1,E1,S1,T1} | Fs],St) ->
-+    S2 = case S1 of
-+           default ->
-+               default;
-+           _ ->
-+               expr(S1,St)
-+       end,
-+    T2 = case T1 of
-+           default ->
-+               default;
-+           _ ->
-+               bit_types(T1)
-+       end,
-+    [{bin_element,L1,expr(E1,St),S2,T2} | pattern_grp(Fs,St)];
-+pattern_grp([],_St) ->
-+    [].
-+
-+bit_types([]) ->
-+    [];
-+bit_types([Atom | Rest]) when is_atom(Atom) ->
-+    [Atom | bit_types(Rest)];
-+bit_types([{Atom, Integer} | Rest]) when is_atom(Atom), is_integer(Integer) ->
-+    [{Atom, Integer} | bit_types(Rest)].
-+
-+exprs([E0|Es],St) ->
-+    E1 = expr(E0,St),
-+    [E1|exprs(Es,St)];
-+exprs([],_St) -> [].
-+
-+expr({var,_L,_V}=Var,_St) ->
-+    Var;
-+expr({integer,_Line,_I}=Integer,_St) -> Integer;
-+expr({float,_Line,_F}=Float,_St) -> Float;
-+expr({atom,_Line,_A}=Atom,_St) -> Atom;
-+expr({string,_Line,_S}=String,_St) -> String;
-+expr({char,_Line,_C}=Char,_St) -> Char;
-+expr({nil,_Line}=Nil,_St) -> Nil;
-+expr({cons,Line,H0,T0},St) ->
-+    H1 = expr(H0,St),
-+    T1 = expr(T0,St),
-+    {cons,Line,H1,T1};
-+expr({lc,Line,E0,Qs0},St) ->
-+    Qs1 = lc_bc_quals(Qs0,St),
-+    E1 = expr(E0,St),
-+    {lc,Line,E1,Qs1};
-+expr({bc,Line,E0,Qs0},St) ->
-+    Qs1 = lc_bc_quals(Qs0,St),
-+    E1 = expr(E0,St),
-+    {bc,Line,E1,Qs1};
-+expr({tuple,Line,Es0},St) ->
-+    Es1 = expr_list(Es0,St),
-+    {tuple,Line,Es1};
-+expr({record,Line,Name,Is0},St) ->
-+    Is = record_fields(Is0,St),
-+    {record,Line,Name,Is};
-+expr({record,Line,E0,Name,Is0},St) ->
-+    E = expr(E0,St),
-+    Is = record_fields(Is0,St),
-+    {record,Line,E,Name,Is};
-+expr({record_field,Line,E0,Name,Key},St) ->
-+    E = expr(E0,St),
-+    {record_field,Line,E,Name,Key};
-+expr({block,Line,Es0},St) ->
-+    Es1 = exprs(Es0,St),
-+    {block,Line,Es1};
-+expr({'if',Line,Cs0},St) ->
-+    Cs1 = icr_clauses(Cs0,St),
-+    {'if',Line,Cs1};
-+expr({'case',Line,E0,Cs0},St) ->
-+    E1 = expr(E0,St),
-+    Cs1 = icr_clauses(Cs0,St),
-+    {'case',Line,E1,Cs1};
-+expr({'receive',Line,Cs0},St) ->
-+    Cs1 = icr_clauses(Cs0,St),
-+    {'receive',Line,Cs1};
-+expr({'receive',Line,Cs0,To0,ToEs0},St) ->
-+    To1 = expr(To0,St),
-+    ToEs1 = exprs(ToEs0,St),
-+    Cs1 = icr_clauses(Cs0,St),
-+    {'receive',Line,Cs1,To1,ToEs1};
-+expr({'try',Line,Es0,Scs0,Ccs0,As0},St) ->
-+    Es1 = exprs(Es0,St),
-+    Scs1 = icr_clauses(Scs0,St),
-+    Ccs1 = icr_clauses(Ccs0,St),
-+    As1 = exprs(As0,St),
-+    {'try',Line,Es1,Scs1,Ccs1,As1};
-+expr({'fun',_,{function,_,_,_}}=ExtFun,_St) ->
-+    ExtFun;
-+expr({'fun',Line,Body},St) ->
-+    case Body of
-+      {clauses,Cs0} ->
-+          Cs1 = fun_clauses(Cs0,St),
-+          {'fun',Line,{clauses,Cs1}};
-+      {function,F,A} = Function ->
-+          {F1,A1} = update_function_name({F,A}),
-+          if A1 =:= A ->
-+                  {'fun',Line,Function};
-+             true ->
-+                  %% Must rewrite local fun-name to a fun that does a
-+                  %% call with the extra THIS parameter.
-+                  As = make_vars(A, Line),
-+                  As1 = As ++ [{var,Line,'THIS'}],
-+                  Call = {call,Line,{atom,Line,F1},As1},
-+                  Cs = [{clause,Line,As,[],[Call]}],
-+                  {'fun',Line,{clauses,Cs}}
-+          end;
-+      {function,_M,_F,_A} = Fun4 ->           %This is an error in lint!
-+          {'fun',Line,Fun4}
-+    end;
-+expr({call,Lc,{atom,_,instance}=Name,As0},St) ->
-+    %% All local functions 'instance(...)' are static by definition,
-+    %% so they do not take a 'THIS' argument when called
-+    As1 = expr_list(As0,St),
-+    {call,Lc,Name,As1};
-+expr({call,Lc,{atom,_,new}=Name,As0},St) ->
-+    %% All local functions 'new(...)' are static by definition,
-+    %% so they do not take a 'THIS' argument when called
-+    As1 = expr_list(As0,St),
-+    {call,Lc,Name,As1};
-+expr({call,Lc,{atom,_Lf,F}=Atom,As0}, #pmod{defined=Def}=St) ->
-+    As1 = expr_list(As0,St),
-+    case gb_sets:is_member({F,length(As0)}, Def) of
-+      false ->
-+          %% BIF or imported function.
-+          {call,Lc,Atom,As1};
-+      true ->
-+          %% Local function call - needs THIS parameter.
-+          {call,Lc,Atom,As1 ++ [{var,0,'THIS'}]}
-+    end;
-+expr({call,Line,F0,As0},St) ->
-+    %% Other function call
-+    F1 = expr(F0,St),
-+    As1 = expr_list(As0,St),
-+    {call,Line,F1,As1};
-+expr({'catch',Line,E0},St) ->
-+    E1 = expr(E0,St),
-+    {'catch',Line,E1};
-+expr({match,Line,P,E0},St) ->
-+    E1 = expr(E0,St),
-+    {match,Line,P,E1};
-+expr({bin,Line,Fs},St) ->
-+    Fs2 = pattern_grp(Fs,St),
-+    {bin,Line,Fs2};
-+expr({op,Line,Op,A0},St) ->
-+    A1 = expr(A0,St),
-+    {op,Line,Op,A1};
-+expr({op,Line,Op,L0,R0},St) ->
-+    L1 = expr(L0,St),
-+    R1 = expr(R0,St),
-+    {op,Line,Op,L1,R1};
-+%% The following are not allowed to occur anywhere!
-+expr({remote,Line,M0,F0},St) ->
-+    M1 = expr(M0,St),
-+    F1 = expr(F0,St),
-+    {remote,Line,M1,F1}.
-+
-+expr_list([E0|Es],St) ->
-+    E1 = expr(E0,St),
-+    [E1|expr_list(Es,St)];
-+expr_list([],_St) -> [].
-+
-+record_fields([{record_field,L,K,E0}|T],St) ->
-+    E = expr(E0,St),
-+    [{record_field,L,K,E}|record_fields(T,St)];
-+record_fields([],_) -> [].
-+
-+icr_clauses([C0|Cs],St) ->
-+    C1 = clause(C0,St),
-+    [C1|icr_clauses(Cs,St)];
-+icr_clauses([],_St) -> [].
-+
-+lc_bc_quals([{generate,Line,P,E0}|Qs],St) ->
-+    E1 = expr(E0,St),
-+    [{generate,Line,P,E1}|lc_bc_quals(Qs,St)];
-+lc_bc_quals([{b_generate,Line,P,E0}|Qs],St) ->
-+    E1 = expr(E0,St),
-+    [{b_generate,Line,P,E1}|lc_bc_quals(Qs,St)];
-+lc_bc_quals([E0|Qs],St) ->
-+    E1 = expr(E0,St),
-+    [E1|lc_bc_quals(Qs,St)];
-+lc_bc_quals([],_St) -> [].
-+
-+fun_clauses([C0|Cs],St) ->
-+    C1 = clause(C0,St),
-+    [C1|fun_clauses(Cs,St)];
-+fun_clauses([],_St) -> [].
-+
-+make_vars(N, L) ->
-+    make_vars(1, N, L).
-+
-+make_vars(N, M, L) when N =< M ->
-+    V = list_to_atom("X"++integer_to_list(N)),
-+    [{var,L,V} | make_vars(N + 1, M, L)];
-+make_vars(_, _, _) ->
-+    [].
-diff --git a/src/sockjs_multiplex_channel.erl b/src/sockjs_multiplex_channel.erl
-index cbb8274..5afcfa3 100644
---- a/src/sockjs_multiplex_channel.erl
-+++ b/src/sockjs_multiplex_channel.erl
-@@ -1,3 +1,5 @@
-+-compile({parse_transform,pmod_pt}).
-+
- -module(sockjs_multiplex_channel, [Conn, Topic]).
- -export([send/1, close/0, close/2, info/0]).
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0003-websocket-subprotocol b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0003-websocket-subprotocol
deleted file mode 100644 (file)
index be298cb..0000000
+++ /dev/null
@@ -1,93 +0,0 @@
-diff --git a/src/sockjs_cowboy_handler.erl b/src/sockjs_cowboy_handler.erl
-index 3b1ffe3..d2f05ae 100644
---- a/src/sockjs_cowboy_handler.erl
-+++ b/src/sockjs_cowboy_handler.erl
-@@ -30,21 +30,35 @@ terminate(_Req, _Service) ->
- %% --------------------------------------------------------------------------
--websocket_init(_TransportName, Req, Service = #service{logger = Logger}) ->
--    Req0 = Logger(Service, {cowboy, Req}, websocket),
-+websocket_init(_TransportName, Req,
-+               Service = #service{logger        = Logger,
-+                                  subproto_pref = SubProtocolPref}) ->
-+    Req3 = case cowboy_http_req:header(<<"Sec-Websocket-Protocol">>, Req) of
-+               {undefined, Req1} ->
-+                   Req1;
-+               {SubProtocols, Req1} ->
-+                   SelectedSubProtocol =
-+                     choose_subprotocol_bin(SubProtocols, SubProtocolPref),
-+                   {ok, Req2} = cowboy_http_req:set_resp_header(
-+                                  <<"Sec-Websocket-Protocol">>,
-+                                  SelectedSubProtocol, Req1),
-+                   Req2
-+           end,
-+
-+    Req4 = Logger(Service, {cowboy, Req3}, websocket),
-     Service1 = Service#service{disconnect_delay = 5*60*1000},
--    {Info, Req1} = sockjs_handler:extract_info(Req0),
-+    {Info, Req5} = sockjs_handler:extract_info(Req4),
-     SessionPid = sockjs_session:maybe_create(undefined, Service1, Info),
--    {RawWebsocket, {cowboy, Req3}} =
--        case sockjs_handler:get_action(Service, Req1) of
--            {{match, WS}, Req2} when WS =:= websocket orelse
-+    {RawWebsocket, {cowboy, Req7}} =
-+        case sockjs_handler:get_action(Service, Req5) of
-+            {{match, WS}, Req6} when WS =:= websocket orelse
-                                      WS =:= rawwebsocket ->
--                {WS, Req2}
-+                {WS, Req6}
-         end,
-     self() ! go,
--    {ok, Req3, {RawWebsocket, SessionPid}}.
-+    {ok, Req7, {RawWebsocket, SessionPid}}.
- websocket_handle({text, Data}, Req, {RawWebsocket, SessionPid} = S) ->
-     case sockjs_ws_handler:received(RawWebsocket, SessionPid, Data) of
-@@ -69,3 +83,15 @@ websocket_info(shutdown, Req, S) ->
- websocket_terminate(_Reason, _Req, {RawWebsocket, SessionPid}) ->
-     sockjs_ws_handler:close(RawWebsocket, SessionPid),
-     ok.
-+
-+%% --------------------------------------------------------------------------
-+
-+choose_subprotocol_bin(SubProtocols, Pref) ->
-+    choose_subprotocol(re:split(SubProtocols, ", *"), Pref).
-+choose_subprotocol(SubProtocols, undefined) ->
-+    erlang:hd(lists:reverse(lists:sort(SubProtocols)));
-+choose_subprotocol(SubProtocols, Pref) ->
-+    case lists:filter(fun (E) -> lists:member(E, SubProtocols) end, Pref) of
-+        [Hd | _] -> Hd;
-+        []       -> choose_subprotocol(SubProtocols, undefined)
-+    end.
-diff --git a/src/sockjs_handler.erl b/src/sockjs_handler.erl
-index b706453..81d4ef7 100644
---- a/src/sockjs_handler.erl
-+++ b/src/sockjs_handler.erl
-@@ -29,7 +29,9 @@ init_state(Prefix, Callback, State, Options) ->
-              response_limit =
-                  proplists:get_value(response_limit, Options, 128*1024),
-              logger =
--                 proplists:get_value(logger, Options, fun default_logger/3)
-+                 proplists:get_value(logger, Options, fun default_logger/3),
-+             subproto_pref =
-+                 proplists:get_value(subproto_pref, Options)
-             }.
- %% --------------------------------------------------------------------------
-diff --git a/src/sockjs_internal.hrl b/src/sockjs_internal.hrl
-index 629b2fe..eed5597 100644
---- a/src/sockjs_internal.hrl
-+++ b/src/sockjs_internal.hrl
-@@ -15,7 +15,8 @@
-                   disconnect_delay , %%  non_neg_integer()
-                   heartbeat_delay  , %%  non_neg_integer()
-                   response_limit   , %%  non_neg_integer()
--                  logger            %%  logger()
-+                  logger           , %%  logger()
-+                  subproto_pref      %%  [binary()]
-                   }).
- %% -type(service() :: #service{}).
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/CONTRIBUTING.md b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/Makefile b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/generate-0000-remove-spec-patch.sh b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/generate-0000-remove-spec-patch.sh
deleted file mode 100644 (file)
index fc2067f..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# To update the patch run this script.
-cd sockjs-erlang-git/src
-git checkout *
-sed 's#^\(-type.*\)#%% \1#g' -i *
-sed 's#^\(-spec.*\)#%% \1#g' -i *
-sed 's#^\(-record.*\)::\(.*\)$#\1, %% \2#g' * -i
-sed 's#^\( .*\)::\(.*\),$#\1, %% \2#g' * -i
-sed 's#^\( .*\)::\(.*\)$#\1 %% \2#g' * -i
-git diff > ../../0000-remove-spec-patch.diff
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/hash.mk b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/hash.mk
deleted file mode 100644 (file)
index 45a2ab1..0000000
+++ /dev/null
@@ -1 +0,0 @@
-UPSTREAM_SHORT_HASH:=3132eb9
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/package.mk b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/package.mk
deleted file mode 100644 (file)
index 3613309..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-APP_NAME:=sockjs
-DEPS:=cowboy-wrapper
-
-UPSTREAM_GIT:=https://github.com/rabbitmq/sockjs-erlang.git
-UPSTREAM_REVISION:=3132eb920aea9abd5c5e65349331c32d8cfa961e # 0.3.4
-RETAIN_ORIGINAL_VERSION:=true
-WRAPPER_PATCHES:=\
-        0000-remove-spec-patch.diff \
-        0001-a2b-b2a.diff \
-        0002-parameterised-modules-r16a.diff \
-        0003-websocket-subprotocol
-
-ORIGINAL_APP_FILE:=$(CLONE_DIR)/src/$(APP_NAME).app.src
-DO_NOT_GENERATE_APP_FILE=true
-
-ERLC_OPTS:=$(ERLC_OPTS) -D no_specs
-
-define construct_app_commands
-       cp $(CLONE_DIR)/LICENSE-* $(APP_DIR)
-       rm $(APP_DIR)/ebin/pmod_pt.beam
-endef
-
-define package_rules
-
-$(CLONE_DIR)/ebin/sockjs_multiplex_channel.beam: $(CLONE_DIR)/ebin/pmod_pt.beam
-
-endef
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/.done b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/.done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/cowboy_echo.erl b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/cowboy_echo.erl
deleted file mode 100755 (executable)
index 41f969d..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env escript
-%%! -smp disable +A1 +K true -pa ebin deps/cowboy/ebin -input
--module(cowboy_echo).
--mode(compile).
-
--export([main/1]).
-
-%% Cowboy callbacks
--export([init/3, handle/2, terminate/2]).
-
-
-main(_) ->
-    Port = 8081,
-    application:start(sockjs),
-    application:start(cowboy),
-
-    SockjsState = sockjs_handler:init_state(
-                    <<"/echo">>, fun service_echo/3, state, []),
-
-    VhostRoutes = [{[<<"echo">>, '...'], sockjs_cowboy_handler, SockjsState},
-                   {'_', ?MODULE, []}],
-    Routes = [{'_',  VhostRoutes}], % any vhost
-
-    io:format(" [*] Running at http://localhost:~p~n", [Port]),
-    cowboy:start_listener(http, 100,
-                          cowboy_tcp_transport, [{port,     Port}],
-                          cowboy_http_protocol, [{dispatch, Routes}]),
-    receive
-        _ -> ok
-    end.
-
-%% --------------------------------------------------------------------------
-
-init({_Any, http}, Req, []) ->
-    {ok, Req, []}.
-
-handle(Req, State) ->
-    {ok, Data} = file:read_file("./examples/echo.html"),
-    {ok, Req1} = cowboy_http_req:reply(200, [{<<"Content-Type">>, "text/html"}],
-                                       Data, Req),
-    {ok, Req1, State}.
-
-terminate(_Req, _State) ->
-    ok.
-
-%% --------------------------------------------------------------------------
-
-service_echo(_Conn, init, state)        -> {ok, state};
-service_echo(Conn, {recv, Data}, state) -> Conn:send(Data);
-service_echo(_Conn, closed, state)      -> {ok, state}.
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/multiplex.js b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/multiplex.js
deleted file mode 100644 (file)
index f525c1c..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-// ****
-
-var DumbEventTarget = function() {
-    this._listeners = {};
-};
-DumbEventTarget.prototype._ensure = function(type) {
-    if(!(type in this._listeners)) this._listeners[type] = [];
-};
-DumbEventTarget.prototype.addEventListener = function(type, listener) {
-    this._ensure(type);
-    this._listeners[type].push(listener);
-};
-DumbEventTarget.prototype.emit = function(type) {
-    this._ensure(type);
-    var args = Array.prototype.slice.call(arguments, 1);
-    if(this['on' + type]) this['on' + type].apply(this, args);
-    for(var i=0; i < this._listeners[type].length; i++) {
-        this._listeners[type][i].apply(this, args);
-    }
-};
-
-
-// ****
-
-var MultiplexedWebSocket = function(ws) {
-    var that = this;
-    this.ws = ws;
-    this.channels = {};
-    this.ws.addEventListener('message', function(e) {
-        var t = e.data.split(',');
-        var type = t.shift(), name = t.shift(),  payload = t.join();
-        if(!(name in that.channels)) {
-            return;
-        }
-        var sub = that.channels[name];
-
-        switch(type) {
-        case 'uns':
-            delete that.channels[name];
-            sub.emit('close', {});
-            break;
-        case 'msg':
-            sub.emit('message', {data: payload});
-            break
-        }
-    });
-};
-MultiplexedWebSocket.prototype.channel = function(raw_name) {
-    return this.channels[escape(raw_name)] =
-        new Channel(this.ws, escape(raw_name), this.channels);
-};
-
-
-var Channel = function(ws, name, channels) {
-    DumbEventTarget.call(this);
-    var that = this;
-    this.ws = ws;
-    this.name = name;
-    this.channels = channels;
-    var onopen = function() {
-        that.ws.send('sub,' + that.name);
-        that.emit('open');
-    };
-    if(ws.readyState > 0) {
-        setTimeout(onopen, 0);
-    } else {
-        this.ws.addEventListener('open', onopen);
-    }
-};
-Channel.prototype = new DumbEventTarget()
-
-Channel.prototype.send = function(data) {
-    this.ws.send('msg,' + this.name + ',' + data);
-};
-Channel.prototype.close = function() {
-    var that = this;
-    this.ws.send('uns,' + this.name);
-    delete this.channels[this.name];
-    setTimeout(function(){that.emit('close', {})},0);
-};
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar
deleted file mode 100755 (executable)
index 0d08040..0000000
Binary files a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar and /dev/null differ
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/pmod_pt.erl b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/pmod_pt.erl
deleted file mode 100644 (file)
index db21974..0000000
+++ /dev/null
@@ -1,461 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2013. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(pmod_pt).
--export([parse_transform/2,
-        format_error/1]).
-
-%% Expand function definition forms of parameterized module.
-%% The code is based on the code in sys_expand_pmod which used to be
-%% included in the compiler, but details are different because
-%% sys_pre_expand has not been run. In particular:
-%%
-%% * Record definitions are still present and must be handled.
-%%
-%% * (Syntatic) local calls may actually be calls to an imported
-%%   funtion or a BIF. It is a local call if and only if there
-%%   is a definition for the function in the module.
-%%
-%% * When we introduce the module parameters and 'THIS' in each
-%%   function, we must artificially use it to avoid a warning for
-%%   unused variables.
-%%
-%% * On the other hand, we don't have to worry about module_info/0,1
-%%   because they have not been added yet.
-
--record(pmod, {parameters,
-              defined
-             }).
-
-parse_transform(Forms0, _Options) ->
-    put(?MODULE, []),
-    Forms = transform(Forms0),
-    case erase(?MODULE) of
-       [] ->
-           Forms;
-       [_|_]=Errors ->
-           File = get_file(Forms),
-           {error,[{File,Errors}],[]}
-    end.
-  
-format_error(extends_self) ->
-    "cannot extend from self";
-format_error(define_instance) ->
-    "defining instance function not allowed in parameterized module".
-
-add_error(Line, Error) ->
-    put(?MODULE, get(?MODULE) ++ [{Line,?MODULE,Error}]).
-
-get_file([{attribute,_,file,{File,_}}|_]) -> File;
-get_file([_|T]) -> get_file(T).
-    
-transform(Forms0) ->
-    Def = collect_defined(Forms0),
-    {Base,ModAs,Forms1} = attribs(Forms0, [], undefined, []),
-    {Mod,Ps0} = case ModAs of
-                   {M0,P0} -> {M0,P0};
-                   M0 -> {M0,undefined}
-               end,
-    Forms2 = case Ps0 of
-                undefined ->
-                    Forms1;
-                _ ->
-                    pmod_expand(Forms1, Mod, Base, Ps0, Def)
-            end,
-
-    %% Add new functions.
-    NewFs0 = maybe_extend(Base, Mod, Ps0),
-    NewExps = collect_defined(NewFs0),
-    Forms3 = add_attributes(Forms2, [{attribute,0,export,NewExps}]),
-    add_new_funcs(Forms3, NewFs0).
-
-pmod_expand(Forms0, Mod, Base, Ps0, Def) ->
-    Ps = if is_atom(Base) ->
-                ['BASE' | Ps0];
-           true ->
-                Ps0
-        end,
-    St0 = #pmod{parameters=Ps,defined=gb_sets:from_list(Def)},
-    {Forms1,_} = forms(Forms0, St0),
-    Forms2 = update_exps(Forms1),
-    Forms3 = update_forms(Forms2),
-    NewFs0 = add_instance(Mod, Ps, []),
-    NewFs = ensure_new(Base, Ps0, NewFs0),
-    Forms = add_new_funcs(Forms3, NewFs),
-    NewExps = collect_defined(NewFs),
-    add_attributes(Forms, [{attribute,0,export,NewExps}]).
-
-add_attributes([{attribute,_,module,_}=F|Fs], Attrs) ->
-    [F|Attrs++Fs];
-add_attributes([F|Fs], Attrs) ->
-    [F|add_attributes(Fs, Attrs)].
-
-add_new_funcs([{eof,_}|_]=Fs, NewFs) ->
-    NewFs ++ Fs;
-add_new_funcs([F|Fs], Es) ->
-    [F|add_new_funcs(Fs, Es)].
-
-maybe_extend([], _, _) ->
-    %% No 'extends' attribute.
-    [];
-maybe_extend(Base, _Mod, undefined) ->
-    %% There is a an 'extends' attribute; the module is not parameterized.
-    Name = '$handle_undefined_function',
-    Args = [{var,0,'Func'},{var,0,'Args'}],
-    Body = [make_apply({atom,0,Base}, {var,0,'Func'}, {var,0,'Args'})],
-    F = {function,0,Name,2,[{clause,0,Args,[],Body}]},
-    [F];
-maybe_extend(Base, Mod, Ps) ->
-    %% There is a an 'extends' attribute; the module is parameterized.
-    Name = '$handle_undefined_function',
-    Args = [{var,0,'Func'},{var,0,'Args'}],
-    DontCares = [{var,0,'_'} || _ <- Ps],
-    TuplePs = {tuple,0,[{atom,0,Mod},{var,0,'BaseVars'}|DontCares]},
-    G = [{call,0,{atom,0,is_atom},
-         [{call,0,{atom,0,element},
-           [{integer,0,1},{var,0,'BaseVars'}]}]}],
-    FixedArgs = make_lists_rev([{var,0,'Rs'},
-                               {cons,0,{var,0,'BaseVars'},{nil,0}}]),
-    Body = [{'case',0,make_lists_rev([{var,0,'Args'}]),
-            [{clause,0,[{cons,0,TuplePs,{var,0,'Rs'}}],[G],
-              [make_apply({atom,0,Base}, {var,0,'Func'}, FixedArgs)]},
-             {clause,0,[{var,0,'_'}],[],
-              [make_apply({atom,0,Base}, {var,0,'Func'}, {var,0,'Args'})]}
-            ]}],
-    F = {function,0,Name,2,[{clause,0,Args,[],Body}]},
-    [F].
-
-make_apply(M, F, A) ->
-    {call,0,{remote,0,{atom,0,erlang},{atom,0,apply}},[M,F,A]}.
-
-make_lists_rev(As) ->
-    {call,0,{remote,0,{atom,0,lists},{atom,0,reverse}},As}.
-
-ensure_new(Base, Ps, Fs) ->
-    case has_new(Fs) of
-       true ->
-           Fs;
-       false ->
-           add_new(Base, Ps, Fs)
-    end.
-
-has_new([{function,_L,new,_A,_Cs} | _Fs]) ->
-    true;
-has_new([_ | Fs]) ->
-    has_new(Fs);
-has_new([]) ->
-    false.
-
-add_new(Base, Ps, Fs) ->
-    Vs = [{var,0,V} || V <- Ps],
-    As = if is_atom(Base) ->
-                [{call,0,{remote,0,{atom,0,Base},{atom,0,new}},Vs} | Vs];
-           true ->
-                Vs
-        end,
-    Body = [{call,0,{atom,0,instance},As}],
-    add_func(new, Vs, Body, Fs).
-
-add_instance(Mod, Ps, Fs) ->
-    Vs = [{var,0,V} || V <- Ps],
-    AbsMod = [{tuple,0,[{atom,0,Mod}|Vs]}],
-    add_func(instance, Vs, AbsMod, Fs).
-
-add_func(Name, Args, Body, Fs) ->
-    A = length(Args),
-    F = {function,0,Name,A,[{clause,0,Args,[],Body}]},
-    [F|Fs].
-
-collect_defined(Fs) ->
-    [{N,A} || {function,_,N,A,_} <- Fs].
-
-attribs([{attribute,Line,module,{Mod,_}=ModAs}|T], Base, _, Acc) ->
-    attribs(T, Base, ModAs, [{attribute,Line,module,Mod}|Acc]);
-attribs([{attribute,_,module,Mod}=H|T], Base, _, Acc) ->
-    attribs(T, Base, Mod, [H|Acc]);
-attribs([{attribute,Line,extends,Base}|T], Base0, Ps, Acc) when is_atom(Base) ->
-    Mod = case Ps of
-             {Mod0,_} -> Mod0;
-             Mod0 -> Mod0
-         end,
-    case Mod of
-       Base ->
-           add_error(Line, extends_self),
-           attribs(T, Base0, Ps, Acc);
-       _ ->
-           attribs(T, Base, Ps, Acc)
-    end;
-attribs([H|T], Base, Ps, Acc) ->
-    attribs(T, Base, Ps, [H|Acc]);
-attribs([], Base, Ps, Acc) ->
-    {Base,Ps,lists:reverse(Acc)}.
-
-%% This is extremely simplistic for now; all functions get an extra
-%% parameter, whether they need it or not, except for static functions.
-
-update_function_name({F,A}) when F =/= new ->
-    {F,A+1};
-update_function_name(E) ->
-    E.
-
-update_forms([{function,L,N,A,Cs}|Fs]) when N =/= new ->
-    [{function,L,N,A+1,Cs}|update_forms(Fs)];
-update_forms([F|Fs]) ->
-    [F|update_forms(Fs)];
-update_forms([]) ->
-    [].
-
-update_exps([{attribute,Line,export,Es0}|T]) ->
-    Es = [update_function_name(E) || E <- Es0],
-    [{attribute,Line,export,Es}|update_exps(T)];
-update_exps([H|T]) ->
-    [H|update_exps(T)];
-update_exps([]) ->
-    [].
-
-%% Process the program forms.
-
-forms([F0|Fs0],St0) ->
-    {F1,St1} = form(F0,St0),
-    {Fs1,St2} = forms(Fs0,St1),
-    {[F1|Fs1],St2};
-forms([], St0) ->
-    {[], St0}.
-
-%% Only function definitions are of interest here. State is not updated.
-form({function,Line,instance,_Arity,_Clauses}=F,St) ->
-    add_error(Line, define_instance),
-    {F,St};
-form({function,Line,Name0,Arity0,Clauses0},St) when Name0 =/= new ->
-    {Name,Arity,Clauses} = function(Name0, Arity0, Clauses0, St),
-    {{function,Line,Name,Arity,Clauses},St};
-%% Pass anything else through
-form(F,St) -> {F,St}.
-
-function(Name, Arity, Clauses0, St) ->
-    Clauses1 = clauses(Clauses0,St),
-    {Name,Arity,Clauses1}.
-
-clauses([C|Cs],#pmod{parameters=Ps}=St) ->
-    {clause,L,H,G,B0} = clause(C,St),
-    T = {tuple,L,[{var,L,V} || V <- ['_'|Ps]]},
-    B = [{match,L,{var,L,'_'},{var,L,V}} || V <- ['THIS'|Ps]] ++ B0,
-    [{clause,L,H++[{match,L,T,{var,L,'THIS'}}],G,B}|clauses(Cs,St)];
-clauses([],_St) -> [].
-
-clause({clause,Line,H,G,B0},St) ->
-    %% We never update H and G, so we will just copy them.
-    B1 = exprs(B0,St),
-    {clause,Line,H,G,B1}.
-
-pattern_grp([{bin_element,L1,E1,S1,T1} | Fs],St) ->
-    S2 = case S1 of
-            default ->
-                default;
-            _ ->
-                expr(S1,St)
-        end,
-    T2 = case T1 of
-            default ->
-                default;
-            _ ->
-                bit_types(T1)
-        end,
-    [{bin_element,L1,expr(E1,St),S2,T2} | pattern_grp(Fs,St)];
-pattern_grp([],_St) ->
-    [].
-
-bit_types([]) ->
-    [];
-bit_types([Atom | Rest]) when is_atom(Atom) ->
-    [Atom | bit_types(Rest)];
-bit_types([{Atom, Integer} | Rest]) when is_atom(Atom), is_integer(Integer) ->
-    [{Atom, Integer} | bit_types(Rest)].
-
-exprs([E0|Es],St) ->
-    E1 = expr(E0,St),
-    [E1|exprs(Es,St)];
-exprs([],_St) -> [].
-
-expr({var,_L,_V}=Var,_St) ->
-    Var;
-expr({integer,_Line,_I}=Integer,_St) -> Integer;
-expr({float,_Line,_F}=Float,_St) -> Float;
-expr({atom,_Line,_A}=Atom,_St) -> Atom;
-expr({string,_Line,_S}=String,_St) -> String;
-expr({char,_Line,_C}=Char,_St) -> Char;
-expr({nil,_Line}=Nil,_St) -> Nil;
-expr({cons,Line,H0,T0},St) ->
-    H1 = expr(H0,St),
-    T1 = expr(T0,St),
-    {cons,Line,H1,T1};
-expr({lc,Line,E0,Qs0},St) ->
-    Qs1 = lc_bc_quals(Qs0,St),
-    E1 = expr(E0,St),
-    {lc,Line,E1,Qs1};
-expr({bc,Line,E0,Qs0},St) ->
-    Qs1 = lc_bc_quals(Qs0,St),
-    E1 = expr(E0,St),
-    {bc,Line,E1,Qs1};
-expr({tuple,Line,Es0},St) ->
-    Es1 = expr_list(Es0,St),
-    {tuple,Line,Es1};
-expr({record,Line,Name,Is0},St) ->
-    Is = record_fields(Is0,St),
-    {record,Line,Name,Is};
-expr({record,Line,E0,Name,Is0},St) ->
-    E = expr(E0,St),
-    Is = record_fields(Is0,St),
-    {record,Line,E,Name,Is};
-expr({record_field,Line,E0,Name,Key},St) ->
-    E = expr(E0,St),
-    {record_field,Line,E,Name,Key};
-expr({block,Line,Es0},St) ->
-    Es1 = exprs(Es0,St),
-    {block,Line,Es1};
-expr({'if',Line,Cs0},St) ->
-    Cs1 = icr_clauses(Cs0,St),
-    {'if',Line,Cs1};
-expr({'case',Line,E0,Cs0},St) ->
-    E1 = expr(E0,St),
-    Cs1 = icr_clauses(Cs0,St),
-    {'case',Line,E1,Cs1};
-expr({'receive',Line,Cs0},St) ->
-    Cs1 = icr_clauses(Cs0,St),
-    {'receive',Line,Cs1};
-expr({'receive',Line,Cs0,To0,ToEs0},St) ->
-    To1 = expr(To0,St),
-    ToEs1 = exprs(ToEs0,St),
-    Cs1 = icr_clauses(Cs0,St),
-    {'receive',Line,Cs1,To1,ToEs1};
-expr({'try',Line,Es0,Scs0,Ccs0,As0},St) ->
-    Es1 = exprs(Es0,St),
-    Scs1 = icr_clauses(Scs0,St),
-    Ccs1 = icr_clauses(Ccs0,St),
-    As1 = exprs(As0,St),
-    {'try',Line,Es1,Scs1,Ccs1,As1};
-expr({'fun',_,{function,_,_,_}}=ExtFun,_St) ->
-    ExtFun;
-expr({'fun',Line,Body},St) ->
-    case Body of
-       {clauses,Cs0} ->
-           Cs1 = fun_clauses(Cs0,St),
-           {'fun',Line,{clauses,Cs1}};
-       {function,F,A} = Function ->
-           {F1,A1} = update_function_name({F,A}),
-           if A1 =:= A ->
-                   {'fun',Line,Function};
-              true ->
-                   %% Must rewrite local fun-name to a fun that does a
-                   %% call with the extra THIS parameter.
-                   As = make_vars(A, Line),
-                   As1 = As ++ [{var,Line,'THIS'}],
-                   Call = {call,Line,{atom,Line,F1},As1},
-                   Cs = [{clause,Line,As,[],[Call]}],
-                   {'fun',Line,{clauses,Cs}}
-           end;
-       {function,_M,_F,_A} = Fun4 ->           %This is an error in lint!
-           {'fun',Line,Fun4}
-    end;
-expr({call,Lc,{atom,_,instance}=Name,As0},St) ->
-    %% All local functions 'instance(...)' are static by definition,
-    %% so they do not take a 'THIS' argument when called
-    As1 = expr_list(As0,St),
-    {call,Lc,Name,As1};
-expr({call,Lc,{atom,_,new}=Name,As0},St) ->
-    %% All local functions 'new(...)' are static by definition,
-    %% so they do not take a 'THIS' argument when called
-    As1 = expr_list(As0,St),
-    {call,Lc,Name,As1};
-expr({call,Lc,{atom,_Lf,F}=Atom,As0}, #pmod{defined=Def}=St) ->
-    As1 = expr_list(As0,St),
-    case gb_sets:is_member({F,length(As0)}, Def) of
-       false ->
-           %% BIF or imported function.
-           {call,Lc,Atom,As1};
-       true ->
-           %% Local function call - needs THIS parameter.
-           {call,Lc,Atom,As1 ++ [{var,0,'THIS'}]}
-    end;
-expr({call,Line,F0,As0},St) ->
-    %% Other function call
-    F1 = expr(F0,St),
-    As1 = expr_list(As0,St),
-    {call,Line,F1,As1};
-expr({'catch',Line,E0},St) ->
-    E1 = expr(E0,St),
-    {'catch',Line,E1};
-expr({match,Line,P,E0},St) ->
-    E1 = expr(E0,St),
-    {match,Line,P,E1};
-expr({bin,Line,Fs},St) ->
-    Fs2 = pattern_grp(Fs,St),
-    {bin,Line,Fs2};
-expr({op,Line,Op,A0},St) ->
-    A1 = expr(A0,St),
-    {op,Line,Op,A1};
-expr({op,Line,Op,L0,R0},St) ->
-    L1 = expr(L0,St),
-    R1 = expr(R0,St),
-    {op,Line,Op,L1,R1};
-%% The following are not allowed to occur anywhere!
-expr({remote,Line,M0,F0},St) ->
-    M1 = expr(M0,St),
-    F1 = expr(F0,St),
-    {remote,Line,M1,F1}.
-
-expr_list([E0|Es],St) ->
-    E1 = expr(E0,St),
-    [E1|expr_list(Es,St)];
-expr_list([],_St) -> [].
-
-record_fields([{record_field,L,K,E0}|T],St) ->
-    E = expr(E0,St),
-    [{record_field,L,K,E}|record_fields(T,St)];
-record_fields([],_) -> [].
-
-icr_clauses([C0|Cs],St) ->
-    C1 = clause(C0,St),
-    [C1|icr_clauses(Cs,St)];
-icr_clauses([],_St) -> [].
-
-lc_bc_quals([{generate,Line,P,E0}|Qs],St) ->
-    E1 = expr(E0,St),
-    [{generate,Line,P,E1}|lc_bc_quals(Qs,St)];
-lc_bc_quals([{b_generate,Line,P,E0}|Qs],St) ->
-    E1 = expr(E0,St),
-    [{b_generate,Line,P,E1}|lc_bc_quals(Qs,St)];
-lc_bc_quals([E0|Qs],St) ->
-    E1 = expr(E0,St),
-    [E1|lc_bc_quals(Qs,St)];
-lc_bc_quals([],_St) -> [].
-
-fun_clauses([C0|Cs],St) ->
-    C1 = clause(C0,St),
-    [C1|fun_clauses(Cs,St)];
-fun_clauses([],_St) -> [].
-
-make_vars(N, L) ->
-    make_vars(1, N, L).
-
-make_vars(N, M, L) when N =< M ->
-    V = list_to_atom("X"++integer_to_list(N)),
-    [{var,L,V} | make_vars(N + 1, M, L)];
-make_vars(_, _, _) ->
-    [].
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.app.src b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.app.src
deleted file mode 100644 (file)
index 4e21412..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{application, sockjs,
- [
-  {description, "SockJS"},
-  {vsn, "0.3.4"},
-  {modules, []},
-  {registered, []},
-  {applications, [
-                  kernel,
-                  stdlib
-                 ]},
-  {mod, { sockjs_app, []}}
- ]}.
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.erl b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.erl
deleted file mode 100644 (file)
index 98b1173..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
--module(sockjs).
-
--export([send/2, close/1, close/3, info/1]).
-
-%% -type(conn() :: {sockjs_session, any()}).
-
-%% Send data over a connection.
-%% -spec send(iodata(), conn()) -> ok.
-send(Data, Conn = {sockjs_session, _}) ->
-    sockjs_session:send(Data, Conn).
-
-%% Initiate a close of a connection.
-%% -spec close(conn()) -> ok.
-close(Conn) ->
-    close(1000, "Normal closure", Conn).
-
-%% -spec close(non_neg_integer(), string(), conn()) -> ok.
-close(Code, Reason, Conn = {sockjs_session, _}) ->
-    sockjs_session:close(Code, Reason, Conn).
-
-%% -spec info(conn()) -> [{atom(), any()}].
-info(Conn = {sockjs_session, _}) ->
-    sockjs_session:info(Conn).
-
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_http.erl b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_http.erl
deleted file mode 100644 (file)
index 837b64f..0000000
+++ /dev/null
@@ -1,137 +0,0 @@
--module(sockjs_http).
-
--export([path/1, method/1, body/1, body_qs/1, header/2, jsessionid/1,
-         callback/1, peername/1, sockname/1]).
--export([reply/4, chunk_start/3, chunk/2, chunk_end/1]).
--export([hook_tcp_close/1, unhook_tcp_close/1, abruptly_kill/1]).
--include("sockjs_internal.hrl").
-
-%% --------------------------------------------------------------------------
-
-%% -spec path(req()) -> {string(), req()}.
-path({cowboy, Req})       -> {Path, Req1} = cowboy_http_req:raw_path(Req),
-                             {binary_to_list(Path), {cowboy, Req1}}.
-
-%% -spec method(req()) -> {atom(), req()}.
-method({cowboy, Req})       -> {Method, Req1} = cowboy_http_req:method(Req),
-                               case is_binary(Method) of
-                                   true  -> {list_to_atom(binary_to_list(Method)), {cowboy, Req1}};
-                                   false -> {Method, {cowboy, Req1}}
-                               end.
-
-%% -spec body(req()) -> {binary(), req()}.
-body({cowboy, Req})       -> {ok, Body, Req1} = cowboy_http_req:body(Req),
-                             {Body, {cowboy, Req1}}.
-
-%% -spec body_qs(req()) -> {binary(), req()}.
-body_qs(Req) ->
-    {H, Req1} =  header('Content-Type', Req),
-    case H of
-        H when H =:= "text/plain" orelse H =:= "" ->
-            body(Req1);
-        _ ->
-            %% By default assume application/x-www-form-urlencoded
-            body_qs2(Req1)
-    end.
-body_qs2({cowboy, Req}) ->
-    {BodyQS, Req1} = cowboy_http_req:body_qs(Req),
-    case proplists:get_value(<<"d">>, BodyQS) of
-        undefined ->
-            {<<>>, {cowboy, Req1}};
-        V ->
-            {V, {cowboy, Req1}}
-    end.
-
-%% -spec header(atom(), req()) -> {nonempty_string() | undefined, req()}.
-header(K, {cowboy, Req})->
-    {H, Req2} = cowboy_http_req:header(K, Req),
-    {V, Req3} = case H of
-                    undefined ->
-                        cowboy_http_req:header(list_to_binary(atom_to_list(K)), Req2);
-                    _ -> {H, Req2}
-                end,
-    case V of
-        undefined -> {undefined, {cowboy, Req3}};
-        _         -> {binary_to_list(V), {cowboy, Req3}}
-    end.
-
-%% -spec jsessionid(req()) -> {nonempty_string() | undefined, req()}.
-jsessionid({cowboy, Req}) ->
-    {C, Req2} = cowboy_http_req:cookie(<<"JSESSIONID">>, Req),
-    case C of
-        _ when is_binary(C) ->
-            {binary_to_list(C), {cowboy, Req2}};
-        undefined ->
-            {undefined, {cowboy, Req2}}
-    end.
-
-%% -spec callback(req()) -> {nonempty_string() | undefined, req()}.
-callback({cowboy, Req}) ->
-    {CB, Req1} = cowboy_http_req:qs_val(<<"c">>, Req),
-    case CB of
-        undefined -> {undefined, {cowboy, Req1}};
-        _         -> {binary_to_list(CB), {cowboy, Req1}}
-    end.
-
-%% -spec peername(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
-peername({cowboy, Req}) ->
-    {P, Req1} = cowboy_http_req:peer(Req),
-    {P, {cowboy, Req1}}.
-
-%% -spec sockname(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}.
-sockname({cowboy, Req} = R) ->
-    {ok, _T, S} = cowboy_http_req:transport(Req),
-    %% Cowboy has peername(), but doesn't have sockname() equivalent.
-    {ok, Addr} = case S of
-                     _ when is_port(S) ->
-                         inet:sockname(S);
-                     _ ->
-                         {ok, {{0,0,0,0}, 0}}
-                 end,
-    {Addr, R}.
-
-%% --------------------------------------------------------------------------
-
-%% -spec reply(non_neg_integer(), headers(), iodata(), req()) -> req().
-reply(Code, Headers, Body, {cowboy, Req}) ->
-    Body1 = iolist_to_binary(Body),
-    {ok, Req1} = cowboy_http_req:reply(Code, enbinary(Headers), Body1, Req),
-    {cowboy, Req1}.
-
-%% -spec chunk_start(non_neg_integer(), headers(), req()) -> req().
-chunk_start(Code, Headers, {cowboy, Req}) ->
-    {ok, Req1} = cowboy_http_req:chunked_reply(Code, enbinary(Headers), Req),
-    {cowboy, Req1}.
-
-%% -spec chunk(iodata(), req()) -> {ok | error, req()}.
-chunk(Chunk, {cowboy, Req} = R) ->
-    case cowboy_http_req:chunk(Chunk, Req) of
-        ok          -> {ok, R};
-        {error, _E} -> {error, R}
-                      %% This shouldn't happen too often, usually we
-                      %% should catch tco socket closure before.
-    end.
-
-%% -spec chunk_end(req()) -> req().
-chunk_end({cowboy, _Req} = R)  -> R.
-
-enbinary(L) -> [{list_to_binary(K), list_to_binary(V)} || {K, V} <- L].
-
-
-%% -spec hook_tcp_close(req()) -> req().
-hook_tcp_close(R = {cowboy, Req}) ->
-    {ok, T, S} = cowboy_http_req:transport(Req),
-    T:setopts(S,[{active,once}]),
-    R.
-
-%% -spec unhook_tcp_close(req()) -> req().
-unhook_tcp_close(R = {cowboy, Req}) ->
-    {ok, T, S} = cowboy_http_req:transport(Req),
-    T:setopts(S,[{active,false}]),
-    R.
-
-%% -spec abruptly_kill(req()) -> req().
-abruptly_kill(R = {cowboy, Req}) ->
-    {ok, T, S} = cowboy_http_req:transport(Req),
-    T:close(S),
-    R.
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_internal.hrl b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_internal.hrl
deleted file mode 100644 (file)
index eed5597..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-
-%% -type(req()          :: {cowboy, any()}).
-
-%% -type(user_session() :: nonempty_string()).
-%% -type(emittable()    :: init|closed|{recv, binary()}).
-%% -type(callback()     :: fun((user_session(), emittable(), any()) -> ok)).
-%% -type(logger()       :: fun((any(), req(), websocket|http) -> req())).
-
--record(service, {prefix           , %%  nonempty_string(),
-                  callback         , %%  callback()
-                  state            , %%  any()
-                  sockjs_url       , %%  nonempty_string()
-                  cookie_needed    , %%  boolean()
-                  websocket        , %%  boolean()
-                  disconnect_delay , %%  non_neg_integer()
-                  heartbeat_delay  , %%  non_neg_integer()
-                  response_limit   , %%  non_neg_integer()
-                  logger           , %%  logger()
-                  subproto_pref      %%  [binary()]
-                  }).
-
-%% -type(service() :: #service{}).
-
-%% -type(headers() :: list({nonempty_string(), nonempty_string()})).
-%% -type(server()  :: nonempty_string()).
-%% -type(session() :: nonempty_string()).
-
-%% -type(frame()   :: {open, nil} |
-%%                   {close, {non_neg_integer(), string()}} |
-%%                   {data, list(iodata())} |
-%%                   {heartbeat, nil} ).
-
-%% -type(info()    :: [{atom(), any()}]).
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex.erl b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex.erl
deleted file mode 100644 (file)
index b4ff03a..0000000
+++ /dev/null
@@ -1,79 +0,0 @@
--module(sockjs_multiplex).
-
--behaviour(sockjs_service).
-
--export([init_state/1]).
--export([sockjs_init/2, sockjs_handle/3, sockjs_terminate/2]).
-
--record(service, {callback, state, vconn}).
-
-%% --------------------------------------------------------------------------
-
-init_state(Services) ->
-    L = [{Topic, #service{callback = Callback, state = State}} ||
-            {Topic, Callback, State} <- Services],
-    {orddict:from_list(L), orddict:new()}.
-
-
-
-sockjs_init(_Conn, {_Services, _Channels} = S) ->
-    {ok, S}.
-
-sockjs_handle(Conn, Data, {Services, Channels}) ->
-    [Type, Topic, Payload] = split($,, binary_to_list(Data), 3),
-    case orddict:find(Topic, Services) of
-        {ok, Service} ->
-            Channels1 = action(Conn, {Type, Topic, Payload}, Service, Channels),
-            {ok, {Services, Channels1}};
-        _Else ->
-            {ok, {Services, Channels}}
-    end.
-
-sockjs_terminate(_Conn, {Services, Channels}) ->
-    _ = [ {emit(closed, Channel)} ||
-            {_Topic, Channel} <- orddict:to_list(Channels) ],
-    {ok, {Services, orddict:new()}}.
-
-
-action(Conn, {Type, Topic, Payload}, Service, Channels) ->
-    case {Type, orddict:is_key(Topic, Channels)} of
-        {"sub", false} ->
-            Channel = Service#service{
-                         vconn = sockjs_multiplex_channel:new(
-                                   Conn, Topic)
-                        },
-            orddict:store(Topic, emit(init, Channel), Channels);
-        {"uns", true} ->
-            Channel = orddict:fetch(Topic, Channels),
-            emit(closed, Channel),
-            orddict:erase(Topic, Channels);
-        {"msg", true} ->
-            Channel = orddict:fetch(Topic, Channels),
-            orddict:store(Topic, emit({recv, Payload}, Channel), Channels);
-        _Else ->
-            %% Ignore
-            Channels
-    end.
-
-
-emit(What, Channel = #service{callback = Callback,
-                              state    = State,
-                              vconn    = VConn}) ->
-    case Callback(VConn, What, State) of
-        {ok, State1} -> Channel#service{state = State1};
-        ok           -> Channel
-    end.
-
-
-%% --------------------------------------------------------------------------
-
-split(Char, Str, Limit) ->
-    Acc = split(Char, Str, Limit, []),
-    lists:reverse(Acc).
-split(_Char, _Str, 0, Acc) -> Acc;
-split(Char, Str, Limit, Acc) ->
-    {L, R} = case string:chr(Str, Char) of
-                 0 -> {Str, ""};
-                 I -> {string:substr(Str, 1, I-1), string:substr(Str, I+1)}
-             end,
-    split(Char, R, Limit-1, [L | Acc]).
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex_channel.erl b/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex_channel.erl
deleted file mode 100644 (file)
index 5afcfa3..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
--compile({parse_transform,pmod_pt}).
-
--module(sockjs_multiplex_channel, [Conn, Topic]).
-
--export([send/1, close/0, close/2, info/0]).
-
-send(Data) ->
-    Conn:send(iolist_to_binary(["msg", ",", Topic, ",", Data])).
-
-close() ->
-    close(1000, "Normal closure").
-
-close(_Code, _Reason) ->
-    Conn:send(iolist_to_binary(["uns", ",", Topic])).
-
-info() ->
-    Conn:info() ++ [{topic, Topic}].
-
diff --git a/rabbitmq-server/plugins-src/umbrella.mk b/rabbitmq-server/plugins-src/umbrella.mk
deleted file mode 100644 (file)
index 5764ff3..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-# The default goal
-dist:
-
-UMBRELLA_BASE_DIR:=..
-
-include $(UMBRELLA_BASE_DIR)/common.mk
-
-# We start at the initial package (i.e. the one in the current directory)
-PACKAGE_DIR:=$(call canonical_path,.)
-
-# Produce all of the releasable artifacts of this package
-.PHONY: dist
-dist: $(PACKAGE_DIR)+dist
-
-# Produce a source tarball for this package
-.PHONY: srcdist
-srcdist: $(PACKAGE_DIR)+srcdist
-
-# Clean the package and all its dependencies
-.PHONY: clean
-clean: $(PACKAGE_DIR)+clean-with-deps
-
-# Clean just the initial package
-.PHONY: clean-local
-clean-local: $(PACKAGE_DIR)+clean
-
-# Run erlang with the package, its tests, and all its dependencies
-# available.
-.PHONY: run
-run: $(PACKAGE_DIR)+run
-
-# Run the broker with the package, its tests, and all its dependencies
-# available.
-.PHONY: run-in-broker
-run-in-broker: $(PACKAGE_DIR)+run-in-broker
-
-# Runs the package's tests
-.PHONY: test
-test: $(PACKAGE_DIR)+test
-
-# Test the package with code coverage recording on.  Note that
-# coverage only covers the in-broker tests.
-.PHONY: coverage
-coverage: $(PACKAGE_DIR)+coverage
-
-# Runs the package's tests
-.PHONY: check-xref
-check-xref: $(PACKAGE_DIR)+check-xref
-
-# Do the initial package
-include $(UMBRELLA_BASE_DIR)/do-package.mk
-
-# We always need the coverage package to support the coverage goal
-PACKAGE_DIR:=$(COVERAGE_PATH)
-$(eval $(call do_package,$(COVERAGE_PATH)))
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/webmachine-wrapper/.srcdist_done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/10-remove-crypto-dependency.patch b/rabbitmq-server/plugins-src/webmachine-wrapper/10-remove-crypto-dependency.patch
deleted file mode 100644 (file)
index 7cabbd4..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-diff --git a/src/webmachine.app.src b/src/webmachine.app.src
-index eb949a2..2c46c3f 100644
---- a/src/webmachine.app.src
-+++ b/src/webmachine.app.src
-@@ -7,7 +7,6 @@
-   {registered, []},
-   {applications, [kernel,
-                   stdlib,
--                  crypto,
-                   mochiweb]},
-   {mod, {webmachine_app, []}},
-   {env, []}
-diff --git a/src/webmachine.erl b/src/webmachine.erl
-index 47f1ce2..2e5be1b 100644
---- a/src/webmachine.erl
-+++ b/src/webmachine.erl
-@@ -28,7 +28,6 @@
- %% @doc Start the webmachine server.
- start() ->
-     webmachine_deps:ensure(),
--    application:start(crypto),
-     application:start(webmachine).
- %% @spec stop() -> ok
-diff --git a/src/webmachine_decision_core.erl b/src/webmachine_decision_core.erl
-index 194c48d..3379388 100644
---- a/src/webmachine_decision_core.erl
-+++ b/src/webmachine_decision_core.erl
-@@ -722,32 +722,17 @@ variances() ->
-     end,
-     Accept ++ AcceptEncoding ++ AcceptCharset ++ resource_call(variances).
---ifndef(old_hash).
- md5(Bin) ->
--    crypto:hash(md5, Bin).
-+    erlang:md5(Bin).
- md5_init() ->
--    crypto:hash_init(md5).
-+    erlang:md5_init().
- md5_update(Ctx, Bin) ->
--    crypto:hash_update(Ctx, Bin).
-+    erlang:md5_update(Ctx, Bin).
- md5_final(Ctx) ->
--    crypto:hash_final(Ctx).
---else.
--md5(Bin) ->
--    crypto:md5(Bin).
--
--md5_init() ->
--    crypto:md5_init().
--
--md5_update(Ctx, Bin) ->
--    crypto:md5_update(Ctx, Bin).
--
--md5_final(Ctx) ->
--    crypto:md5_final(Ctx).
---endif.
--
-+    erlang:md5_final(Ctx).
- compute_body_md5() ->
-     case wrcall({req_body, 52428800}) of
-diff --git a/src/webmachine_request.erl b/src/webmachine_request.erl
-index 2a5ff7a..ee459a3 100644
---- a/src/webmachine_request.erl
-+++ b/src/webmachine_request.erl
-@@ -624,7 +624,7 @@ parts_to_body(BodyList, Size, Req) when is_list(BodyList) ->
-             {CT, _} ->
-                 CT
-         end,
--    Boundary = mochihex:to_hex(crypto:rand_bytes(8)),
-+    Boundary = mochihex:to_hex(mochiweb_util:rand_bytes(8)),
-     HeaderList = [{"Content-Type",
-                    ["multipart/byteranges; ",
-                     "boundary=", Boundary]}],
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/CONTRIBUTING.md b/rabbitmq-server/plugins-src/webmachine-wrapper/CONTRIBUTING.md
deleted file mode 100644 (file)
index 69a4b4a..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-## Overview
-
-RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
-Pull requests is the primary place of discussing code changes.
-
-## How to Contribute
-
-The process is fairly standard:
-
- * Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
- * Create a branch with a descriptive name in the relevant repositories
- * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
- * Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
- * Be patient. We will get to your pull request eventually
-
-If what you are going to work on is a substantial change, please first ask the core team
-of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
-
-
-## (Brief) Code of Conduct
-
-In one line: don't be a dick.
-
-Be respectful to the maintainers and other contributors. Open source
-contributors put long hours into developing projects and doing user
-support. Those projects and user support are available for free. We
-believe this deserves some respect.
-
-Be respectful to people of all races, genders, religious beliefs and
-political views. Regardless of how brilliant a pull request is
-technically, we will not tolerate disrespectful or aggressive
-behaviour.
-
-Contributors who violate this straightforward Code of Conduct will see
-their pull requests closed and locked.
-
-
-## Contributor Agreement
-
-If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
-you submit your pull request. This will make it much easier (in some cases, possible)
-for the RabbitMQ team at Pivotal to merge your contribution.
-
-
-## Where to Ask Questions
-
-If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/Makefile b/rabbitmq-server/plugins-src/webmachine-wrapper/Makefile
deleted file mode 100644 (file)
index 482105a..0000000
+++ /dev/null
@@ -1 +0,0 @@
-include ../umbrella.mk
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/hash.mk b/rabbitmq-server/plugins-src/webmachine-wrapper/hash.mk
deleted file mode 100644 (file)
index d5fc525..0000000
+++ /dev/null
@@ -1 +0,0 @@
-UPSTREAM_SHORT_HASH:=e9359c7
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/license_info b/rabbitmq-server/plugins-src/webmachine-wrapper/license_info
deleted file mode 100644 (file)
index c00fb92..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-Webmachine is Copyright (c) Basho Technologies and is covered by the
-Apache License 2.0.  It was downloaded from http://webmachine.basho.com/
-
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/package.mk b/rabbitmq-server/plugins-src/webmachine-wrapper/package.mk
deleted file mode 100644 (file)
index 65770e7..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-APP_NAME:=webmachine
-DEPS:=mochiweb-wrapper
-
-UPSTREAM_GIT:=https://github.com/rabbitmq/webmachine.git
-UPSTREAM_REVISION:=e9359c7092b228f671417abe68319913f1aebe46
-RETAIN_ORIGINAL_VERSION:=true
-
-WRAPPER_PATCHES:=10-remove-crypto-dependency.patch
-
-ORIGINAL_APP_FILE=$(CLONE_DIR)/src/$(APP_NAME).app.src
-DO_NOT_GENERATE_APP_FILE=true
-
-define package_rules
-
-# This rule is run *before* the one in do_package.mk
-$(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done::
-       cp $(CLONE_DIR)/LICENSE $(PACKAGE_DIR)/LICENSE-Apache-Basho
-
-endef
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.done b/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.done
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.travis.yml b/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.travis.yml
deleted file mode 100644 (file)
index e603470..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-language: erlang
-notifications:
-  webhooks: http://basho-engbot.herokuapp.com/travis?key=66724b424957d598311ba00bb2d137fcae4eae21
-  email: eng@basho.com
-otp_release:
-  - R15B01
-  - R15B
-  - R14B04
-  - R14B03
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar.config b/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar.config
deleted file mode 100644 (file)
index 50393dc..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-%%-*- mode: erlang -*-
-{erl_opts, [warnings_as_errors]}.
-{cover_enabled, true}.
-{edoc_opts, [{preprocess, true}]}.
-
-{deps, [
-        {mochiweb, "1.5.1*", {git, "git://github.com/basho/mochiweb",
-                            {tag, "1.5.1p6"}}}
-        ]}.
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine.app.src b/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine.app.src
deleted file mode 100644 (file)
index 2c46c3f..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-%%-*- mode: erlang -*-
-{application, webmachine,
- [
-  {description, "webmachine"},
-  {vsn, "1.10.3"},
-  {modules, []},
-  {registered, []},
-  {applications, [kernel,
-                  stdlib,
-                  mochiweb]},
-  {mod, {webmachine_app, []}},
-  {env, []}
- ]}.
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/test/etag_test.erl b/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/test/etag_test.erl
deleted file mode 100644 (file)
index 2f4bbe9..0000000
+++ /dev/null
@@ -1,145 +0,0 @@
-%% @author Justin Sheehy <justin@basho.com>
-%% @author Andy Gross <andy@basho.com>
-%% @copyright 2007-2010 Basho Technologies
-%%
-%%    Licensed under the Apache License, Version 2.0 (the "License");
-%%    you may not use this file except in compliance with the License.
-%%    You may obtain a copy of the License at
-%%
-%%        http://www.apache.org/licenses/LICENSE-2.0
-%%
-%%    Unless required by applicable law or agreed to in writing, software
-%%    distributed under the License is distributed on an "AS IS" BASIS,
-%%    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%%    See the License for the specific language governing permissions and
-%%    limitations under the License.
-
--module(etag_test).
-
-
--ifdef(EQC).
-
--include("wm_reqdata.hrl").
--include_lib("eqc/include/eqc.hrl").
--include_lib("eunit/include/eunit.hrl").
-
--compile(export_all).
-
--define(QC_OUT(P),
-        eqc:on_output(fun(Str, Args) -> io:format(user, Str, Args) end, P)).
-
-unique(L) ->
-    lists:reverse(lists:foldl(fun(Elem, Acc) ->
-                                      case lists:member(Elem, Acc) of
-                                          true ->
-                                              Acc;
-                                          false ->
-                                              [Elem | Acc]
-                                      end
-                              end, [], L)).
-
-etag(Bin) ->
-    integer_to_list(erlang:crc32(Bin)).
-
-etag_list([]) ->
-    "*";
-etag_list(Bins) ->
-    string:join([[$", etag(B), $"] || B <- Bins], ",").
-
-http_request(_Match, _IfVals, _NewVal, 0) ->
-    error;
-http_request(Match, IfVals, NewVal, Count) ->
-    case httpc:request(put, {"http://localhost:12000/etagtest/foo",
-                             [{Match, etag_list(IfVals)}],
-                             "binary/octet-stream",
-                             NewVal},
-                       [], []) of
-        {ok, Result} ->
-            {ok, Result};
-        {error, socket_closed_remotely} ->
-            io:format(user, "Retry!\n", []),
-            http_request(Match, IfVals, NewVal, Count-1)
-    end.
-
-etag_prop() ->
-    ?LET({AllVals, Match}, {non_empty(list(binary())), oneof(["If-Match", "If-None-Match"])},
-         ?FORALL({IfVals0, CurVal, NewVal},
-                 {list(oneof(AllVals)), oneof(AllVals), oneof(AllVals)},
-                 begin
-                     ets:insert(?MODULE, [{etag, etag(CurVal)}]),
-                     IfVals = unique(IfVals0),
-                     {ok, Result} = http_request(Match, IfVals, NewVal, 3),
-                     Code = element(2, element(1, Result)),
-                     ExpectedCode =
-                         expected_response_code(Match,
-                                                IfVals,
-                                                lists:member(CurVal, IfVals)),
-                     equals(ExpectedCode, Code)
-                 end)).
-
-expected_response_code("If-Match", _, true) ->
-    204;
-expected_response_code("If-Match", [], false) ->
-    204;
-expected_response_code("If-Match", _, false) ->
-    412;
-expected_response_code("If-None-Match", _, true) ->
-    412;
-expected_response_code("If-None-Match", [], false) ->
-    412;
-expected_response_code("If-None-Match", _, false) ->
-    204.
-
-etag_test_() ->
-    {spawn,
-     [{setup,
-       fun setup/0,
-       fun cleanup/1,
-       [
-        {timeout, 12,
-         ?_assert(eqc:quickcheck(eqc:testing_time(10, ?QC_OUT(etag_prop()))))}
-       ]}]}.
-
-setup() ->
-    %% Setup ETS table to hold current etag value
-    ets:new(?MODULE, [named_table, public]),
-
-    %% Spin up webmachine
-    WebConfig = [{ip, "0.0.0.0"}, {port, 12000},
-                 {dispatch, [{["etagtest", '*'], ?MODULE, []}]}],
-    {ok, Pid0} = webmachine_sup:start_link(),
-    {ok, Pid1} = webmachine_mochiweb:start(WebConfig),
-    link(Pid1),
-    {Pid0, Pid1}.
-
-cleanup({Pid0, Pid1}) ->
-    %% clean up
-    unlink(Pid0),
-    exit(Pid0, kill),
-    unlink(Pid1),
-    exit(Pid1, kill).
-
-init([]) ->
-    {ok, undefined}.
-
-allowed_methods(ReqData, Context) ->
-    {['PUT'], ReqData, Context}.
-
-content_types_accepted(ReqData, Context) ->
-    {[{"binary/octet-stream", on_put}], ReqData, Context}.
-
-on_put(ReqData, Context) ->
-    {ok, ReqData, Context}.
-
-generate_etag(ReqData, Context) ->
-    case ets:lookup(?MODULE, etag) of
-        [] ->
-            {undefined, ReqData, Context};
-        [{etag, ETag}] ->
-            {ETag, ReqData, Context}
-    end.
-
-ping(ReqData, State) ->
-    {pong, ReqData, State}.
-
--endif.
diff --git a/rabbitmq-server/scripts/rabbitmq-env.bat b/rabbitmq-server/scripts/rabbitmq-env.bat
deleted file mode 100755 (executable)
index 7465072..0000000
+++ /dev/null
@@ -1,257 +0,0 @@
-@echo off
-
-REM Scopes the variables to the current batch file
-REM setlocal
-
-rem Preserve values that might contain exclamation marks before
-rem enabling delayed expansion
-set TDP0=%~dp0
-REM setlocal enabledelayedexpansion
-
-REM SCRIPT_DIR=`dirname $SCRIPT_PATH`
-REM RABBITMQ_HOME="${SCRIPT_DIR}/.."
-set SCRIPT_DIR=%TDP0%
-set RABBITMQ_HOME=%SCRIPT_DIR%..
-
-REM ## Set defaults
-REM . ${SCRIPT_DIR}/rabbitmq-defaults
-call "%SCRIPT_DIR%\rabbitmq-defaults.bat"
-
-REM These common defaults aren't referenced in the batch scripts
-REM ## Common defaults
-REM SERVER_ERL_ARGS="+P 1048576"
-REM 
-REM # warn about old rabbitmq.conf file, if no new one
-REM if [ -f /etc/rabbitmq/rabbitmq.conf ] && \
-REM    [ ! -f ${CONF_ENV_FILE} ] ; then
-REM     echo -n "WARNING: ignoring /etc/rabbitmq/rabbitmq.conf -- "
-REM     echo "location has moved to ${CONF_ENV_FILE}"
-REM fi
-
-REM ERL_ARGS aren't referenced in the batch scripts
-REM Common defaults
-REM set SERVER_ERL_ARGS=+P 1048576
-
-REM ## Get configuration variables from the configure environment file
-REM [ -f ${CONF_ENV_FILE} ] && . ${CONF_ENV_FILE} || true
-if exist "!RABBITMQ_CONF_ENV_FILE!" (
-       call "!RABBITMQ_CONF_ENV_FILE!"
-)
-
-REM Check for the short names here too
-if "!RABBITMQ_USE_LONGNAME!"=="" (
-    if "!USE_LONGNAME!"=="" (
-           set RABBITMQ_NAME_TYPE="-sname"
-       )
-)
-
-if "!RABBITMQ_USE_LONGNAME!"=="true" (
-    if "!USE_LONGNAME!"=="true" (
-        set RABBITMQ_NAME_TYPE="-name"
-       )
-)
-
-if "!COMPUTERNAME!"=="" (
-    set COMPUTERNAME=localhost
-)
-
-REM [ "x" = "x$RABBITMQ_NODENAME" ] && RABBITMQ_NODENAME=${NODENAME}
-if "!RABBITMQ_NODENAME!"=="" (
-    if "!NODENAME!"=="" (
-        set RABBITMQ_NODENAME=rabbit@!COMPUTERNAME!
-    ) else (
-        set RABBITMQ_NODENAME=!NODENAME!
-    )
-)
-
-REM 
-REM ##--- Set environment vars RABBITMQ_<var_name> to defaults if not set
-REM 
-REM DEFAULT_NODE_IP_ADDRESS=auto
-REM DEFAULT_NODE_PORT=5672
-REM [ "x" = "x$RABBITMQ_NODE_IP_ADDRESS" ] && RABBITMQ_NODE_IP_ADDRESS=${NODE_IP_ADDRESS}
-REM [ "x" = "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_NODE_PORT=${NODE_PORT}
-REM [ "x" = "x$RABBITMQ_NODE_IP_ADDRESS" ] && [ "x" != "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_NODE_IP_ADDRESS=${DEFAULT_NODE_IP_ADDRESS}
-REM [ "x" != "x$RABBITMQ_NODE_IP_ADDRESS" ] && [ "x" = "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_NODE_PORT=${DEFAULT_NODE_PORT}
-
-REM if "!RABBITMQ_NODE_IP_ADDRESS!"=="" (
-REM    if not "!RABBITMQ_NODE_PORT!"=="" (
-REM       set RABBITMQ_NODE_IP_ADDRESS=auto
-REM    )
-REM ) else (
-REM    if "!RABBITMQ_NODE_PORT!"=="" (
-REM       set RABBITMQ_NODE_PORT=5672
-REM    )
-REM )
-
-if "!RABBITMQ_NODE_IP_ADDRESS!"=="" (
-       if not "!NODE_IP_ADDRESS!"=="" (
-               set RABBITMQ_NODE_IP_ADDRESS=!NODE_IP_ADDRESS!
-       )
-)
-
-if "!RABBITMQ_NODE_PORT!"=="" (
-       if not "!NODE_PORT!"=="" (
-               set RABBITMQ_NODE_PORT=!NODE_PORT!
-       )
-)
-
-if "!RABBITMQ_NODE_IP_ADDRESS!"=="" (
-    if not "!RABBITMQ_NODE_PORT!"=="" (
-       set RABBITMQ_NODE_IP_ADDRESS=auto
-    )
-) else (
-    if "!RABBITMQ_NODE_PORT!"=="" (
-       set RABBITMQ_NODE_PORT=5672
-    )
-)
-
-REM [ "x" = "x$RABBITMQ_DIST_PORT" ] && RABBITMQ_DIST_PORT=${DIST_PORT}
-REM [ "x" = "x$RABBITMQ_DIST_PORT" ] && [ "x" = "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_DIST_PORT=$((${DEFAULT_NODE_PORT} + 20000))
-REM [ "x" = "x$RABBITMQ_DIST_PORT" ] && [ "x" != "x$RABBITMQ_NODE_PORT" ] && RABBITMQ_DIST_PORT=$((${RABBITMQ_NODE_PORT} + 20000))
-
-if "!RABBITMQ_DIST_PORT!"=="" (
-       if "!DIST_PORT!"=="" (
-          if "!RABBITMQ_NODE_PORT!"=="" (
-                 set RABBITMQ_DIST_PORT=25672
-          ) else (
-                 set /a RABBITMQ_DIST_PORT=20000+!RABBITMQ_NODE_PORT!
-          )
-   ) else (
-               set RABBITMQ_DIST_PORT=!DIST_PORT!
-   )
-)
-
-REM [ "x" = "x$RABBITMQ_SERVER_ERL_ARGS" ] && RABBITMQ_SERVER_ERL_ARGS=${SERVER_ERL_ARGS}
-REM No Windows equivalent
-
-REM [ "x" = "x$RABBITMQ_CONFIG_FILE" ] && RABBITMQ_CONFIG_FILE=${CONFIG_FILE}
-if "!RABBITMQ_CONFIG_FILE!"=="" (
-       if "!CONFIG_FILE!"=="" (
-               set RABBITMQ_CONFIG_FILE=!RABBITMQ_BASE!\rabbitmq
-       ) else (
-               set RABBITMQ_CONFIG_FILE=!CONFIG_FILE!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_LOG_BASE" ] && RABBITMQ_LOG_BASE=${LOG_BASE}
-if "!RABBITMQ_LOG_BASE!"=="" (
-       if "!LOG_BASE!"=="" (
-               set RABBITMQ_LOG_BASE=!RABBITMQ_BASE!\log
-       ) else (
-               set RABBITMQ_LOG_BASE=!LOG_BASE!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_MNESIA_BASE" ] && RABBITMQ_MNESIA_BASE=${MNESIA_BASE}
-if "!RABBITMQ_MNESIA_BASE!"=="" (
-       if "!MNESIA_BASE!"=="" (
-               set RABBITMQ_MNESIA_BASE=!RABBITMQ_BASE!\db
-       ) else (
-               set RABBITMQ_MNESIA_BASE=!MNESIA_BASE!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_SERVER_START_ARGS" ] && RABBITMQ_SERVER_START_ARGS=${SERVER_START_ARGS}
-REM No Windows equivalent 
-
-REM [ "x" = "x$RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS" ] && RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS=${SERVER_ADDITIONAL_ERL_ARGS}
-REM No Windows equivalent
-
-REM [ "x" = "x$RABBITMQ_MNESIA_DIR" ] && RABBITMQ_MNESIA_DIR=${MNESIA_DIR}
-REM [ "x" = "x$RABBITMQ_MNESIA_DIR" ] && RABBITMQ_MNESIA_DIR=${RABBITMQ_MNESIA_BASE}/${RABBITMQ_NODENAME}
-if "!RABBITMQ_MNESIA_DIR!"=="" (
-       if "!MNESIA_DIR!"=="" (
-               set RABBITMQ_MNESIA_DIR=!RABBITMQ_MNESIA_BASE!/!RABBITMQ_NODENAME!-mnesia
-       ) else (
-               set RABBITMQ_MNESIA_DIR=!MNESIA_DIR!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_PID_FILE" ] && RABBITMQ_PID_FILE=${PID_FILE}
-REM [ "x" = "x$RABBITMQ_PID_FILE" ] && RABBITMQ_PID_FILE=${RABBITMQ_MNESIA_DIR}.pid
-REM No Windows equivalent
-
-REM [ "x" = "x$RABBITMQ_BOOT_MODULE" ] && RABBITMQ_BOOT_MODULE=${BOOT_MODULE}
-if "!RABBITMQ_BOOT_MODULE!"=="" (
-       if "!BOOT_MODULE!"=="" (
-               set RABBITMQ_BOOT_MODULE=rabbit
-       ) else (
-               set RABBITMQ_BOOT_MODULE=!BOOT_MODULE!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_PLUGINS_EXPAND_DIR" ] && RABBITMQ_PLUGINS_EXPAND_DIR=${PLUGINS_EXPAND_DIR}
-REM [ "x" = "x$RABBITMQ_PLUGINS_EXPAND_DIR" ] && RABBITMQ_PLUGINS_EXPAND_DIR=${RABBITMQ_MNESIA_BASE}/${RABBITMQ_NODENAME}-plugins-expand
-if "!RABBITMQ_PLUGINS_EXPAND_DIR!"=="" (
-       if "!PLUGINS_EXPAND_DIR!"=="" (
-               set RABBITMQ_PLUGINS_EXPAND_DIR=!RABBITMQ_MNESIA_BASE!/!RABBITMQ_NODENAME!-plugins-expand
-       ) else (
-               set RABBITMQ_PLUGINS_EXPAND_DIR=!PLUGINS_EXPAND_DIR!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_ENABLED_PLUGINS_FILE" ] && RABBITMQ_ENABLED_PLUGINS_FILE=${ENABLED_PLUGINS_FILE}
-if "!RABBITMQ_ENABLED_PLUGINS_FILE!"=="" (
-       if "!ENABLED_PLUGINS_FILE!"=="" (
-               set RABBITMQ_ENABLED_PLUGINS_FILE=!RABBITMQ_BASE!\enabled_plugins
-       ) else (
-               set RABBITMQ_ENABLED_PLUGINS_FILE=!ENABLED_PLUGINS_FILE!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_PLUGINS_DIR" ] && RABBITMQ_PLUGINS_DIR=${PLUGINS_DIR}
-if "!RABBITMQ_PLUGINS_DIR!"=="" (
-       if "!PLUGINS_DIR!"=="" (
-               set RABBITMQ_PLUGINS_DIR=!RABBITMQ_BASE!\plugins
-       ) else (
-               set RABBITMQ_PLUGINS_DIR=!PLUGINS_DIR!
-       )
-)
-
-REM ## Log rotation
-REM [ "x" = "x$RABBITMQ_LOGS" ] && RABBITMQ_LOGS=${LOGS}
-REM [ "x" = "x$RABBITMQ_LOGS" ] && RABBITMQ_LOGS="${RABBITMQ_LOG_BASE}/${RABBITMQ_NODENAME}.log"
-if "!RABBITMQ_LOGS!"=="" (
-       if "!LOGS!"=="" (
-               set LOGS=!RABBITMQ_LOG_BASE!\!RABBITMQ_NODENAME!.log
-       ) else (
-               set LOGS=!LOGS!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_SASL_LOGS" ] && RABBITMQ_SASL_LOGS=${SASL_LOGS}
-REM [ "x" = "x$RABBITMQ_SASL_LOGS" ] && RABBITMQ_SASL_LOGS="${RABBITMQ_LOG_BASE}/${RABBITMQ_NODENAME}-sasl.log"
-if "!RABBITMQ_SASL_LOGS!"=="" (
-       if "!SASL_LOGS!"=="" (
-               set SASL_LOGS=!RABBITMQ_LOG_BASE!\!RABBITMQ_NODENAME!-sasl.log
-       ) else (
-               set SASL_LOGS=!SASL_LOGS!
-       )
-)
-
-REM [ "x" = "x$RABBITMQ_CTL_ERL_ARGS" ] && RABBITMQ_CTL_ERL_ARGS=${CTL_ERL_ARGS}
-if "!$RABBITMQ_CTL_ERL_ARGS!"=="" (
-       if not "!CTL_ERL_ARGS!"=="" (
-               set RABBITMQ_CTL_ERL_ARGS=!CTL_ERL_ARGS!
-       )
-)
-
-REM ADDITIONAL WINDOWS ONLY CONFIG ITEMS
-REM rabbitmq-plugins.bat
-REM if "!RABBITMQ_SERVICENAME!"=="" (
-REM     set RABBITMQ_SERVICENAME=RabbitMQ
-REM )
-
-if "!RABBITMQ_SERVICENAME!"=="" (
-       if "!SERVICENAME!"=="" (
-               set RABBITMQ_SERVICENAME=RabbitMQ
-       ) else (
-               set RABBITMQ_SERVICENAME=!SERVICENAME!
-       )
-)
-REM ##--- End of overridden <var_name> variables
-REM 
-REM # Since we source this elsewhere, don't accidentally stop execution
-REM true
diff --git a/rabbitmq-server/src/pg2_fixed.erl b/rabbitmq-server/src/pg2_fixed.erl
deleted file mode 100644 (file)
index 8926b83..0000000
+++ /dev/null
@@ -1,400 +0,0 @@
-%% This is the version of pg2 from R14B02, which contains the fix
-%% described at
-%% http://erlang.2086793.n4.nabble.com/pg2-still-busted-in-R13B04-td2230601.html.
-%% The changes are a search-and-replace to rename the module and avoid
-%% clashes with other versions of pg2, and also a simple rewrite of
-%% "andalso" and "orelse" expressions to case statements where the second
-%% operand is not a boolean since R12B does not allow this.
-
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
--module(pg2_fixed).
-
--export([create/1, delete/1, join/2, leave/2]).
--export([get_members/1, get_local_members/1]).
--export([get_closest_pid/1, which_groups/0]).
--export([start/0,start_link/0,init/1,handle_call/3,handle_cast/2,handle_info/2,
-         terminate/2]).
-
-%%% As of R13B03 monitors are used instead of links.
-
-%%%
-%%% Exported functions
-%%%
-
--spec start_link() -> {'ok', pid()} | {'error', term()}.
-
-start_link() ->
-    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
--spec start() -> {'ok', pid()} | {'error', term()}.
-
-start() ->
-    ensure_started().
-
--spec create(term()) -> 'ok'.
-
-create(Name) ->
-    ensure_started(),
-    case ets:member(pg2_fixed_table, {group, Name}) of
-        false ->
-            global:trans({{?MODULE, Name}, self()},
-                         fun() ->
-                                 gen_server:multi_call(?MODULE, {create, Name})
-                         end),
-            ok;
-        true ->
-            ok
-    end.
-
--type name() :: term().
-
--spec delete(name()) -> 'ok'.
-
-delete(Name) ->
-    ensure_started(),
-    global:trans({{?MODULE, Name}, self()},
-                 fun() ->
-                         gen_server:multi_call(?MODULE, {delete, Name})
-                 end),
-    ok.
-
--spec join(name(), pid()) -> 'ok' | {'error', {'no_such_group', term()}}.
-
-join(Name, Pid) when is_pid(Pid) ->
-    ensure_started(),
-    case ets:member(pg2_fixed_table, {group, Name}) of
-        false ->
-            {error, {no_such_group, Name}};
-        true ->
-            global:trans({{?MODULE, Name}, self()},
-                         fun() ->
-                                 gen_server:multi_call(?MODULE,
-                                                       {join, Name, Pid})
-                         end),
-            ok
-    end.
-
--spec leave(name(), pid()) -> 'ok' | {'error', {'no_such_group', name()}}.
-
-leave(Name, Pid) when is_pid(Pid) ->
-    ensure_started(),
-    case ets:member(pg2_fixed_table, {group, Name}) of
-        false ->
-            {error, {no_such_group, Name}};
-        true ->
-            global:trans({{?MODULE, Name}, self()},
-                         fun() ->
-                                 gen_server:multi_call(?MODULE,
-                                                       {leave, Name, Pid})
-                         end),
-            ok
-    end.
-
--type get_members_ret() :: [pid()] | {'error', {'no_such_group', name()}}.
-
--spec get_members(name()) -> get_members_ret().
-   
-get_members(Name) ->
-    ensure_started(),
-    case ets:member(pg2_fixed_table, {group, Name}) of
-        true ->
-            group_members(Name);
-        false ->
-            {error, {no_such_group, Name}}
-    end.
-
--spec get_local_members(name()) -> get_members_ret().
-
-get_local_members(Name) ->
-    ensure_started(),
-    case ets:member(pg2_fixed_table, {group, Name}) of
-        true ->
-            local_group_members(Name);
-        false ->
-            {error, {no_such_group, Name}}
-    end.
-
--spec which_groups() -> [name()].
-
-which_groups() ->
-    ensure_started(),
-    all_groups().
-
--type gcp_error_reason() :: {'no_process', term()} | {'no_such_group', term()}.
-
--spec get_closest_pid(term()) -> pid() | {'error', gcp_error_reason()}.
-
-get_closest_pid(Name) ->
-    case get_local_members(Name) of
-        [Pid] ->
-            Pid;
-        [] ->
-            {_,_,X} = erlang:now(),
-            case get_members(Name) of
-                [] -> {error, {no_process, Name}};
-                Members ->
-                    lists:nth((X rem length(Members))+1, Members)
-            end;
-        Members when is_list(Members) ->
-            {_,_,X} = erlang:now(),
-            lists:nth((X rem length(Members))+1, Members);
-        Else ->
-            Else
-    end.
-
-%%%
-%%% Callback functions from gen_server
-%%%
-
--record(state, {}).
-
--spec init([]) -> {'ok', #state{}}.
-
-init([]) ->
-    Ns = nodes(),
-    net_kernel:monitor_nodes(true),
-    lists:foreach(fun(N) ->
-                          {?MODULE, N} ! {new_pg2_fixed, node()},
-                          self() ! {nodeup, N}
-                  end, Ns),
-    pg2_fixed_table = ets:new(pg2_fixed_table, [ordered_set, protected, named_table]),
-    {ok, #state{}}.
-
--type call() :: {'create', name()}
-              | {'delete', name()}
-              | {'join', name(), pid()}
-              | {'leave', name(), pid()}.
-
--spec handle_call(call(), _, #state{}) -> 
-        {'reply', 'ok', #state{}}.
-
-handle_call({create, Name}, _From, S) ->
-    assure_group(Name),
-    {reply, ok, S};
-handle_call({join, Name, Pid}, _From, S) ->
-    case ets:member(pg2_fixed_table, {group, Name}) of
-        true -> join_group(Name, Pid);
-        _    -> ok
-    end,
-    {reply, ok, S};
-handle_call({leave, Name, Pid}, _From, S) ->
-    case ets:member(pg2_fixed_table, {group, Name}) of
-        true -> leave_group(Name, Pid);
-        _    -> ok
-    end,
-    {reply, ok, S};
-handle_call({delete, Name}, _From, S) ->
-    delete_group(Name),
-    {reply, ok, S};
-handle_call(Request, From, S) ->
-    error_logger:warning_msg("The pg2_fixed server received an unexpected message:\n"
-                             "handle_call(~p, ~p, _)\n", 
-                             [Request, From]),
-    {noreply, S}.
-
--type all_members() :: [[name(),...]].
--type cast() :: {'exchange', node(), all_members()}
-              | {'del_member', name(), pid()}.
-
--spec handle_cast(cast(), #state{}) -> {'noreply', #state{}}.
-
-handle_cast({exchange, _Node, List}, S) ->
-    store(List),
-    {noreply, S};
-handle_cast(_, S) ->
-    %% Ignore {del_member, Name, Pid}.
-    {noreply, S}.
-
--spec handle_info(tuple(), #state{}) -> {'noreply', #state{}}.
-
-handle_info({'DOWN', MonitorRef, process, _Pid, _Info}, S) ->
-    member_died(MonitorRef),
-    {noreply, S};
-handle_info({nodeup, Node}, S) ->
-    gen_server:cast({?MODULE, Node}, {exchange, node(), all_members()}),
-    {noreply, S};
-handle_info({new_pg2_fixed, Node}, S) ->
-    gen_server:cast({?MODULE, Node}, {exchange, node(), all_members()}),
-    {noreply, S};
-handle_info(_, S) ->
-    {noreply, S}.
-
--spec terminate(term(), #state{}) -> 'ok'.
-
-terminate(_Reason, _S) ->
-    true = ets:delete(pg2_fixed_table),
-    ok.
-
-%%%
-%%% Local functions
-%%%
-
-%%% One ETS table, pg2_fixed_table, is used for bookkeeping. The type of the
-%%% table is ordered_set, and the fast matching of partially
-%%% instantiated keys is used extensively.
-%%%
-%%% {{group, Name}}
-%%%    Process group Name.
-%%% {{ref, Pid}, RPid, MonitorRef, Counter}
-%%% {{ref, MonitorRef}, Pid}
-%%%    Each process has one monitor. Sometimes a process is spawned to
-%%%    monitor the pid (RPid). Counter is incremented when the Pid joins
-%%%    some group.
-%%% {{member, Name, Pid}, GroupCounter}
-%%% {{local_member, Name, Pid}}
-%%%    Pid is a member of group Name, GroupCounter is incremented when the
-%%%    Pid joins the group Name.
-%%% {{pid, Pid, Name}}
-%%%    Pid is a member of group Name.
-
-store(List) ->
-    _ = [case assure_group(Name) of
-             true ->
-                 [join_group(Name, P) || P <- Members -- group_members(Name)];
-             _ ->
-                 ok
-         end || [Name, Members] <- List],
-    ok.
-
-assure_group(Name) ->
-    Key = {group, Name},
-    ets:member(pg2_fixed_table, Key) orelse true =:= ets:insert(pg2_fixed_table, {Key}).
-
-delete_group(Name) ->
-    _ = [leave_group(Name, Pid) || Pid <- group_members(Name)],
-    true = ets:delete(pg2_fixed_table, {group, Name}),
-    ok.
-
-member_died(Ref) ->
-    [{{ref, Ref}, Pid}] = ets:lookup(pg2_fixed_table, {ref, Ref}),
-    Names = member_groups(Pid),
-    _ = [leave_group(Name, P) || 
-            Name <- Names,
-            P <- member_in_group(Pid, Name)],
-    %% Kept for backward compatibility with links. Can be removed, eventually.
-    _ = [gen_server:abcast(nodes(), ?MODULE, {del_member, Name, Pid}) ||
-            Name <- Names],
-    ok.
-
-join_group(Name, Pid) ->
-    Ref_Pid = {ref, Pid}, 
-    try _ = ets:update_counter(pg2_fixed_table, Ref_Pid, {4, +1})
-    catch _:_ ->
-            {RPid, Ref} = do_monitor(Pid),
-            true = ets:insert(pg2_fixed_table, {Ref_Pid, RPid, Ref, 1}),
-            true = ets:insert(pg2_fixed_table, {{ref, Ref}, Pid})
-    end,
-    Member_Name_Pid = {member, Name, Pid},
-    try _ = ets:update_counter(pg2_fixed_table, Member_Name_Pid, {2, +1, 1, 1})
-    catch _:_ ->
-            true = ets:insert(pg2_fixed_table, {Member_Name_Pid, 1}),
-            _ = [ets:insert(pg2_fixed_table, {{local_member, Name, Pid}}) ||
-                    node(Pid) =:= node()],
-            true = ets:insert(pg2_fixed_table, {{pid, Pid, Name}})
-    end.
-
-leave_group(Name, Pid) ->
-    Member_Name_Pid = {member, Name, Pid},
-    try ets:update_counter(pg2_fixed_table, Member_Name_Pid, {2, -1, 0, 0}) of
-        N ->
-            if 
-                N =:= 0 ->
-                    true = ets:delete(pg2_fixed_table, {pid, Pid, Name}),
-                    _ = [ets:delete(pg2_fixed_table, {local_member, Name, Pid}) ||
-                            node(Pid) =:= node()],
-                    true = ets:delete(pg2_fixed_table, Member_Name_Pid);
-                true ->
-                    ok
-            end,
-            Ref_Pid = {ref, Pid}, 
-            case ets:update_counter(pg2_fixed_table, Ref_Pid, {4, -1}) of
-                0 ->
-                    [{Ref_Pid,RPid,Ref,0}] = ets:lookup(pg2_fixed_table, Ref_Pid),
-                    true = ets:delete(pg2_fixed_table, {ref, Ref}),
-                    true = ets:delete(pg2_fixed_table, Ref_Pid),
-                    true = erlang:demonitor(Ref, [flush]),
-                    kill_monitor_proc(RPid, Pid);
-                _ ->
-                    ok
-            end
-    catch _:_ ->
-            ok
-    end.
-
-all_members() ->
-    [[G, group_members(G)] || G <- all_groups()].
-
-group_members(Name) ->
-    [P || 
-        [P, N] <- ets:match(pg2_fixed_table, {{member, Name, '$1'},'$2'}),
-        _ <- lists:seq(1, N)].
-
-local_group_members(Name) ->
-    [P || 
-        [Pid] <- ets:match(pg2_fixed_table, {{local_member, Name, '$1'}}),
-        P <- member_in_group(Pid, Name)].
-
-member_in_group(Pid, Name) ->
-    case ets:lookup(pg2_fixed_table, {member, Name, Pid}) of
-        [] -> [];
-        [{{member, Name, Pid}, N}] ->
-            lists:duplicate(N, Pid)
-    end.
-
-member_groups(Pid) ->
-    [Name || [Name] <- ets:match(pg2_fixed_table, {{pid, Pid, '$1'}})].
-
-all_groups() ->
-    [N || [N] <- ets:match(pg2_fixed_table, {{group,'$1'}})].
-
-ensure_started() ->
-    case whereis(?MODULE) of
-        undefined ->
-            C = {pg2_fixed, {?MODULE, start_link, []}, permanent,
-                 1000, worker, [?MODULE]},
-            supervisor:start_child(kernel_safe_sup, C);
-        Pg2_FixedPid ->
-            {ok, Pg2_FixedPid}
-    end.
-
-
-kill_monitor_proc(RPid, Pid) ->
-    case RPid of
-        Pid -> ok;
-        _   -> exit(RPid, kill)
-    end.
-
-%% When/if erlang:monitor() returns before trying to connect to the
-%% other node this function can be removed.
-do_monitor(Pid) ->
-    case (node(Pid) =:= node()) orelse lists:member(node(Pid), nodes()) of
-        true ->
-            %% Assume the node is still up
-            {Pid, erlang:monitor(process, Pid)};
-        false ->
-            F = fun() -> 
-                        Ref = erlang:monitor(process, Pid),
-                        receive 
-                            {'DOWN', Ref, process, Pid, _Info} ->
-                                exit(normal)
-                        end
-                end,
-            erlang:spawn_monitor(F)
-    end.
diff --git a/rabbitmq-server/src/rabbit_channel_interceptor.erl b/rabbitmq-server/src/rabbit_channel_interceptor.erl
deleted file mode 100644 (file)
index 25c5df8..0000000
+++ /dev/null
@@ -1,91 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
-%% Since the AMQP methods used here are queue related,
-%% maybe we want this to be a queue_interceptor.
-
--module(rabbit_channel_interceptor).
-
--include("rabbit_framing.hrl").
--include("rabbit.hrl").
-
--export([intercept_method/2]).
-
--ifdef(use_specs).
-
--type(intercept_method() :: rabbit_framing:amqp_method_name()).
--type(original_method() :: rabbit_framing:amqp_method_record()).
--type(processed_method() :: rabbit_framing:amqp_method_record()).
-
--callback description() -> [proplists:property()].
-
--callback intercept(original_method(), rabbit_types:vhost()) ->
-    processed_method() | rabbit_misc:channel_or_connection_exit().
-
-%% Whether the interceptor wishes to intercept the amqp method
--callback applies_to(intercept_method()) -> boolean().
-
--else.
-
--export([behaviour_info/1]).
-
-behaviour_info(callbacks) ->
-    [{description, 0}, {intercept, 2}, {applies_to, 1}];
-behaviour_info(_Other) ->
-    undefined.
-
--endif.
-
-%%----------------------------------------------------------------------------
-
-intercept_method(#'basic.publish'{} = M, _VHost) -> M;
-intercept_method(#'basic.ack'{}     = M, _VHost) -> M;
-intercept_method(#'basic.nack'{}    = M, _VHost) -> M;
-intercept_method(#'basic.reject'{}  = M, _VHost) -> M;
-intercept_method(#'basic.credit'{}  = M, _VHost) -> M;
-intercept_method(M, VHost) ->
-    intercept_method(M, VHost, select(rabbit_misc:method_record_type(M))).
-
-intercept_method(M, _VHost, []) ->
-    M;
-intercept_method(M, VHost, [I]) ->
-    M2 = I:intercept(M, VHost),
-    case validate_method(M, M2) of
-        true ->
-            M2;
-        _   ->
-            internal_error("Interceptor: ~p expected "
-                                "to return method: ~p but returned: ~p",
-                                [I, rabbit_misc:method_record_type(M),
-                                 rabbit_misc:method_record_type(M2)])
-    end;
-intercept_method(M, _VHost, Is) ->
-    internal_error("More than one interceptor for method: ~p -- ~p",
-                   [rabbit_misc:method_record_type(M), Is]).
-
-%% select the interceptors that apply to intercept_method().
-select(Method)  ->
-    [M || {_, M} <- rabbit_registry:lookup_all(channel_interceptor),
-          code:which(M) =/= non_existing,
-          M:applies_to(Method)].
-
-validate_method(M, M2) ->
-    rabbit_misc:method_record_type(M) =:= rabbit_misc:method_record_type(M2).
-
-%% keep dialyzer happy
--spec internal_error(string(), [any()]) -> no_return().
-internal_error(Format, Args) ->
-    rabbit_misc:protocol_error(internal_error, Format, Args).
diff --git a/rabbitmq-server/src/ssl_compat.erl b/rabbitmq-server/src/ssl_compat.erl
deleted file mode 100644 (file)
index fc83fbc..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(ssl_compat).
-
-%% We don't want warnings about the use of erlang:now/0 in
-%% this module.
--compile(nowarn_deprecated_function).
-
--export([connection_information/1,
-         connection_information/2]).
-
-connection_information(SslSocket) ->
-    try
-        ssl:connection_information(SslSocket)
-    catch
-        error:undef ->
-            case ssl:connection_info(SslSocket) of
-                {ok, {ProtocolVersion, CipherSuite}} ->
-                    {ok, [{protocol, ProtocolVersion},
-                          {cipher_suite, CipherSuite}]};
-                {error, Reason} ->
-                    {error, Reason}
-            end
-    end.
-
-connection_information(SslSocket, Items) ->
-    try
-        ssl:connection_information(SslSocket, Items)
-    catch
-        error:undef ->
-            WantProtocolVersion = lists:member(protocol, Items),
-            WantCipherSuite = lists:member(cipher_suite, Items),
-            if
-                WantProtocolVersion orelse WantCipherSuite ->
-                    case ssl:connection_info(SslSocket) of
-                        {ok, {ProtocolVersion, CipherSuite}} ->
-                            filter_information_items(ProtocolVersion,
-                                                     CipherSuite,
-                                                     Items,
-                                                     []);
-                        {error, Reason} ->
-                            {error, Reason}
-                    end;
-                true ->
-                    {ok, []}
-            end
-    end.
-
-filter_information_items(ProtocolVersion, CipherSuite, [protocol | Rest],
-  Result) ->
-    filter_information_items(ProtocolVersion, CipherSuite, Rest,
-      [{protocol, ProtocolVersion} | Result]);
-filter_information_items(ProtocolVersion, CipherSuite, [cipher_suite | Rest],
-  Result) ->
-    filter_information_items(ProtocolVersion, CipherSuite, Rest,
-      [{cipher_suite, CipherSuite} | Result]);
-filter_information_items(ProtocolVersion, CipherSuite, [_ | Rest],
-  Result) ->
-    filter_information_items(ProtocolVersion, CipherSuite, Rest, Result);
-filter_information_items(_ProtocolVersion, _CipherSuite, [], Result) ->
-    {ok, lists:reverse(Result)}.
diff --git a/rabbitmq-server/src/tcp_acceptor.erl b/rabbitmq-server/src/tcp_acceptor.erl
deleted file mode 100644 (file)
index 75f216c..0000000
+++ /dev/null
@@ -1,105 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(tcp_acceptor).
-
--behaviour(gen_server).
-
--export([start_link/2]).
-
--export([init/1, handle_call/3, handle_cast/2, handle_info/2,
-         terminate/2, code_change/3]).
-
--record(state, {callback, sock, ref}).
-
-%%--------------------------------------------------------------------
-
-start_link(Callback, LSock) ->
-    gen_server:start_link(?MODULE, {Callback, LSock}, []).
-
-%%--------------------------------------------------------------------
-
-init({Callback, LSock}) ->
-    gen_server:cast(self(), accept),
-    {ok, #state{callback=Callback, sock=LSock}}.
-
-handle_call(_Request, _From, State) ->
-    {noreply, State}.
-
-handle_cast(accept, State) ->
-    ok = file_handle_cache:obtain(),
-    accept(State);
-
-handle_cast(_Msg, State) ->
-    {noreply, State}.
-
-handle_info({inet_async, LSock, Ref, {ok, Sock}},
-            State = #state{callback={M,F,A}, sock=LSock, ref=Ref}) ->
-
-    %% patch up the socket so it looks like one we got from
-    %% gen_tcp:accept/1
-    {ok, Mod} = inet_db:lookup_socket(LSock),
-    inet_db:register_socket(Sock, Mod),
-
-    %% handle
-    case tune_buffer_size(Sock) of
-        ok                -> file_handle_cache:transfer(
-                               apply(M, F, A ++ [Sock])),
-                             ok = file_handle_cache:obtain();
-        {error, enotconn} -> catch port_close(Sock);
-        {error, Err}      -> {ok, {IPAddress, Port}} = inet:sockname(LSock),
-                             error_logger:error_msg(
-                               "failed to tune buffer size of "
-                               "connection accepted on ~s:~p - ~s~n",
-                               [rabbit_misc:ntoab(IPAddress), Port,
-                                rabbit_misc:format_inet_error(Err)]),
-                             catch port_close(Sock)
-    end,
-
-    %% accept more
-    accept(State);
-
-handle_info({inet_async, LSock, Ref, {error, Reason}},
-            State=#state{sock=LSock, ref=Ref}) ->
-    case Reason of
-        closed       -> {stop, normal, State}; %% listening socket closed
-        econnaborted -> accept(State); %% client sent RST before we accepted
-        _            -> {stop, {accept_failed, Reason}, State}
-    end;
-
-handle_info(_Info, State) ->
-    {noreply, State}.
-
-terminate(_Reason, _State) ->
-    ok.
-
-code_change(_OldVsn, State, _Extra) ->
-    {ok, State}.
-
-%%--------------------------------------------------------------------
-
-accept(State = #state{sock=LSock}) ->
-    case prim_inet:async_accept(LSock, -1) of
-        {ok, Ref} -> {noreply, State#state{ref=Ref}};
-        Error     -> {stop, {cannot_accept, Error}, State}
-    end.
-
-tune_buffer_size(Sock) ->
-    case inet:getopts(Sock, [sndbuf, recbuf, buffer]) of
-        {ok, BufSizes} -> BufSz = lists:max([Sz || {_Opt, Sz} <- BufSizes]),
-                          inet:setopts(Sock, [{buffer, BufSz}]);
-        Error          -> Error
-    end.
diff --git a/rabbitmq-server/src/tcp_acceptor_sup.erl b/rabbitmq-server/src/tcp_acceptor_sup.erl
deleted file mode 100644 (file)
index 22c886e..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(tcp_acceptor_sup).
-
--behaviour(supervisor).
-
--export([start_link/2]).
-
--export([init/1]).
-
-%%----------------------------------------------------------------------------
-
--ifdef(use_specs).
-
--type(mfargs() :: {atom(), atom(), [any()]}).
-
--spec(start_link/2 :: (atom(), mfargs()) -> rabbit_types:ok_pid_or_error()).
-
--endif.
-
-%%----------------------------------------------------------------------------
-
-start_link(Name, Callback) ->
-    supervisor:start_link({local,Name}, ?MODULE, Callback).
-
-init(Callback) ->
-    {ok, {{simple_one_for_one, 10, 10},
-          [{tcp_acceptor, {tcp_acceptor, start_link, [Callback]},
-            transient, brutal_kill, worker, [tcp_acceptor]}]}}.
diff --git a/rabbitmq-server/src/tcp_listener.erl b/rabbitmq-server/src/tcp_listener.erl
deleted file mode 100644 (file)
index 307249a..0000000
+++ /dev/null
@@ -1,98 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(tcp_listener).
-
--behaviour(gen_server).
-
--export([start_link/8]).
-
--export([init/1, handle_call/3, handle_cast/2, handle_info/2,
-         terminate/2, code_change/3]).
-
--record(state, {sock, on_startup, on_shutdown, label}).
-
-%%----------------------------------------------------------------------------
-
--ifdef(use_specs).
-
--type(mfargs() :: {atom(), atom(), [any()]}).
-
--spec(start_link/8 ::
-        (inet:ip_address(), inet:port_number(), [gen_tcp:listen_option()],
-         integer(), atom(), mfargs(), mfargs(), string()) ->
-                           rabbit_types:ok_pid_or_error()).
-
--endif.
-
-%%--------------------------------------------------------------------
-
-start_link(IPAddress, Port, SocketOpts,
-           ConcurrentAcceptorCount, AcceptorSup,
-           OnStartup, OnShutdown, Label) ->
-    gen_server:start_link(
-      ?MODULE, {IPAddress, Port, SocketOpts,
-                ConcurrentAcceptorCount, AcceptorSup,
-                OnStartup, OnShutdown, Label}, []).
-
-%%--------------------------------------------------------------------
-
-init({IPAddress, Port, SocketOpts,
-      ConcurrentAcceptorCount, AcceptorSup,
-      {M,F,A} = OnStartup, OnShutdown, Label}) ->
-    process_flag(trap_exit, true),
-    case gen_tcp:listen(Port, SocketOpts ++ [{ip, IPAddress},
-                                             {active, false}]) of
-        {ok, LSock} ->
-            lists:foreach(fun (_) ->
-                                  {ok, _APid} = supervisor:start_child(
-                                                  AcceptorSup, [LSock])
-                          end,
-                          lists:duplicate(ConcurrentAcceptorCount, dummy)),
-            {ok, {LIPAddress, LPort}} = inet:sockname(LSock),
-            error_logger:info_msg(
-              "started ~s on ~s:~p~n",
-              [Label, rabbit_misc:ntoab(LIPAddress), LPort]),
-            apply(M, F, A ++ [IPAddress, Port]),
-            {ok, #state{sock = LSock,
-                        on_startup = OnStartup, on_shutdown = OnShutdown,
-                        label = Label}};
-        {error, Reason} ->
-            error_logger:error_msg(
-              "failed to start ~s on ~s:~p - ~p (~s)~n",
-              [Label, rabbit_misc:ntoab(IPAddress), Port,
-               Reason, inet:format_error(Reason)]),
-            {stop, {cannot_listen, IPAddress, Port, Reason}}
-    end.
-
-handle_call(_Request, _From, State) ->
-    {noreply, State}.
-
-handle_cast(_Msg, State) ->
-    {noreply, State}.
-
-handle_info(_Info, State) ->
-    {noreply, State}.
-
-terminate(_Reason, #state{sock=LSock, on_shutdown = {M,F,A}, label=Label}) ->
-    {ok, {IPAddress, Port}} = inet:sockname(LSock),
-    gen_tcp:close(LSock),
-    error_logger:info_msg("stopped ~s on ~s:~p~n",
-                          [Label, rabbit_misc:ntoab(IPAddress), Port]),
-    apply(M, F, A ++ [IPAddress, Port]).
-
-code_change(_OldVsn, State, _Extra) ->
-    {ok, State}.
diff --git a/rabbitmq-server/src/tcp_listener_sup.erl b/rabbitmq-server/src/tcp_listener_sup.erl
deleted file mode 100644 (file)
index 94bdecc..0000000
+++ /dev/null
@@ -1,70 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(tcp_listener_sup).
-
--behaviour(supervisor).
-
--export([start_link/7, start_link/8]).
-
--export([init/1]).
-
-%%----------------------------------------------------------------------------
-
--ifdef(use_specs).
-
--type(mfargs() :: {atom(), atom(), [any()]}).
-
--spec(start_link/7 ::
-        (inet:ip_address(), inet:port_number(), [gen_tcp:listen_option()],
-         mfargs(), mfargs(), mfargs(), string()) ->
-                           rabbit_types:ok_pid_or_error()).
--spec(start_link/8 ::
-        (inet:ip_address(), inet:port_number(), [gen_tcp:listen_option()],
-         mfargs(), mfargs(), mfargs(), integer(), string()) ->
-                           rabbit_types:ok_pid_or_error()).
-
--endif.
-
-%%----------------------------------------------------------------------------
-
-start_link(IPAddress, Port, SocketOpts, OnStartup, OnShutdown,
-           AcceptCallback, Label) ->
-    start_link(IPAddress, Port, SocketOpts, OnStartup, OnShutdown,
-               AcceptCallback, 1, Label).
-
-start_link(IPAddress, Port, SocketOpts, OnStartup, OnShutdown,
-           AcceptCallback, ConcurrentAcceptorCount, Label) ->
-    supervisor:start_link(
-      ?MODULE, {IPAddress, Port, SocketOpts, OnStartup, OnShutdown,
-                AcceptCallback, ConcurrentAcceptorCount, Label}).
-
-init({IPAddress, Port, SocketOpts, OnStartup, OnShutdown,
-      AcceptCallback, ConcurrentAcceptorCount, Label}) ->
-    %% This is gross. The tcp_listener needs to know about the
-    %% tcp_acceptor_sup, and the only way I can think of accomplishing
-    %% that without jumping through hoops is to register the
-    %% tcp_acceptor_sup.
-    Name = rabbit_misc:tcp_name(tcp_acceptor_sup, IPAddress, Port),
-    {ok, {{one_for_all, 10, 10},
-          [{tcp_acceptor_sup, {tcp_acceptor_sup, start_link,
-                               [Name, AcceptCallback]},
-            transient, infinity, supervisor, [tcp_acceptor_sup]},
-           {tcp_listener, {tcp_listener, start_link,
-                           [IPAddress, Port, SocketOpts,
-                            ConcurrentAcceptorCount, Name,
-                            OnStartup, OnShutdown, Label]},
-            transient, 16#ffffffff, worker, [tcp_listener]}]}}.
diff --git a/rabbitmq-server/src/time_compat.erl b/rabbitmq-server/src/time_compat.erl
deleted file mode 100644 (file)
index b87c6cc..0000000
+++ /dev/null
@@ -1,305 +0,0 @@
-%%
-%% %CopyrightBegin%
-%% 
-%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
-%% 
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%%     http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%% 
-%% %CopyrightEnd%
-%%
-
-%%
-%% If your code need to be able to execute on ERTS versions both
-%% earlier and later than 7.0, the best approach is to use the new
-%% time API introduced in ERTS 7.0 and implement a fallback
-%% solution using the old primitives to be used on old ERTS
-%% versions. This way your code can automatically take advantage
-%% of the improvements in the API when available. This is an
-%% example of how to implement such an API, but it can be used
-%% as is if you want to. Just add (a preferrably renamed version of)
-%% this module to your project, and call the API via this module
-%% instead of calling the BIFs directly.
-%%
-
--module(time_compat).
-
-%% We don't want warnings about the use of erlang:now/0 in
-%% this module.
--compile(nowarn_deprecated_function).
-%%
-%% We don't use
-%%   -compile({nowarn_deprecated_function, [{erlang, now, 0}]}).
-%% since this will produce warnings when compiled on systems
-%% where it has not yet been deprecated.
-%%
-
--export([monotonic_time/0,
-        monotonic_time/1,
-        erlang_system_time/0,
-        erlang_system_time/1,
-        os_system_time/0,
-        os_system_time/1,
-        time_offset/0,
-        time_offset/1,
-        convert_time_unit/3,
-        timestamp/0,
-        unique_integer/0,
-        unique_integer/1,
-        monitor/2,
-        system_info/1,
-        system_flag/2]).
-
-monotonic_time() ->
-    try
-       erlang:monotonic_time()
-    catch
-       error:undef ->
-           %% Use Erlang system time as monotonic time
-           erlang_system_time_fallback()
-    end.
-
-monotonic_time(Unit) ->
-    try
-       erlang:monotonic_time(Unit)
-    catch
-       error:badarg ->
-           erlang:error(badarg, [Unit]);
-       error:undef ->
-           %% Use Erlang system time as monotonic time
-           STime = erlang_system_time_fallback(),
-           try
-               convert_time_unit_fallback(STime, native, Unit)
-           catch
-               error:bad_time_unit -> erlang:error(badarg, [Unit])
-           end
-    end.
-
-erlang_system_time() ->
-    try
-       erlang:system_time()
-    catch
-       error:undef ->
-           erlang_system_time_fallback()
-    end.
-
-erlang_system_time(Unit) ->
-    try
-       erlang:system_time(Unit)
-    catch
-       error:badarg ->
-           erlang:error(badarg, [Unit]);
-       error:undef ->
-           STime = erlang_system_time_fallback(),
-           try
-               convert_time_unit_fallback(STime, native, Unit)
-           catch
-               error:bad_time_unit -> erlang:error(badarg, [Unit])
-           end
-    end.
-
-os_system_time() ->
-    try
-       os:system_time()
-    catch
-       error:undef ->
-           os_system_time_fallback()
-    end.
-
-os_system_time(Unit) ->
-    try
-       os:system_time(Unit)
-    catch
-       error:badarg ->
-           erlang:error(badarg, [Unit]);
-       error:undef ->
-           STime = os_system_time_fallback(),
-           try
-               convert_time_unit_fallback(STime, native, Unit)
-           catch
-               error:bad_time_unit -> erlang:error(badarg, [Unit])
-           end
-    end.
-
-time_offset() ->
-    try
-       erlang:time_offset()
-    catch
-       error:undef ->
-           %% Erlang system time and Erlang monotonic
-           %% time are always aligned
-           0
-    end.
-
-time_offset(Unit) ->
-    try
-       erlang:time_offset(Unit)
-    catch
-       error:badarg ->
-           erlang:error(badarg, [Unit]);
-       error:undef ->
-           try
-               _ = integer_time_unit(Unit)
-           catch
-               error:bad_time_unit -> erlang:error(badarg, [Unit])
-           end,
-           %% Erlang system time and Erlang monotonic
-           %% time are always aligned
-           0
-    end.
-
-convert_time_unit(Time, FromUnit, ToUnit) ->
-    try
-       erlang:convert_time_unit(Time, FromUnit, ToUnit)
-    catch
-       error:undef ->
-           try
-               convert_time_unit_fallback(Time, FromUnit, ToUnit)
-           catch
-               _:_ ->
-                   erlang:error(badarg, [Time, FromUnit, ToUnit])
-           end;
-       error:Error ->
-           erlang:error(Error, [Time, FromUnit, ToUnit])
-    end.
-
-timestamp() ->
-    try
-       erlang:timestamp()
-    catch
-       error:undef ->
-           erlang:now()
-    end.
-
-unique_integer() ->
-    try
-       erlang:unique_integer()
-    catch
-       error:undef ->
-           {MS, S, US} = erlang:now(),
-           (MS*1000000+S)*1000000+US
-    end.
-
-unique_integer(Modifiers) ->
-    try
-       erlang:unique_integer(Modifiers)
-    catch
-       error:badarg ->
-           erlang:error(badarg, [Modifiers]);
-       error:undef ->
-           case is_valid_modifier_list(Modifiers) of
-               true ->
-                   %% now() converted to an integer
-                   %% fullfill the requirements of
-                   %% all modifiers: unique, positive,
-                   %% and monotonic...
-                   {MS, S, US} = erlang:now(),
-                   (MS*1000000+S)*1000000+US;
-               false ->
-                   erlang:error(badarg, [Modifiers])
-           end
-    end.
-
-monitor(Type, Item) ->
-    try
-       erlang:monitor(Type, Item)
-    catch
-       error:Error ->
-           case {Error, Type, Item} of
-               {badarg, time_offset, clock_service} ->
-                   %% Time offset is final and will never change.
-                   %% Return a dummy reference, there will never
-                   %% be any need for 'CHANGE' messages...
-                   make_ref();
-               _ ->
-                   erlang:error(Error, [Type, Item])
-           end
-    end.
-
-system_info(Item) ->
-    try
-       erlang:system_info(Item)
-    catch
-       error:badarg ->
-           case Item of
-               time_correction ->
-                   case erlang:system_info(tolerant_timeofday) of
-                       enabled -> true;
-                       disabled -> false
-                   end;
-               time_warp_mode ->
-                   no_time_warp;
-               time_offset ->
-                   final;
-               NotSupArg when NotSupArg == os_monotonic_time_source;
-                              NotSupArg == os_system_time_source;
-                              NotSupArg == start_time;
-                              NotSupArg == end_time ->
-                   %% Cannot emulate this...
-                   erlang:error(notsup, [NotSupArg]);
-               _ ->
-                   erlang:error(badarg, [Item])
-           end;
-       error:Error ->
-           erlang:error(Error, [Item])
-    end.
-
-system_flag(Flag, Value) ->
-    try
-       erlang:system_flag(Flag, Value)
-    catch
-       error:Error ->
-           case {Error, Flag, Value} of
-               {badarg, time_offset, finalize} ->
-                   %% Time offset is final
-                   final;
-               _ ->
-                   erlang:error(Error, [Flag, Value])
-           end
-    end.
-
-%%
-%% Internal functions
-%%
-
-integer_time_unit(native) -> 1000*1000;
-integer_time_unit(nano_seconds) -> 1000*1000*1000;
-integer_time_unit(micro_seconds) -> 1000*1000;
-integer_time_unit(milli_seconds) -> 1000;
-integer_time_unit(seconds) -> 1;
-integer_time_unit(I) when is_integer(I), I > 0 -> I;
-integer_time_unit(BadRes) -> erlang:error(bad_time_unit, [BadRes]).
-
-erlang_system_time_fallback() ->
-    {MS, S, US} = erlang:now(),
-    (MS*1000000+S)*1000000+US.
-
-os_system_time_fallback() ->
-    {MS, S, US} = os:timestamp(),
-    (MS*1000000+S)*1000000+US.
-
-convert_time_unit_fallback(Time, FromUnit, ToUnit) ->
-    FU = integer_time_unit(FromUnit),
-    TU = integer_time_unit(ToUnit),
-    case Time < 0 of
-       true -> TU*Time - (FU - 1);
-       false -> TU*Time
-    end div FU.
-
-is_valid_modifier_list([positive|Ms]) ->
-    is_valid_modifier_list(Ms);
-is_valid_modifier_list([monotonic|Ms]) ->
-    is_valid_modifier_list(Ms);
-is_valid_modifier_list([]) ->
-    true;
-is_valid_modifier_list(_) ->
-    false.
diff --git a/rabbitmq-server/src/worker_pool_sup.erl b/rabbitmq-server/src/worker_pool_sup.erl
deleted file mode 100644 (file)
index 99afd91..0000000
+++ /dev/null
@@ -1,53 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at http://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
-%%
-
--module(worker_pool_sup).
-
--behaviour(supervisor).
-
--export([start_link/0, start_link/1]).
-
--export([init/1]).
-
-%%----------------------------------------------------------------------------
-
--ifdef(use_specs).
-
--spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
--spec(start_link/1 :: (non_neg_integer()) -> rabbit_types:ok_pid_or_error()).
-
--endif.
-
-%%----------------------------------------------------------------------------
-
--define(SERVER, ?MODULE).
-
-%%----------------------------------------------------------------------------
-
-start_link() ->
-    start_link(erlang:system_info(schedulers)).
-
-start_link(WCount) ->
-    supervisor:start_link({local, ?SERVER}, ?MODULE, [WCount]).
-
-%%----------------------------------------------------------------------------
-
-init([WCount]) ->
-    {ok, {{one_for_one, 10, 10},
-          [{worker_pool, {worker_pool, start_link, []}, transient,
-            16#ffffffff, worker, [worker_pool]} |
-           [{N, {worker_pool_worker, start_link, []}, transient, 16#ffffffff,
-             worker, [worker_pool_worker]} || N <- lists:seq(1, WCount)]]}}.
diff --git a/rabbitmq-server/version.mk b/rabbitmq-server/version.mk
deleted file mode 100644 (file)
index cc5e402..0000000
+++ /dev/null
@@ -1 +0,0 @@
-VERSION?=3.5.6
old mode 100755 (executable)
new mode 100644 (file)
similarity index 63%
rename from debian/rabbitmq-script-wrapper
rename to scripts/rabbitmq-script-wrapper
index a622ae2..9623f01
 ##  The Original Code is RabbitMQ.
 ##
 ##  The Initial Developer of the Original Code is GoPivotal, Inc.
-##  Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+##  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
 ##
 
-# Escape spaces and quotes, because shell is revolting.
 for arg in "$@" ; do
-       # Escape quotes in parameters, so that they're passed through cleanly.
-       arg=$(sed -e 's/"/\\"/g' <<-END
-               $arg
-               END
-       )
-       CMDLINE="${CMDLINE} \"${arg}\""
+    # Wrap each arg in single quotes and wrap single quotes in double quotes, so that they're passed through cleanly.
+    arg=`printf %s "$arg" | sed -e "s#'#'\"'\"'#g"`
+    CMDLINE="${CMDLINE} '${arg}'"
 done
 
 cd /var/lib/rabbitmq
@@ -30,14 +26,18 @@ cd /var/lib/rabbitmq
 SCRIPT=`basename $0`
 
 if [ `id -u` = `id -u rabbitmq` -a "$SCRIPT" = "rabbitmq-server" ] ; then
-    /usr/lib/rabbitmq/bin/rabbitmq-server "$@"  > "/var/log/rabbitmq/startup_log" 2> "/var/log/rabbitmq/startup_err"
+    RABBITMQ_ENV=/usr/lib/rabbitmq/bin/rabbitmq-env
+    RABBITMQ_SCRIPTS_DIR=$(dirname "$RABBITMQ_ENV")
+    . "$RABBITMQ_ENV"
+
+    exec /usr/lib/rabbitmq/bin/rabbitmq-server "$@" @STDOUT_STDERR_REDIRECTION@
 elif [ `id -u` = `id -u rabbitmq` -o "$SCRIPT" = "rabbitmq-plugins" ] ; then
     if [ -f $PWD/.erlang.cookie ] ; then
         export HOME=.
     fi
-    /usr/lib/rabbitmq/bin/${SCRIPT} "$@"
+    exec /usr/lib/rabbitmq/bin/${SCRIPT} "$@"
 elif [ `id -u` = 0 ] ; then
-    su rabbitmq -s /bin/sh -c "/usr/lib/rabbitmq/bin/${SCRIPT} ${CMDLINE}"
+    @SU_RABBITMQ_SH_C@ "/usr/lib/rabbitmq/bin/${SCRIPT} ${CMDLINE}"
 else
     /usr/lib/rabbitmq/bin/${SCRIPT}
     echo
diff --git a/scripts/rabbitmq-server-ha.ocf b/scripts/rabbitmq-server-ha.ocf
new file mode 100755 (executable)
index 0000000..49f3af7
--- /dev/null
@@ -0,0 +1,2365 @@
+#!/bin/sh
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# See usage() function below for more details ...
+#
+# Note that the script uses an external file to setup RabbitMQ policies
+# so make sure to create it from an example shipped with the package.
+#
+#######################################################################
+# Initialization:
+
+: ${OCF_FUNCTIONS_DIR=${OCF_ROOT}/lib/heartbeat}
+. ${OCF_FUNCTIONS_DIR}/ocf-shellfuncs
+
+#######################################################################
+
+# Fill in some defaults if no values are specified
+
+PATH=/sbin:/usr/sbin:/bin:/usr/bin
+
+OCF_RESKEY_binary_default="/usr/sbin/rabbitmq-server"
+OCF_RESKEY_ctl_default="/usr/sbin/rabbitmqctl"
+OCF_RESKEY_debug_default=false
+OCF_RESKEY_username_default="rabbitmq"
+OCF_RESKEY_groupname_default="rabbitmq"
+OCF_RESKEY_admin_user_default="guest"
+OCF_RESKEY_admin_password_default="guest"
+OCF_RESKEY_definitions_dump_file_default="/etc/rabbitmq/definitions"
+OCF_RESKEY_pid_file_default="/var/run/rabbitmq/pid"
+OCF_RESKEY_log_dir_default="/var/log/rabbitmq"
+OCF_RESKEY_mnesia_base_default="/var/lib/rabbitmq/mnesia"
+OCF_RESKEY_mnesia_schema_base_default="/var/lib/rabbitmq"
+OCF_RESKEY_host_ip_default="127.0.0.1"
+OCF_RESKEY_node_port_default=5672
+OCF_RESKEY_erlang_cookie_default=false
+OCF_RESKEY_erlang_cookie_file_default="/var/lib/rabbitmq/.erlang.cookie"
+OCF_RESKEY_use_fqdn_default=false
+OCF_RESKEY_fqdn_prefix_default=""
+OCF_RESKEY_max_rabbitmqctl_timeouts_default=3
+OCF_RESKEY_policy_file_default="/usr/local/sbin/set_rabbitmq_policy"
+OCF_RESKEY_rmq_feature_health_check_default=true
+OCF_RESKEY_rmq_feature_local_list_queues_default=true
+
+: ${HA_LOGTAG="lrmd"}
+: ${HA_LOGFACILITY="daemon"}
+: ${OCF_RESKEY_binary=${OCF_RESKEY_binary_default}}
+: ${OCF_RESKEY_ctl=${OCF_RESKEY_ctl_default}}
+: ${OCF_RESKEY_debug=${OCF_RESKEY_debug_default}}
+: ${OCF_RESKEY_username=${OCF_RESKEY_username_default}}
+: ${OCF_RESKEY_groupname=${OCF_RESKEY_groupname_default}}
+: ${OCF_RESKEY_admin_user=${OCF_RESKEY_admin_user_default}}
+: ${OCF_RESKEY_admin_password=${OCF_RESKEY_admin_password_default}}
+: ${OCF_RESKEY_definitions_dump_file=${OCF_RESKEY_definitions_dump_file_default}}
+: ${OCF_RESKEY_log_dir=${OCF_RESKEY_log_dir_default}}
+: ${OCF_RESKEY_mnesia_base=${OCF_RESKEY_mnesia_base_default}}
+: ${OCF_RESKEY_mnesia_schema_base=${OCF_RESKEY_mnesia_schema_base_default}}
+: ${OCF_RESKEY_pid_file=${OCF_RESKEY_pid_file_default}}
+: ${OCF_RESKEY_node_port=${OCF_RESKEY_node_port_default}}
+: ${OCF_RESKEY_erlang_cookie=${OCF_RESKEY_erlang_cookie_default}}
+: ${OCF_RESKEY_erlang_cookie_file=${OCF_RESKEY_erlang_cookie_file_default}}
+: ${OCF_RESKEY_use_fqdn=${OCF_RESKEY_use_fqdn_default}}
+: ${OCF_RESKEY_fqdn_prefix=${OCF_RESKEY_fqdn_prefix_default}}
+: ${OCF_RESKEY_max_rabbitmqctl_timeouts=${OCF_RESKEY_max_rabbitmqctl_timeouts_default}}
+: ${OCF_RESKEY_policy_file=${OCF_RESKEY_policy_file_default}}
+: ${OCF_RESKEY_rmq_feature_health_check=${OCF_RESKEY_rmq_feature_health_check_default}}
+: ${OCF_RESKEY_rmq_feature_local_list_queues=${OCF_RESKEY_rmq_feature_local_list_queues_default}}
+
+#######################################################################
+
+OCF_RESKEY_start_time_default=$((OCF_RESKEY_CRM_meta_timeout / 6000 + 2))
+: ${OCF_RESKEY_start_time=${OCF_RESKEY_start_time_default}}
+OCF_RESKEY_stop_time_default=${OCF_RESKEY_start_time_default}
+: ${OCF_RESKEY_stop_time=${OCF_RESKEY_start_time_default}}
+OCF_RESKEY_command_timeout_default=""
+: ${OCF_RESKEY_command_timeout=${OCF_RESKEY_command_timeout_default}}
+TIMEOUT_ARG=$((OCF_RESKEY_CRM_meta_timeout / 6000 + 30))
+COMMAND_TIMEOUT="/usr/bin/timeout ${OCF_RESKEY_command_timeout} ${TIMEOUT_ARG}"
+
+#######################################################################
+
+usage() {
+    cat <<UEND
+        usage: $0 (start|stop|validate-all|meta-data|status|monitor)
+
+        $0 manages an ${OCF_RESKEY_binary} process as an HA resource
+
+        The 'start' operation starts the networking service.
+        The 'stop' operation stops the networking service.
+        The 'validate-all' operation reports whether the parameters are valid
+        The 'meta-data' operation reports this RA's meta-data information
+        The 'status' operation reports whether the networking service is running
+        The 'monitor' operation reports whether the networking service seems to be working
+
+UEND
+}
+
+meta_data() {
+    # The EXTENDED_OCF_PARAMS parameter below does not exist by default
+    # and hence converted to an empty string unless overridden. It
+    # could be used by an extension script to add new parameters. For
+    # example see https://review.openstack.org/#/c/249180/10
+
+    cat <<END
+<?xml version="1.0"?>
+<!DOCTYPE resource-agent SYSTEM "ra-api-1.dtd">
+<resource-agent name="${OCF_RESKEY_binary}">
+<version>1.0</version>
+
+<longdesc lang="en">
+Resource agent for ${OCF_RESKEY_binary}
+</longdesc>
+<shortdesc lang="en">Resource agent for ${OCF_RESKEY_binary}</shortdesc>
+<parameters>
+
+<parameter name="binary" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ binary
+</longdesc>
+<shortdesc lang="en">RabbitMQ binary</shortdesc>
+<content type="string" default="${OCF_RESKEY_binary_default}" />
+</parameter>
+
+<parameter name="ctl" unique="0" required="0">
+<longdesc lang="en">
+rabbitctl binary
+</longdesc>
+<shortdesc lang="en">rabbitctl binary</shortdesc>
+<content type="string" default="${OCF_RESKEY_ctl_default}" />
+</parameter>
+
+<parameter name="pid_file" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ PID file
+</longdesc>
+<shortdesc lang="en">RabbitMQ PID file</shortdesc>
+<content type="string" default="${OCF_RESKEY_pid_file_default}" />
+</parameter>
+
+<parameter name="log_dir" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ log directory
+</longdesc>
+<shortdesc lang="en">RabbitMQ log directory</shortdesc>
+<content type="string" default="${OCF_RESKEY_log_dir_default}" />
+</parameter>
+
+<parameter name="username" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ user name
+</longdesc>
+<shortdesc lang="en">RabbitMQ user name</shortdesc>
+<content type="string" default="${OCF_RESKEY_username_default}" />
+</parameter>
+
+<parameter name="groupname" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ group name
+</longdesc>
+<shortdesc lang="en">RabbitMQ group name</shortdesc>
+<content type="string" default="${OCF_RESKEY_groupname_default}" />
+</parameter>
+
+<parameter name="admin_user" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ default admin user for API
+</longdesc>
+<shortdesc lang="en">RabbitMQ admin user</shortdesc>
+<content type="string" default="${OCF_RESKEY_admin_user_default}" />
+</parameter>
+
+<parameter name="admin_password" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ default admin user password for API
+</longdesc>
+<shortdesc lang="en">RabbitMQ admin password</shortdesc>
+<content type="string" default="${OCF_RESKEY_admin_password_default}" />
+</parameter>
+
+<parameter name="definitions_dump_file" unique="0" required="0">
+<longdesc lang="en">
+RabbitMQ default definitions dump file
+</longdesc>
+<shortdesc lang="en">RabbitMQ definitions dump file</shortdesc>
+<content type="string" default="${OCF_RESKEY_definitions_dump_file}" />
+</parameter>
+
+<parameter name="command_timeout" unique="0" required="0">
+<longdesc lang="en">
+Timeout command arguments for issued commands termination (value is auto evaluated)
+</longdesc>
+<shortdesc lang="en">Arguments for timeout wrapping command</shortdesc>
+<content type="string" default="${OCF_RESKEY_command_timeout_default}" />
+</parameter>
+
+<parameter name="start_time" unique="0" required="0">
+<longdesc lang="en">
+Timeout for starting the rabbitmq server
+</longdesc>
+<shortdesc lang="en">Timeout for starting the rabbitmq server</shortdesc>
+<content type="string" default="${OCF_RESKEY_start_time_default}" />
+</parameter>
+
+<parameter name="stop_time" unique="0" required="0">
+<longdesc lang="en">
+Timeout for stopping rabbitmq server
+</longdesc>
+<shortdesc lang="en">Timeout for stopping rabbitmq server</shortdesc>
+<content type="string" default="${OCF_RESKEY_stop_time_default}" />
+</parameter>
+
+<parameter name="debug" unique="0" required="0">
+<longdesc lang="en">
+The debug flag for the agent (${OCF_RESKEY_binary}) instance.
+When enabled, rmq-* files will be created in the /tmp/ directory
+to log some operations and ENV values inside the OCF script.
+</longdesc>
+<shortdesc lang="en">AMQP server (${OCF_RESKEY_binary}) debug flag</shortdesc>
+<content type="boolean" default="${OCF_RESKEY_debug_default}" />
+</parameter>
+
+<parameter name="mnesia_base" unique="0" required="0">
+<longdesc lang="en">
+Base directory for storing Mnesia files
+</longdesc>
+<shortdesc lang="en">Base directory for storing Mnesia files</shortdesc>
+<content type="string" default="${OCF_RESKEY_mnesia_base_default}" />
+</parameter>
+
+<parameter name="mnesia_schema_base" unique="0" required="0">
+<longdesc lang="en">
+Parent directory for Mnesia schema directory
+</longdesc>
+<shortdesc lang="en">Parent directory for Mnesia schema directory</shortdesc>
+<content type="string" default="${OCF_RESKEY_mnesia_schema_base_default}" />
+</parameter>
+
+<parameter name="host_ip" unique="0" required="0">
+<longdesc lang="en">
+${OCF_RESKEY_binary} should listen on this IP address
+</longdesc>
+<shortdesc lang="en">${OCF_RESKEY_binary} should listen on this IP address</shortdesc>
+<content type="string" default="${OCF_RESKEY_host_ip_default}" />
+</parameter>
+
+<parameter name="node_port" unique="0" required="0">
+<longdesc lang="en">
+${OCF_RESKEY_binary} should listen on this port
+</longdesc>
+<shortdesc lang="en">${OCF_RESKEY_binary} should listen on this port</shortdesc>
+<content type="integer" default="${OCF_RESKEY_node_port_default}" />
+</parameter>
+
+<parameter name="erlang_cookie" unique="0" required="0">
+<longdesc lang="en">
+Erlang cookie for clustering. If specified, will be updated at the mnesia reset
+</longdesc>
+<shortdesc lang="en">Erlang cookie</shortdesc>
+<content type="boolean" default="${OCF_RESKEY_erlang_cookie_default}" />
+</parameter>
+
+<parameter name="erlang_cookie_file" unique="0" required="0">
+<longdesc lang="en">
+Erlang cookie file path where the cookie will be put, if requested
+</longdesc>
+<shortdesc lang="en">Erlang cookie file</shortdesc>
+<content type="string" default="${OCF_RESKEY_erlang_cookie_file_default}" />
+</parameter>
+
+<parameter name="use_fqdn" unique="0" required="0">
+<longdesc lang="en">
+Either to use FQDN or a shortname for the rabbitmq node
+</longdesc>
+<shortdesc lang="en">Use FQDN</shortdesc>
+<content type="boolean" default="${OCF_RESKEY_use_fqdn_default}" />
+</parameter>
+
+<parameter name="fqdn_prefix" unique="0" required="0">
+<longdesc lang="en">
+Optional FQDN prefix for RabbitMQ nodes in cluster.
+FQDN prefix can be specified to host multiple RabbitMQ instances on a node or
+in case of RabbitMQ running in dedicated network/interface.
+</longdesc>
+<shortdesc lang="en">FQDN prefix</shortdesc>
+<content type="string" default="${OCF_RESKEY_fqdn_prefix_default}" />
+</parameter>
+
+<parameter name="max_rabbitmqctl_timeouts" unique="0" required="0">
+<longdesc lang="en">
+If rabbitmqctl times out during a monitor call, the timeout is ignored
+unless it is the Nth timeout in a row. Here N is the value of the current parameter.
+If too many timeouts happen in a row, the monitor call will return with error.
+</longdesc>
+<shortdesc lang="en">Fail only if that many rabbitmqctl timeouts in a row occurred</shortdesc>
+<content type="string" default="${OCF_RESKEY_max_rabbitmqctl_timeouts_default}" />
+</parameter>
+
+<parameter name="policy_file" unique="0" required="0">
+<longdesc lang="en">
+A path to the shell script to setup RabbitMQ policies
+</longdesc>
+<shortdesc lang="en">A policy file path</shortdesc>
+<content type="string" default="${OCF_RESKEY_policy_file_default}" />
+</parameter>
+
+<parameter name="rmq_feature_health_check" unique="0" required="0">
+<longdesc lang="en">
+Since rabbit 3.6.4 list_queues/list_channels-based monitoring should
+be replaced with "node_health_check" command, as it creates no network
+load at all.
+</longdesc>
+<shortdesc lang="en">Use node_health_check for monitoring</shortdesc>
+<content type="boolean" default="${OCF_RESKEY_rmq_feature_health_check_default}" />
+</parameter>
+
+<parameter name="rmq_feature_local_list_queues" unique="0" required="0">
+<longdesc lang="en">
+For rabbit version that implements --local flag for list_queues, this
+can greatly reduce network overhead in cases when node is
+stopped/demoted.
+</longdesc>
+<shortdesc lang="en">Use --local option for list_queues</shortdesc>
+<content type="boolean" default="${OCF_RESKEY_rmq_feature_local_list_queues_default}" />
+</parameter>
+
+$EXTENDED_OCF_PARAMS
+
+</parameters>
+
+<actions>
+<action name="start" timeout="20" />
+<action name="stop" timeout="20" />
+<action name="status" timeout="20" />
+<action name="monitor" depth="0" timeout="30" interval="5" />
+<action name="monitor" depth="0" timeout="30" interval="3" role="Master"/>
+<action name="promote" timeout="30" />
+<action name="demote"  timeout="30" />
+<action name="notify"   timeout="20" />
+<action name="validate-all" timeout="5" />
+<action name="meta-data" timeout="5" />
+</actions>
+</resource-agent>
+END
+}
+
+
+MIN_MASTER_SCORE=100
+BEST_MASTER_SCORE=1000
+
+
+#######################################################################
+# Functions invoked by resource manager actions
+
+#TODO(bogdando) move proc_kill, proc_stop to shared OCF functions
+#  to be shipped with HA cluster packages
+###########################################################
+# Attempts to kill a process with retries and checks procfs
+# to make sure the process is stopped.
+#
+# Globals:
+#   LL
+# Arguments:
+#   $1 - pid of the process to try and kill
+#   $2 - service name used for logging and match-based kill, if the pid is "none"
+#   $3 - signal to use, defaults to SIGTERM
+#   $4 - number of retries, defaults to 5
+#   $5 - time to sleep between retries, defaults to 2
+# Returns:
+#   0 - if successful
+#   1 - if process is still running according to procfs
+#   2 - if invalid parameters passed in
+###########################################################
+proc_kill()
+{
+    local pid="${1}"
+    local service_name="${2}"
+    local signal="${3:-SIGTERM}"
+    local count="${4:-5}"
+    local process_sleep="${5:-2}"
+    local LH="${LL} proc_kill():"
+    local pgrp="$(ps -o pgid= ${pid} 2>/dev/null | tr -d '[[:space:]]')"
+
+    if [ "${pid}" -a "${pgrp}" = "1" ] ; then
+        ocf_log err "${LH} shall not kill by the bad pid 1 (init)!"
+        return 2
+    fi
+
+    if [ "${pid}" = "none" ]; then
+        local matched
+        matched="$(pgrep -fla ${service_name})"
+        if [ -z "${matched}" ] ; then
+            ocf_log info "${LH} cannot find any processes matching the ${service_name}, considering target process to be already dead"
+            return 0
+        fi
+        ocf_log debug "${LH} no pid provided, will try the ${service_name}, matched list: ${matched}"
+        while [ $count -gt 0 ]; do
+            if [ -z "${matched}" ]; then
+                break
+            else
+                matched="$(pgrep -fla ${service_name})"
+                ocf_log debug "${LH} Stopping ${service_name} with ${signal}..."
+                ocf_run pkill -f -"${signal}" "${service_name}"
+            fi
+            sleep $process_sleep
+            count=$(( count-1 ))
+        done
+        pgrep -f "${service_name}" > /dev/null
+        if [ $? -ne 0 ] ; then
+            ocf_log debug "${LH} Stopped ${service_name} with ${signal}"
+            return 0
+        else
+            ocf_log warn "${LH} Failed to stop ${service_name} with ${signal}"
+            return 1
+        fi
+    else
+    # pid is not none
+        while [ $count -gt 0 ]; do
+            if [ ! -d "/proc/${pid}" ]; then
+                break
+            else
+                ocf_log debug "${LH} Stopping ${service_name} with ${signal}..."
+                ocf_run pkill -"${signal}" -g "${pgrp}"
+            fi
+            sleep $process_sleep
+            count=$(( count-1 ))
+        done
+
+        # Check if the process ended after the last sleep
+        if [ ! -d "/proc/${pid}" ] ; then
+            ocf_log debug "${LH} Stopped ${service_name} with ${signal}"
+            return 0
+        fi
+
+        ocf_log warn "${LH} Failed to stop ${service_name} with ${signal}"
+        return 1
+    fi
+}
+
+###########################################################
+# Attempts to kill a process with the given pid or pid file
+# using proc_kill and will retry with sigkill if sigterm is
+# unsuccessful.
+#
+# Globals:
+#   OCF_ERR_GENERIC
+#   OCF_SUCCESS
+#   LL
+# Arguments:
+#   $1 - pidfile or pid or 'none', if stopping by the name matching
+#   $2 - service name used for logging or for the fallback stopping method
+#   $3 - stop process timeout (in sec), used to determine how many times we try
+#        SIGTERM and an upper limit on how long this function should try and
+#        stop the process. Defaults to 15.
+# Returns:
+#   OCF_SUCCESS - if successful
+#   OCF_ERR_GENERIC - if process is still running according to procfs
+###########################################################
+proc_stop()
+{
+    local pid_param="${1}"
+    local service_name="${2}"
+    local timeout="${3:-15}"
+    local LH="${LL} proc_stop():"
+    local i
+    local pid
+    local pidfile
+    if [ "${pid_param}" = "none" ] ; then
+        pid="none"
+    else
+        # check if provide just a number
+        echo "${pid_param}" | egrep -q '^[0-9]+$'
+        if [ $? -eq 0 ]; then
+            pid="${pid_param}"
+        elif [ -e "${pid_param}" ]; then # check if passed in a pid file
+            pidfile="${pid_param}"
+            pid=$(cat "${pidfile}" 2>/dev/null | tr -s " " "\n" | sort -u)
+        else
+            ocf_log warn "${LH} pid param ${pid_param} is not a file or a number, try match by ${service_name}"
+            pid="none"
+        fi
+    fi
+    # number of times to try a SIGTERM is (timeout - 5 seconds) / 2 seconds
+    local stop_count=$(( ($timeout-5)/2 ))
+
+    # make sure we stop at least once
+    if [ $stop_count -le 0 ]; then
+        stop_count=1
+    fi
+
+    if [ -z "${pid}" ] ; then
+        ocf_log warn "${LH} unable to get PID from ${pidfile}, try match by ${service_name}"
+        pid="none"
+    fi
+
+    if [ -n "${pid}" ]; then
+        for i in ${pid} ; do
+            [ "${i}" ] || break
+            ocf_log info "${LH} Stopping ${service_name} by PID ${i}"
+            proc_kill "${i}" "${service_name}" SIGTERM $stop_count
+            if [ $? -ne 0 ]; then
+                # SIGTERM failed, send a single SIGKILL
+                proc_kill "${i}" "${service_name}" SIGKILL 1 2
+                if [ $? -ne 0 ]; then
+                    ocf_log err "${LH} ERROR: could not stop ${service_name}"
+                    return "${OCF_ERR_GENERIC}"
+                fi
+            fi
+        done
+    fi
+
+    # Remove the pid file here which will remove empty pid files as well
+    if [ -n "${pidfile}" ]; then
+        rm -f "${pidfile}"
+    fi
+
+    ocf_log info "${LH} Stopped ${service_name}"
+    return "${OCF_SUCCESS}"
+}
+
+# Invokes the given command as a rabbitmq user and wrapped in the
+# timeout command.
+su_rabbit_cmd() {
+    local timeout
+    if [ "$1" = "-t" ]; then
+      timeout="/usr/bin/timeout ${OCF_RESKEY_command_timeout} $2"
+      shift 2
+    else
+      timeout=$COMMAND_TIMEOUT
+    fi
+    local cmd="${1:-status}"
+    local LH="${LL} su_rabbit_cmd():"
+    local rc=1
+    local user=$OCF_RESKEY_username
+    local mail=/var/spool/mail/rabbitmq
+    local pwd=/var/lib/rabbitmq
+    local home=/var/lib/rabbitmq
+
+    ocf_log debug "${LH} invoking a command: ${cmd}"
+    su $user -s /bin/sh -c "USER=${user} MAIL=${mail} PWD=${pwd} HOME=${home} LOGNAME=${user} \
+      ${timeout} ${cmd}"
+    rc=$?
+    ocf_log info "${LH} the invoked command exited ${rc}: ${cmd}"
+    return $rc
+}
+
+now() {
+    date -u +%s
+}
+
+master_score() {
+    local LH="${LL} master_score():"
+    local score=$1
+    if [ -z $score ] ; then
+        score=0
+    fi
+    ocf_log info "${LH} Updating master score attribute with ${score}"
+    ocf_run crm_master -l reboot -v $score || return $OCF_ERR_GENERIC
+    return $OCF_SUCCESS
+}
+
+# Return either FQDN or shortname, depends on the OCF_RESKEY_use_fqdn.
+get_hostname() {
+    if [ "${OCF_RESKEY_use_fqdn}" = 'false' ] ; then
+        echo "$(hostname -s)"
+    else
+        echo "$(hostname -f)"
+    fi
+}
+
+# Strip the FQDN to the shortname, if OCF_RESKEY_use_fqdn was set;
+# Prepend prefix to the hostname
+process_fqdn() {
+    if [ "${OCF_RESKEY_use_fqdn}" = 'false' ] ; then
+        echo "${OCF_RESKEY_fqdn_prefix}$1" | awk -F. '{print $1}'
+    else
+        echo "${OCF_RESKEY_fqdn_prefix}$1"
+    fi
+}
+
+# Return OCF_SUCCESS, if current host is in the list of given hosts.
+# Otherwise, return 10
+my_host() {
+    local hostlist="$1"
+    local hostname
+    local hn
+    local rc=10
+    local LH="${LL} my_host():"
+
+    hostname=$(process_fqdn $(get_hostname))
+    ocf_log info "${LH} hostlist is: $hostlist"
+    for host in $hostlist ; do
+        hn=$(process_fqdn "${host}")
+        ocf_log debug "${LH} comparing '$hostname' with '$hn'"
+        if [ "${hostname}" = "${hn}" ] ; then
+            rc=$OCF_SUCCESS
+            break
+        fi
+    done
+
+    return $rc
+}
+
+get_integer_node_attr() {
+    local value
+    value=$(crm_attribute -N $1 -l reboot --name "$2" --query 2>/dev/null | awk '{ split($3, vals, "="); if (vals[2] != "(null)") print vals[2] }')
+    if [ $? -ne 0 -o -z "$value" ] ; then
+        value=0
+    fi
+    echo $value
+}
+
+get_node_start_time() {
+    get_integer_node_attr $1 'rabbit-start-time'
+}
+
+get_node_master_score() {
+    get_integer_node_attr $1 'master-p_rabbitmq-server'
+}
+
+# Return either rabbit node name as FQDN or shortname, depends on the OCF_RESKEY_use_fqdn.
+rabbit_node_name() {
+    echo "rabbit@$(process_fqdn $1)"
+}
+
+rmq_setup_env() {
+    local H
+    local dir
+    H="$(get_hostname)"
+    export RABBITMQ_NODENAME=$(rabbit_node_name $H)
+    export RABBITMQ_NODE_PORT=$OCF_RESKEY_node_port
+    export RABBITMQ_PID_FILE=$OCF_RESKEY_pid_file
+    MNESIA_FILES="${OCF_RESKEY_mnesia_base}/$(rabbit_node_name $H)"
+    RMQ_START_TIME="${MNESIA_FILES}/ocf_server_start_time.txt"
+    MASTER_FLAG_FILE="${MNESIA_FILES}/ocf_master_for_${OCF_RESOURCE_INSTANCE}"
+    THIS_PCMK_NODE=`crm_node -n`
+    TOTALVMEM=`free -mt | awk '/Total:/ {print $2}'`
+    # check and make PID file dir
+    local PID_DIR=$( dirname $OCF_RESKEY_pid_file )
+    if [ ! -d ${PID_DIR} ] ; then
+        mkdir -p ${PID_DIR}
+        chown -R ${OCF_RESKEY_username}:${OCF_RESKEY_groupname} ${PID_DIR}
+        chmod 755 ${PID_DIR}
+    fi
+
+    # Regardless of whether we just created the directory or it
+    # already existed, check whether it is writable by the configured
+    # user
+    for dir in ${PID_DIR} "${OCF_RESKEY_mnesia_base}" "${OCF_RESKEY_log_dir}"; do
+        if test -e ${dir}; then
+            local files
+            files=$(su -s /bin/sh - $OCF_RESKEY_username -c "find ${dir} ! -writable")
+            if [ "${files}" ]; then
+                ocf_log warn "Directory ${dir} is not writable by ${OCF_RESKEY_username}, chowning."
+                chown -R ${OCF_RESKEY_username}:${OCF_RESKEY_groupname} "${dir}"
+            fi
+        fi
+    done
+
+    export LL="${OCF_RESOURCE_INSTANCE}[$$]:"
+    update_cookie
+}
+
+# Return a RabbitMQ node to its virgin state.
+# For reset and force_reset to succeed the RabbitMQ application must have been stopped.
+# If the app cannot be stopped, beam will be killed and mnesia files will be removed.
+reset_mnesia() {
+    local LH="${LL} reset_mnesia():"
+    local make_amnesia=false
+    local rc=$OCF_ERR_GENERIC
+
+    # check status of a beam process
+    get_status
+    rc=$?
+    if [ $rc -eq 0 ] ; then
+        # beam is running
+        # check status of rabbit app and stop it, if it is running
+        get_status rabbit
+        rc=$?
+        if [ $rc -eq 0 ] ; then
+            # rabbit app is running, have to stop it
+            ocf_log info "${LH} Stopping RMQ-app prior to reset the mnesia."
+            stop_rmq_server_app
+            rc=$?
+            if [ $rc -ne 0 ] ; then
+                 ocf_log warn "${LH} RMQ-app can't be stopped."
+                 make_amnesia=true
+            fi
+        fi
+
+        if ! $make_amnesia ; then
+            # rabbit app is not running, reset mnesia
+            ocf_log info "${LH} Execute reset with timeout: ${TIMEOUT_ARG}"
+            su_rabbit_cmd "${OCF_RESKEY_ctl} reset"
+            rc=$?
+            if [ $rc -ne 0 ] ; then
+                ocf_log info "${LH} Execute force_reset with timeout: ${TIMEOUT_ARG}"
+                su_rabbit_cmd "${OCF_RESKEY_ctl} force_reset"
+                rc=$?
+                if [ $rc -ne 0 ] ; then
+                    ocf_log warn "${LH} Mnesia couldn't cleaned, even by force-reset command."
+                    make_amnesia=true
+                fi
+            fi
+        fi
+    else
+        # there is no beam running
+        make_amnesia=true
+        ocf_log warn "${LH} There is no Beam process running."
+    fi
+
+    # remove mnesia files, if required
+    if $make_amnesia ; then
+        kill_rmq_and_remove_pid
+        ocf_run rm -rf "${MNESIA_FILES}"
+        mnesia_schema_location="${OCF_RESKEY_mnesia_schema_base}/Mnesia.$(rabbit_node_name $(get_hostname))"
+        ocf_run rm -rf "$mnesia_schema_location"
+        ocf_log warn "${LH} Mnesia files appear corrupted and have been removed from ${MNESIA_FILES} and $mnesia_schema_location"
+    fi
+    # always return OCF SUCCESS
+    return $OCF_SUCCESS
+}
+
+
+block_client_access()
+{
+    # do not add a temporary RMQ blocking rule if one already exists;
+    # otherwise, try to add a blocking rule with a max of 5 retries
+    local tries=5
+    until $(iptables -nvL --wait | grep -q 'temporary RMQ block') || [ $tries -eq 0 ]; do
+      tries=$((tries-1))
+      iptables --wait -I INPUT -p tcp -m tcp --dport ${OCF_RESKEY_node_port} -m state --state NEW,RELATED,ESTABLISHED \
+      -m comment --comment 'temporary RMQ block' -j REJECT --reject-with tcp-reset
+      sleep 1
+    done
+    if [ $tries -eq 0 ]; then
+        return $OCF_ERR_GENERIC
+    else
+        return $OCF_SUCCESS
+    fi
+}
+
+unblock_client_access()
+{
+    # remove all temporary RMQ blocking rules, even if more than one exists
+    for i in $(iptables -nvL --wait --line-numbers | awk '/temporary RMQ block/ {print $1}'); do
+      iptables --wait -D INPUT -p tcp -m tcp --dport ${OCF_RESKEY_node_port} -m state --state NEW,RELATED,ESTABLISHED \
+      -m comment --comment 'temporary RMQ block' -j REJECT --reject-with tcp-reset
+    done
+}
+
# Shared implementation for get_nodes / get_running_nodes.
# $1 - 'nodes' for all mnesia db nodes, 'running' for the running ones.
# Prints a space-separated list of rabbit node names on stdout.
# On rabbitmqctl failure prints an empty line and returns OCF_ERR_GENERIC.
get_nodes__base(){
    local infotype=''
    local rc=$OCF_ERR_GENERIC
    local raw

    case "$1" in
        nodes)   infotype='db_nodes' ;;
        running) infotype='running_db_nodes' ;;
    esac
    raw=$(${OCF_RESKEY_ctl} eval "mnesia:system_info(${infotype})." 2>/dev/null)
    rc=$?
    if [ $rc -ne 0 ] ; then
        echo ''
        return $OCF_ERR_GENERIC
    fi
    # squeeze the node names out of a line like
    # {running_nodes,['rabbit@node-1','rabbit@node-2','rabbit@node-3']},
    echo $(echo "${raw}" | awk -F, '{ for (i=1;i<=NF;i++) { if ($i ~ /@/) { gsub(/[\[\]}{]/,"",$i); print $i; } }}' | tr -d  "\'")
    return $OCF_SUCCESS
}
+
# Print all mnesia db nodes of the cluster.
# Propagates get_nodes__base's exit status: the former "return $?" after
# echo always returned echo's 0, so callers' error checks never fired.
get_nodes() {
    local nodes
    local rc
    nodes=$(get_nodes__base nodes)
    rc=$?
    echo $nodes
    return $rc
}
+
# Print the currently running mnesia db nodes of the cluster.
# Propagates get_nodes__base's exit status: the former "return $?" after
# echo always returned echo's 0, so callers' error checks never fired.
get_running_nodes() {
    local nodes
    local rc
    nodes=$(get_nodes__base running)
    rc=$?
    echo $nodes
    return $rc
}
+
# Print the pacemaker nodes alive in the visible partition, minus the
# node named in $1 (if given).  '(null)' placeholder entries are dropped.
get_alive_pacemaker_nodes_but()
{
    local excluded="$1"
    if [ -z "$excluded" ]; then
        echo $(crm_node -l -p | sed -e '/(null)/d')
    else
        echo $(crm_node -l -p | sed -e "s/${excluded}//g" -e '/(null)/d')
    fi
}
+
# Print the name of the current master node, searching the alive
# pacemaker nodes.  If an argument is given, that node is excluded from
# the search.  Prints nothing when no master is found.
get_master_name_but()
{
    local candidate
    for candidate in $(get_alive_pacemaker_nodes_but "$@"); do
        ocf_log info "${LH} looking if $candidate is master"

        if is_master $candidate; then
            ocf_log info "${LH} master is $candidate"
            echo $candidate
            break
        fi
    done
}
+
# Evals some erlang code on current node.
# $1 - printf(1) FORMAT string holding the Erlang expression; remaining
#      arguments are substituted into its placeholders.  $fmt is passed
#      as the printf format on purpose so callers control '%s' slots --
#      never pass untrusted data as the first argument.
# Output/status: whatever "rabbitmqctl eval" (wrapped in
# $COMMAND_TIMEOUT) prints and returns.
erl_eval() {
    local fmt="${1:?}"
    shift

    $COMMAND_TIMEOUT ${OCF_RESKEY_ctl} eval "$(printf "$fmt" "$@")"
}
+
# Returns 0 if we are clustered with the provided node: the remote node
# must be listed by rabbit_mnesia as running AND must not be reported by
# rabbit_node_monitor as partitioned from us.  Any rabbitmqctl failure is
# treated as "not clustered" (returns 1).
is_clustered_with()
{
    local LH="${LH}: is_clustered_with: "
    local node_name
    local rc
    node_name=$(rabbit_node_name $1)

    local seen_as_running
    seen_as_running=$(erl_eval "lists:member('%s', rabbit_mnesia:cluster_nodes(running))." "$node_name")
    rc=$?
    if [ "$rc" -ne 0 ]; then
        ocf_log err "${LH} Failed to check whether '$node_name' is considered running by us"
        # XXX Or should we give remote node benefit of a doubt?
        return 1
    elif [ "$seen_as_running" != true ]; then
        ocf_log info "${LH} Node $node_name is not running, considering it not clustered with us"
        return 1
    fi

    local seen_as_partitioned
    seen_as_partitioned=$(erl_eval "lists:member('%s', rabbit_node_monitor:partitions())." "$node_name")
    rc=$?
    if [ "$rc" -ne 0 ]; then
        ocf_log err "${LH} Failed to check whether '$node_name' is partitioned with us"
        # XXX Or should we give remote node benefit of a doubt?
        return 1
    elif [ "$seen_as_partitioned" != false ]; then
        ocf_log info "${LH} Node $node_name is partitioned from us"
        return 1
    fi

    # All checks passed.  Be explicit here: the former "return $?" only
    # worked because an if-statement with no taken branch exits 0.
    return 0
}
+
+
# Decide whether we still need to join the cluster of node $1.
# Returns 0 (join needed) when $1's rabbit node is NOT among the running
# cluster nodes, 1 (no join needed) when it already is.
check_need_join_to() {
    local target
    local member

    target=$(rabbit_node_name $1)
    for member in $(get_running_nodes); do
        if [ "${target}" = "${member}" ] ; then
            return 1
        fi
    done

    return 0
}
+
# Write the configured Erlang cookie to its file (unless the parameter is
# the literal 'false'), then enforce ownership and 0600 permissions.
# The file is rewritten only when its first line differs from the desired
# cookie, to avoid the brief window where redirection has truncated the
# file but echo has not finished writing it.
# Always returns OCF_SUCCESS.
update_cookie() {
    local cookie_file_content
    if [ "${OCF_RESKEY_erlang_cookie}" != 'false' ] ; then
        if [ -f "${OCF_RESKEY_erlang_cookie_file}" ]; then
            # First line of cookie file without newline
            cookie_file_content=$(head -n1 "${OCF_RESKEY_erlang_cookie_file}" | perl -pe chomp)
        fi
        # As there is a brief period of time when the file is empty
        # (shell redirection has already opened and truncated file,
        # and echo hasn't finished its job), we are doing this write
        # only when cookie has changed.
        if [ "${OCF_RESKEY_erlang_cookie}" != "${cookie_file_content}" ]; then
            echo "${OCF_RESKEY_erlang_cookie}" > "${OCF_RESKEY_erlang_cookie_file}"
        fi
        # These are idempotent operations, so we don't have to check any
        # preconditions for running them.  The owner spec is quoted so an
        # unusual user/group value cannot word-split or glob.
        chown "${OCF_RESKEY_username}:${OCF_RESKEY_groupname}" "${OCF_RESKEY_erlang_cookie_file}"
        chmod 600 "${OCF_RESKEY_erlang_cookie_file}"
    fi
    return $OCF_SUCCESS
}
+
# Stop the rmq beam process by pidfile and by rabbit node name match.
# Returns OCF_SUCCESS when the name-matching pass reports success,
# OCF_ERR_GENERIC otherwise.
kill_rmq_and_remove_pid() {
    local LH="${LL} kill_rmq_and_remove_pid():"
    # First pass: stop via the pidfile with process-name matching as a
    # fallback; its exit code is deliberately ignored.
    proc_stop "${OCF_RESKEY_pid_file}" "beam.*${RABBITMQ_NODENAME}" "${OCF_RESKEY_stop_time}"
    # Second pass: make sure no beam with our node name survived.
    if proc_stop none "beam.*${RABBITMQ_NODENAME}" "${OCF_RESKEY_stop_time}"; then
        return $OCF_SUCCESS
    fi
    return $OCF_ERR_GENERIC
}
+
# Collapse runs of whitespace to single spaces and strip leading/trailing
# blanks by letting the shell word-split the (unquoted) value and
# re-joining the words.  NB: like the original, the unquoted expansion
# would also glob-expand wildcard characters in the value.
trim_var(){
    local joined="$*"
    set -- ${joined%% }
    echo "$*"
}
+
# Normalise (trim) all pacemaker notify meta parameters in place.
# todo(sv): validate some incoming parameters
action_validate() {
    local suffix
    local var
    for suffix in post pre start stop \
                  start_resource stop_resource active_resource inactive_resource \
                  start_uname stop_uname active_uname \
                  master_resource master_uname \
                  demote_resource demote_uname \
                  slave_resource slave_uname \
                  promote_resource promote_uname; do
        var="OCF_RESKEY_CRM_meta_notify_${suffix}"
        # Indirect read + printf -v write keeps the same
        # "VAR=$(trim_var $VAR)" semantics without 19 copy-pasted lines.
        printf -v "$var" '%s' "$(trim_var ${!var})"
    done
    return $OCF_SUCCESS
}
+
# When $1 (an exit code) is 0, record the current timestamp in the
# per-reboot 'rabbit-start-time' CIB attribute of this node; otherwise do
# nothing.
update_rabbit_start_time_if_rc() {
    local rc=$1
    local nowtime

    if [ $rc -ne 0 ]; then
        return 0
    fi
    nowtime="$(now)"
    ocf_log info "${LH} Rabbit app started successfully. Updating start time attribute with ${nowtime}"
    ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-start-time' --update "${nowtime}"
}
+
# Join this node to the RabbitMQ cluster that host $1 belongs to.
# Flow: stop the rabbit app if it is running, run "join_cluster" against
# the translated rabbit node name, then restart the app.  Every failure
# path stops the whole resource via action_stop and returns
# OCF_ERR_GENERIC; success refreshes the 'rabbit-start-time' attribute.
join_to_cluster() {
    local node="$1"
    local rmq_node
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} join_to_cluster():"

    ocf_log info "${LH} start."

    # Translate the pacemaker host name into the rabbit node name.
    rmq_node=$(rabbit_node_name $node)
    ocf_log info "${LH} Joining to cluster by node '${rmq_node}'."
    get_status rabbit
    rc=$?
    if [ $rc -eq $OCF_SUCCESS ] ; then
        # The rabbit app must not be running while join_cluster executes.
        ocf_log info "${LH} rabbitmq app will be stopped."
        stop_rmq_server_app
        rc=$?
        if [ $rc -ne 0 ] ; then
            ocf_log err "${LH} Can't stop rabbitmq app by stop_app command. Stopping."
            action_stop
            return $OCF_ERR_GENERIC
        fi
    fi
    ocf_log info "${LH} Execute join_cluster with timeout: ${TIMEOUT_ARG}"
    su_rabbit_cmd "${OCF_RESKEY_ctl} join_cluster $rmq_node"
    rc=$?
    if [ $rc -ne 0 ] ; then
        ocf_log err "${LH} Can't join to cluster by node '${rmq_node}'. Stopping."
        action_stop
        return $OCF_ERR_GENERIC
    fi
    # Short pause before restarting the app; presumably lets the join
    # settle -- TODO confirm whether it is still needed.
    sleep 2
    try_to_start_rmq_app
    rc=$?
    if [ $rc -ne 0 ] ; then
        ocf_log err "${LH} Can't start RMQ app after join to cluster. Stopping."
        action_stop
        return $OCF_ERR_GENERIC
    else
        # rc is known to be 0 on this branch.
        update_rabbit_start_time_if_rc 0
        ocf_log info "${LH} Joined to cluster succesfully."
    fi

    ocf_log info "${LH} end."
    return $rc
}
+
# Kick the given (stopping) pacemaker nodes out of the rabbit cluster.
# $1 - space-separated host names of nodes where the resource is being
#      stopped (from the post-stop notification).  This node itself is
#      skipped.  Each match is first disconnected, then (after waiting
#      for it to actually drop out) removed with forget_cluster_node.
# Always returns OCF_SUCCESS; individual failures are only logged.
unjoin_nodes_from_cluster() {
    # node names of the nodes where the pcs resource is being stopped
    local nodelist="$1"
    local hostname
    local nodename
    local rc=$OCF_ERR_GENERIC
    local rnode
    # nodes in rabbit cluster db
    local nodes_in_cluster
    local LH="${LL} unjoin_nodes_from_cluster():"

    nodes_in_cluster=$(get_nodes)
    rc=$?
    # NOTE(review): get_nodes as currently written always exits 0 (the
    # status of its echo), so this error branch is likely dead -- verify.
    if [ $rc -ne 0 ] ; then
        # no nodes in node list, nothing to do
        return $OCF_SUCCESS
    fi

    # unjoin all cluster nodes which are being stopped (i.e. recieved post-stop notify), except *this* node
    # before to unjoin the nodes, make sure they were disconnected from *this* node
    for hostname in $nodelist ; do
        nodename=$(rabbit_node_name $hostname)
        if [ "${nodename}" = "${RABBITMQ_NODENAME}" ] ; then
            continue
        fi
        for rnode in $nodes_in_cluster ; do
            if [ "${nodename}" = "${rnode}" ] ; then
                # disconnect node being unjoined from this node
                ocf_run ${OCF_RESKEY_ctl} eval "disconnect_node(list_to_atom(\"${nodename}\"))." 2>&1
                rc=$?
                if [ $rc -eq $OCF_SUCCESS ] ; then
                    ocf_log info "${LH} node '${nodename}' disconnected succesfully."
                else
                    ocf_log info "${LH} disconnecting node '${nodename}' failed."
                fi

                # unjoin node
                # when the rabbit node went down, its status
                # remains 'running' for a while, so few retries are required
                local tries=0
                until [ $tries -eq 5 ]; do
                    tries=$((tries+1))
                    if is_clustered_with $nodename; then
                        ocf_log info "${LH} the ${nodename} is alive and cannot be kicked from the cluster yet"
                    else
                        break
                    fi
                    sleep 10
                done
                # Proceed even if the node still looked alive after the
                # retries; forget_cluster_node will fail and be logged.
                ocf_log info "${LH} Execute forget_cluster_node with timeout: ${TIMEOUT_ARG}"
                su_rabbit_cmd "${OCF_RESKEY_ctl} forget_cluster_node ${nodename}"
                rc=$?
                if [ $rc -eq 0 ] ; then
                   ocf_log info "${LH} node '${nodename}' unjoined succesfully."
                else
                   ocf_log warn "${LH} unjoining node '${nodename}' failed."
                fi
            fi
        done
    done
    return $OCF_SUCCESS
}
+
# Stop RMQ beam server process. Returns SUCCESS/ERROR.
# Strategy: prefer a graceful "rabbitmqctl stop" (with the pidfile when
# it is readable), then verify that neither the pidfile nor a matching
# beam process is left; fall back to kill_rmq_and_remove_pid otherwise.
stop_server_process() {
    local pid
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} stop_server_process():"

    pid=$(cat ${OCF_RESKEY_pid_file})
    rc=$?
    if [ $rc -ne 0 ] ; then
        # Try to stop without known PID
        ocf_log err "${LH} RMQ-server process PIDFILE was not found!"
        su_rabbit_cmd "${OCF_RESKEY_ctl} stop >> \"${OCF_RESKEY_log_dir}/shutdown_log\" 2>&1"
        if [ $? -eq 0 ] ; then
            ocf_log info "${LH} RMQ-server process stopped succesfully, although there was no PIDFILE found."
            ocf_log info "${LH} grant a graceful termintation window ${OCF_RESKEY_stop_time} to end its beam"
            sleep "${OCF_RESKEY_stop_time}"
        else
            kill_rmq_and_remove_pid
        fi
    elif [ "${pid}" ] ; then
        # Try to stop gracefully by known PID
        ocf_log info "${LH} Execute stop with timeout: ${TIMEOUT_ARG}"
        su_rabbit_cmd "${OCF_RESKEY_ctl} stop ${OCF_RESKEY_pid_file} >> \"${OCF_RESKEY_log_dir}/shutdown_log\" 2>&1"
        [ $? -eq 0 ] && ocf_log info "${LH} RMQ-server process (PID=${pid}) stopped succesfully."
    fi

    # Ensure there is no beam process and pidfile left
    pgrep -f "beam.*${RABBITMQ_NODENAME}" > /dev/null
    rc=$?
    if [ -f ${OCF_RESKEY_pid_file} -o $rc -eq 0 ] ; then
        # Either leftover means the graceful path did not finish the job.
        ocf_log warn "${LH} The pidfile or beam's still exist, forcing the RMQ-server cleanup"
        kill_rmq_and_remove_pid
        return $?
    else
        return $OCF_SUCCESS
    fi
}
+
# Stop RMQ-app. Return OCF_SUCCESS, if the app was stopped,
# otherwise return OCF_ERR_GENERIC.  The beam runtime is left running.
stop_rmq_server_app() {
    local rc=$OCF_ERR_GENERIC

    # if the beam process isn't running, then rabbit app is stopped as well
    get_status
    rc=$?
    if [ $rc -ne 0 ] ; then
        return $OCF_SUCCESS
    fi

    # stop the app
    ocf_log info "${LH} Execute stop_app with timeout: ${TIMEOUT_ARG}"
    su_rabbit_cmd "${OCF_RESKEY_ctl} stop_app >> \"${OCF_RESKEY_log_dir}/shutdown_log\" 2>&1"
    rc=$?
    if [ $rc -ne 0 ] ; then
         ocf_log err "${LH} RMQ-server app cannot be stopped."
         return $OCF_ERR_GENERIC
    fi

    # Verify the stop: note the INVERTED check -- get_status must now
    # report the 'rabbit' app as NOT running for the stop to count.
    get_status rabbit
    rc=$?
    if [ $rc -ne $OCF_SUCCESS ] ; then
        ocf_log info "${LH} RMQ-server app stopped succesfully."
        rc=$OCF_SUCCESS
    else
        ocf_log err "${LH} RMQ-server app cannot be stopped."
        rc=$OCF_ERR_GENERIC
    fi

    return $rc
}
+
# Start the Erlang VM for this node WITHOUT the rabbit application
# (RABBITMQ_NODE_ONLY=1).  A stale PID-file is cleaned up first (killing
# its process only if it really is a beam), then the function polls for a
# fresh PID-file and a live process, bounded by OCF_RESKEY_start_time.
start_beam_process() {
    local command
    local rc=$OCF_ERR_GENERIC
    local ts_end
    local pf_end
    local pid
    local LH="${LL} start_beam_process():"

    # remove old PID-file if it exists
    if [ -f "${OCF_RESKEY_pid_file}" ] ; then
        ocf_log warn "${LH} found old PID-file '${OCF_RESKEY_pid_file}'."
        pid=$(cat ${OCF_RESKEY_pid_file})
        if [ "${pid}" -a -d "/proc/${pid}" ] ; then
            # PID is alive: kill it only if its cmdline shows a beam
            # binary; refuse to touch an unrelated PID-reusing process.
            ocf_run cat /proc/${pid}/cmdline | grep -c 'bin/beam' > /dev/null 2>&1
            rc=$?
            if [ $rc -eq $OCF_SUCCESS ] ; then
                ocf_log warn "${LH} found beam process with PID=${pid}, killing...'."
                ocf_run kill -TERM $pid
            else
                ocf_log err "${LH} found unknown process with PID=${pid} from '${OCF_RESKEY_pid_file}'."
                return $OCF_ERR_GENERIC
            fi
        fi
        ocf_run rm -f $OCF_RESKEY_pid_file
    fi

    [ -f /etc/default/rabbitmq-server ] && . /etc/default/rabbitmq-server

    # run beam process
    command="${OCF_RESKEY_binary} >> \"${OCF_RESKEY_log_dir}/startup_log\" 2>/dev/null"
    RABBITMQ_NODE_ONLY=1 su rabbitmq -s /bin/sh -c "${command}"&
    # Outer loop: overall start deadline.  Inner loop: up to ~3s waiting
    # for the PID-file to appear on each pass.
    ts_end=$(( $(now) + ${OCF_RESKEY_start_time} ))
    rc=$OCF_ERR_GENERIC
    while [ $(now) -lt ${ts_end} ]; do
        # waiting for normal start of beam
        pid=0
        pf_end=$(( $(now) + 3 ))
        while [ $(now) -lt ${pf_end} ]; do
            # waiting for OCF_RESKEY_pid_file of beam process
            if [ -f "${OCF_RESKEY_pid_file}" ] ; then
                pid=$(cat ${OCF_RESKEY_pid_file})
                break
            fi
            sleep 1
        done
        # Success only once the PID from the file names a live process.
        if [ "${pid}" != "0" -a -d "/proc/${pid}" ] ; then
            rc=$OCF_SUCCESS
            break
        fi
        sleep 2
    done
    if [ $rc -ne $OCF_SUCCESS ]; then
        if [ "${pid}" = "0" ] ; then
            ocf_log warn "${LH} PID-file '${OCF_RESKEY_pid_file}' not found"
        fi
        ocf_log err "${LH} RMQ-runtime (beam) didn't start succesfully (rc=${rc})."
    fi

    return $rc
}
+
check_plugins() {
  # Check if it's safe to load plugins and if we need to do so. Logic is:
  #   if (EnabledPlugins > 0) and (ActivePlugins == 0) ; then it's safe to load
  # If we have at least one active plugin, then it's not safe to re-load them
  # because plugins:setup() would remove existing dependency plugins in plugins_expand_dir.
  #
  # Exit-status protocol: the erlang:error() call makes rabbitmqctl exit
  # NON-ZERO exactly in the "need to load" case; every other case
  # evaluates to 'false' and exits 0.  So: non-zero => caller should load
  # plugins, zero => nothing to do.
  ${OCF_RESKEY_ctl} eval '{ok, EnabledFile} = application:get_env(rabbit, enabled_plugins_file), EnabledPlugins = rabbit_plugins:read_enabled(EnabledFile), ActivePlugins = rabbit_plugins:active(), if length(EnabledPlugins)>0 -> if length(ActivePlugins)==0 -> erlang:error("need_to_load_plugins"); true -> false end; true -> false end.'
  return $?
}
+
# Load and start the enabled plugins, but only when check_plugins says it
# is both needed and safe (non-zero status).  Returns 0 when there was
# nothing to do, otherwise the status of the eval call.
load_plugins() {
  if check_plugins; then
    # Plugins already active (or none enabled) -- nothing to load.
    return 0
  fi
  ${OCF_RESKEY_ctl} eval 'ToBeLoaded = rabbit_plugins:setup(), ok = app_utils:load_applications(ToBeLoaded), StartupApps = app_utils:app_dependency_order(ToBeLoaded,false), app_utils:start_applications(StartupApps).'
  return $?
}
+
# Print the list of currently active plugins as reported by rabbitmqctl.
list_active_plugins() {
  local active
  active=$(${OCF_RESKEY_ctl} eval 'rabbit_plugins:active().')
  echo "${active}"
}
+
# Ensure the beam runtime is up, then start the rabbit application in it
# and load the enabled plugins.
# $1 - optional startup log path; defaults to
#      ${OCF_RESKEY_log_dir}/startup_log.
# Returns OCF_SUCCESS when start_app and the subsequent wait succeeded,
# OCF_ERR_GENERIC otherwise.  Plugin-loading failures are only logged.
try_to_start_rmq_app() {
    local startup_log="${1:-${OCF_RESKEY_log_dir}/startup_log}"
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} try_to_start_rmq_app():"

    get_status
    rc=$?
    if [ $rc -ne $OCF_SUCCESS ] ; then
        ocf_log info "${LH} RMQ-runtime (beam) not started, starting..."
        start_beam_process
        rc=$?
        if [ $rc -ne $OCF_SUCCESS ]; then
            ocf_log err "${LH} Failed to start beam - returning from the function"
            return $OCF_ERR_GENERIC
        fi
    fi

    # (The former '[ -z "$startup_log" ]' re-defaulting was dead code:
    # the ${1:-...} expansion above never yields an empty value.)

    ocf_log info "${LH} begin."
    ocf_log info "${LH} Execute start_app with timeout: ${TIMEOUT_ARG}"
    su_rabbit_cmd "${OCF_RESKEY_ctl} start_app >>${startup_log} 2>&1"
    rc=$?
    if [ $rc -eq 0 ] ; then
        ocf_log info "${LH} start_app was successful."
        ocf_log info "${LH} waiting for start to finish with timeout: ${TIMEOUT_ARG}"
        su_rabbit_cmd "${OCF_RESKEY_ctl} wait ${OCF_RESKEY_pid_file}"
        rc=$?
        if [ $rc -ne 0 ] ; then
             ocf_log err "${LH} RMQ-server app failed to wait for start."
             return $OCF_ERR_GENERIC
        fi
        rc=$OCF_SUCCESS
        # Loading enabled modules
        ocf_log info "${LH} start plugins."
        load_plugins
        local mrc=$?
        if [ $mrc -eq 0 ] ; then
          local mlist
          mlist=$(list_active_plugins)
          ocf_log info "${LH} Starting plugins: ${mlist}"
        else
          ocf_log info "${LH} Starting plugins: failed."
        fi
    else
        ocf_log info "${LH} start_app failed."
        rc=$OCF_ERR_GENERIC
    fi
    return $rc
}
+
# Full cold-start of the RMQ server for the "slave" start path: client
# traffic to the RMQ port is firewalled off for the whole procedure, the
# app is started once to prove it can run (then stopped again so the
# promote/join logic can proceed), with up to 10 rounds of Mnesia reset
# as a fallback.  Returns OCF_SUCCESS or OCF_ERR_GENERIC; always
# unblocks the port on the way out.
start_rmq_server_app() {
    local rc=$OCF_ERR_GENERIC
    local startup_log="${OCF_RESKEY_log_dir}/startup_log"
    # NOTE(review): startup_output is declared but never used -- candidate
    # for removal.
    local startup_output
    local LH="${LL} start_rmq_server_app():"
    local a

    #We are performing initial start check.
    #We are not ready to provide service.
    #Clients should not have access.


    ocf_log info "${LH} begin."
    # Safe-unblock the rules, if there are any
    unblock_client_access
    # Apply the blocking rule
    block_client_access
    rc=$?
    if [ $rc -eq $OCF_SUCCESS ]; then
      ocf_log info "${LH} blocked access to RMQ port"
    else
      ocf_log err "${LH} cannot block access to RMQ port!"
      return $OCF_ERR_GENERIC
    fi
    get_status
    rc=$?
    if [ $rc -ne $OCF_SUCCESS ] ; then
        ocf_log info "${LH} RMQ-runtime (beam) not started, starting..."
        start_beam_process
        rc=$?
        if [ $rc -ne $OCF_SUCCESS ]; then
            unblock_client_access
            ocf_log info "${LH} unblocked access to RMQ port"
            return $OCF_ERR_GENERIC
        fi
    fi

    ocf_log info "${LH} RMQ-server app not started, starting..."
    try_to_start_rmq_app "$startup_log"
    rc=$?
    if [ $rc -eq $OCF_SUCCESS ] ; then
        # rabbitmq-server started successfuly as master of cluster
        master_score $MIN_MASTER_SCORE
        # Stop the app again: this was only a viability check; the app is
        # (re)started later by the promote/join path.
        stop_rmq_server_app
        rc=$?
        if [ $rc -ne 0 ] ; then
            ocf_log err "${LH} RMQ-server app can't be stopped. Beam will be killed."
            kill_rmq_and_remove_pid
            unblock_client_access
            ocf_log info "${LH} unblocked access to RMQ port"
            return $OCF_ERR_GENERIC
        fi
    else
       # error at start RMQ-server
       ocf_log warn "${LH} RMQ-server app can't start without Mnesia cleaning."
       # Retry loop: wipe Mnesia, then try the start/stop cycle again.
       for a in $(seq 1 10) ; do
            rc=$OCF_ERR_GENERIC
            reset_mnesia || break
            try_to_start_rmq_app "$startup_log"
            rc=$?
            if [ $rc -eq $OCF_SUCCESS ]; then
                stop_rmq_server_app
                rc=$?
                if [ $rc -eq $OCF_SUCCESS ]; then
                    ocf_log info "${LH} RMQ-server app Mnesia cleaned successfully."
                    rc=$OCF_SUCCESS
                    master_score $MIN_MASTER_SCORE
                    break
                else
                    ocf_log err "${LH} RMQ-server app can't be stopped during Mnesia cleaning. Beam will be killed."
                    kill_rmq_and_remove_pid
                    unblock_client_access
                    ocf_log info "${LH} unblocked access to RMQ port"
                    return $OCF_ERR_GENERIC
                fi
            fi
        done
    fi
    if [ $rc -eq $OCF_ERR_GENERIC ] ; then
         ocf_log err "${LH} RMQ-server can't be started while many tries. Beam will be killed."
         kill_rmq_and_remove_pid
    fi
    ocf_log info "${LH} end."
    unblock_client_access
    ocf_log info "${LH} unblocked access to RMQ port"
    return $rc
}
+
# check status of rabbit beam process or a rabbit app, if rabbit arg specified
# by default, test if the kernel app is running, otherwise consider it is "not running"
# Returns OCF_SUCCESS when the requested app appears in
# which_applications(), OCF_ERR_GENERIC when the beam runs but rabbitmqctl
# failed, OCF_NOT_RUNNING otherwise.
get_status() {
    local what="${1:-kernel}"
    local rc=$OCF_NOT_RUNNING
    local LH="${LL} get_status():"
    local body
    local beam_running

    body=$( ${COMMAND_TIMEOUT} ${OCF_RESKEY_ctl} eval 'rabbit_misc:which_applications().' 2>&1 )
    rc=$?

    # Independently check for a live beam: rabbitmqctl may fail even
    # though the VM is up (e.g. app not yet started).
    pgrep -f "beam.*${RABBITMQ_NODENAME}" > /dev/null
    beam_running=$?
    # report not running only if the which_applications() reported an error AND the beam is not running
    if [ $rc -ne 0 -a $beam_running -ne 0 ] ; then
        ocf_log info "${LH} failed with code ${rc}. Command output: ${body}"
        return $OCF_NOT_RUNNING
    # return a generic error, if there were errors and beam is found running
    elif [ $rc -ne 0 ] ; then
        ocf_log info "${LH} found the beam process running but failed with code ${rc}. Command output: ${body}"
        return $OCF_ERR_GENERIC
    fi

    # try to parse the which_applications() output only if it exited w/o errors
    if [ "${what}" -a $rc -eq 0 ] ; then
        rc=$OCF_NOT_RUNNING
        echo "$body" | grep "\{${what}," > /dev/null 2>&1 && rc=$OCF_SUCCESS

        if [ $rc -ne $OCF_SUCCESS ] ; then
            ocf_log info "${LH} app ${what} was not found in command output: ${body}"
        fi
    fi

    # Collapse any non-success status into OCF_NOT_RUNNING for callers.
    [ $rc -ne $OCF_SUCCESS ] && rc=$OCF_NOT_RUNNING
    return $rc
}
+
# OCF "status" action: simply delegate to get_status and propagate its
# exit code.
action_status() {
    get_status
}
+
# Return 0 if node $1 carries a 'rabbit-master' reboot attribute whose
# value is 'true' in the CIB, 1 otherwise.
is_master() {
    local value
    value=$(crm_attribute -N "${1}" -l reboot --name 'rabbit-master' --query 2>/dev/null \
            | awk '{print $3}' | awk -F "=" '{print $2}' | sed -e '/(null)/d')
    [ "${value}" = 'true' ]
}
+
# Verify if su_rabbit_cmd exited by timeout by checking its return code.
# If it did not, return 0. If it did AND it is
# $OCF_RESKEY_max_rabbitmqctl_timeouts'th timeout in a row,
# return 2 to signal get_monitor that it should
# exit with error. Otherwise return 1 to signal that there was a timeout,
# but it should be ignored. Timeouts for different operations are tracked
# separately. The second argument is used to distingush them.
check_timeouts() {
    local op_rc=$1                  # exit code of the rabbitmqctl operation
    local timeouts_attr_name=$2     # private attribute storing the streak count
    local op_name=$3                # operation name, for log messages only

    # 124/137 presumably come from timeout(1) (expiry / 128+SIGKILL) --
    # confirm against su_rabbit_cmd's implementation.
    # 75 is EX_TEMPFAIL from sysexits, and is used by rabbitmqctl to signal about
    # timeout.
    if [ $op_rc -ne 124 -a $op_rc -ne 137 -a $op_rc -ne 75 ]; then
        # Not a timeout: reset the consecutive-timeouts counter.
        ocf_update_private_attr $timeouts_attr_name 0
        return 0
    fi

    local count
    count=$(ocf_get_private_attr $timeouts_attr_name 0)

    count=$((count+1))
    # There is a slight chance that this piece of code will be executed twice simultaneously.
    # As a result, $timeouts_attr_name's value will be one less than it should be. But we don't need
    # precise calculation here.
    ocf_update_private_attr $timeouts_attr_name $count

    if [ $count -lt $OCF_RESKEY_max_rabbitmqctl_timeouts ]; then
        ocf_log warn "${LH} 'rabbitmqctl $op_name' timed out $count of max. $OCF_RESKEY_max_rabbitmqctl_timeouts time(s) in a row. Doing nothing for now."
        return 1
    else
        ocf_log err "${LH} 'rabbitmqctl $op_name' timed out $count of max. $OCF_RESKEY_max_rabbitmqctl_timeouts time(s) in a row and is not responding. The resource is failed."
        return 2
    fi
}
+
# Wait up to $1 seconds until no queue reports the 'syncing' state, i.e.
# mirror synchronisation has finished.  With the
# rmq_feature_local_list_queues feature flag only this node's queues are
# listed.  Returns su_rabbit_cmd's status (0 when queues settled in time).
wait_sync() {
    local wait_time=$1
    local queues
    local opt_arg=""

    if [ "$OCF_RESKEY_rmq_feature_local_list_queues" = "true" ]; then
        opt_arg="--local"
    fi

    # Command STRING (executed later inside the su_rabbit_cmd shell)
    # listing queue name and state.
    queues="${COMMAND_TIMEOUT} ${OCF_RESKEY_ctl} list_queues $opt_arg name state"

    su_rabbit_cmd -t "${wait_time}" "sh -c \"while ${queues} | grep -q 'syncing,'; \
          do sleep 2; done\""

    return $?
}
+
# monitor action workhorse.  Decides the state reported to pacemaker
# (OCF_NOT_RUNNING / OCF_SUCCESS / OCF_RUNNING_MASTER / OCF_ERR_GENERIC,
# or a hard exit with OCF_FAILED_MASTER), recalculates this node's master
# score from cluster start times, runs the health check, and -- when we
# are the healthy master -- orders restarts of members that look
# disconnected or partitioned.
get_monitor() {
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} get_monitor():"
    local status_master=1
    local rabbit_running
    local name
    local node
    local node_start_time
    local nowtime
    local partitions_report
    local node_partitions

    ocf_log info "${LH} CHECK LEVEL IS: ${OCF_CHECK_LEVEL}"
    get_status
    rc=$?
    if [ $rc -eq $OCF_NOT_RUNNING ] ; then
        ocf_log info "${LH} get_status() returns ${rc}."
        ocf_log info "${LH} ensuring this slave does not get promoted."
        master_score 0
        return $OCF_NOT_RUNNING
    elif [ $rc -eq $OCF_SUCCESS ] ; then
        ocf_log info "${LH} get_status() returns ${rc}."
        ocf_log info "${LH} also checking if we are master."
        get_status rabbit
        rabbit_running=$?
        is_master $THIS_PCMK_NODE
        status_master=$?
        ocf_log info "${LH} master attribute is ${status_master}"
        # Master attribute set but the rabbit app dead => failed master.
        if [ $status_master -eq 0 -a $rabbit_running -eq $OCF_SUCCESS ]
        then
            ocf_log info "${LH} We are the running master"
            rc=$OCF_RUNNING_MASTER
        elif [ $status_master -eq 0 -a $rabbit_running -ne $OCF_SUCCESS ] ; then
            ocf_log err "${LH} We are the master and RMQ-runtime (beam) is not running. this is a failure"
            exit $OCF_FAILED_MASTER
        fi
    fi
    get_status rabbit
    rabbit_running=$?
    ocf_log info "${LH} checking if rabbit app is running"

    if [ $rc -eq $OCF_RUNNING_MASTER ]; then
        if [ $rabbit_running -eq $OCF_SUCCESS ]; then
            ocf_log info "${LH} rabbit app is running and is master of cluster"
        else
            ocf_log err "${LH} we are the master and rabbit app is not running. This is a failure"
            exit $OCF_FAILED_MASTER
        fi
    else
        # NOTE(review): start_time and restart_order_time are not declared
        # local -- they leak into the caller's scope; verify harmless.
        start_time=$((180 + $(ocf_get_private_attr 'rabbit-start-phase-1-time' 0)))
        restart_order_time=$((60 + $(ocf_get_private_attr 'rabbit-ordered-to-restart' 0)))
        nowtime=$(now)

        # If we started more than 3 minutes ago, and
        # we got order to restart less than 1 minute ago
        if [ $nowtime -lt $restart_order_time ]; then
            if [ $nowtime -gt $start_time ]; then
                ocf_log err "${LH} failing because we have received an order to restart from the master"
                stop_server_process
                rc=$OCF_ERR_GENERIC
            else
                ocf_log warn "${LH} received an order to restart from the master, ignoring it because we have just started"
            fi
        fi
    fi

    if [ $rc -eq $OCF_ERR_GENERIC ]; then
        ocf_log err "${LH} get_status() returns generic error ${rc}"
        ocf_log info "${LH} ensuring this slave does not get promoted."
        master_score 0
        return $OCF_ERR_GENERIC
    fi

    # Recounting our master score
    ocf_log info "${LH} preparing to update master score for node"
    local our_start_time
    local new_score
    local node_start_time
    local node_score

    our_start_time=$(get_node_start_time $THIS_PCMK_NODE)

    if [ $our_start_time -eq 0 ]; then
        new_score=$MIN_MASTER_SCORE
    else
        # Longest-running node wins: degrade our score below any
        # older-started node's score.
        new_score=$BEST_MASTER_SCORE
        for node in $(get_alive_pacemaker_nodes_but $THIS_PCMK_NODE)
        do
            node_start_time=$(get_node_start_time $node)
            node_score=$(get_node_master_score $node)

            ocf_log info "${LH} comparing us (start time: $our_start_time, score: $new_score) with $node (start time: $node_start_time, score: $node_score)"
            if [ $node_start_time -ne 0 -a $node_score -ne 0 -a $node_start_time -lt $our_start_time ]; then
                new_score=$((node_score - 10 < new_score ? node_score - 10 : new_score ))
            fi
        done
    fi

    if [ "$new_score" -ne "$(get_node_master_score $THIS_PCMK_NODE)" ]; then
        master_score $new_score
    fi
    ocf_log info "${LH} our start time is $our_start_time and score is $new_score"

    # Skip all other checks if rabbit app is not running
    if [ $rabbit_running -ne $OCF_SUCCESS ]; then
        ocf_log info "${LH} RabbitMQ is not running, get_monitor function ready to return ${rc}"
        return $rc
    fi

    # rc can be SUCCESS or RUNNING_MASTER, don't touch it unless there
    # is some error uncovered by node_health_check
    if ! node_health_check; then
        rc=$OCF_ERR_GENERIC
    fi

    if [ $rc -eq $OCF_RUNNING_MASTER ] ; then
        # If we are the master and healthy, perform various
        # connectivity checks for other nodes in the cluster.
        # Order a member to restart if something fishy happens with it.
        # All cross-node checks MUST happen only here.

        partitions_report="$(partitions_report)"

        for node in $(get_alive_pacemaker_nodes_but $THIS_PCMK_NODE); do
            # Restart node if we don't consider ourselves clustered with it
            if ! is_clustered_with $node; then
                ocf_log warn "${LH} node $node is not connected with us"
                order_node_restart "$node"
                continue
            fi

            # Restart node if it has any unresolved partitions
            node_partitions=$(grep_partitions_report $node "$partitions_report")
            if [ ! -z "$node_partitions" ]; then
                ocf_log warn "${LH} Node $node thinks that it is partitoned with $node_partitions"
                order_node_restart "$node"
                continue
            fi
        done
    fi

    ocf_log info "${LH} get_monitor function ready to return ${rc}"
    return $rc
}
+
# Ask node $1 to restart itself by stamping its private
# 'rabbit-ordered-to-restart' attribute with the current time; the
# target's monitor action reacts to a fresh value.
order_node_restart() {
    local target=${1:?}
    ocf_log warn "${LH} Ordering node '$target' to restart"
    ocf_update_private_attr 'rabbit-ordered-to-restart' "$(now)" "$target"
}
+
# Extract from a partitions_report() dump the list of nodes that $1
# considers itself partitioned with.  Prints nothing when $1 does not
# appear in the report.
# $1 - pacemaker node name, $2 - full report text.
grep_partitions_report() {
    local pcmk_node="${1:?}"
    local report="${2:?}"
    local rmq_node
    rmq_node=$(rabbit_node_name "$pcmk_node")
    # Keep only this node's PARTITIONED line and strip the prefix up to
    # the first ': ', leaving the comma-separated peer list.
    printf '%s\n' "$report" | grep "PARTITIONED $rmq_node:" | sed -e 's/^[^:]\+: //'
}
+
# Report partitions (if any) from the viewpoint of every running node in
# the cluster.  It is a parseable/grepable version of
# `rabbitmqctl cluster_status`.
#
# If a node sees a partition, the report will contain a line like:
#     PARTITIONED node-name: list-of-nodes, which-node-name-considers, itself-partitioned-with
# Nodes that see no partition contribute no lines at all.
partitions_report() {
    # The heredoc below is an Erlang snippet fed to `rabbitmqctl eval`:
    # xargs -0 gathers all of stdin (which contains no NUL bytes) into a
    # single eval argument, and $COMMAND_TIMEOUT bounds the whole call.
    $COMMAND_TIMEOUT xargs -0 ${OCF_RESKEY_ctl} eval <<EOF
RpcTimeout = 10,

Nodes = rabbit_mnesia:cluster_nodes(running),

{Replies, _BadNodes} = gen_server:multi_call(Nodes, rabbit_node_monitor, partitions, RpcTimeout * 1000),

lists:foreach(fun ({_, []}) -> ok;
                  ({Node, Partitions}) ->
                      PartitionsStr = string:join([atom_to_list(Part) || Part <- Partitions],
                                                  ", "),
                      io:format("PARTITIONED ~s: ~s~n",
                                [Node, PartitionsStr])
              end, Replies),

ok.
EOF
}
+
# Check if the rabbitmqctl control plane is alive.  Dispatches to the
# configured probe implementation and propagates its exit code: the
# rmq_feature_health_check resource parameter selects the native
# `rabbitmqctl node_health_check` probe, anything else keeps the legacy
# multi-command probe.
node_health_check() {
    local result
    case "$OCF_RESKEY_rmq_feature_health_check" in
        true) node_health_check_local;  result=$? ;;
        *)    node_health_check_legacy; result=$? ;;
    esac
    return $result
}
+
# Probe local node health via `rabbitmqctl node_health_check`.
# Timeout accounting is delegated to check_timeouts: reaching the retry
# limit zeroes the master score and fails; a timeout below the limit is
# tolerated and reported as success.
node_health_check_local() {
    local LH="${LH} node_health_check_local():"
    local probe_rc
    local timeout_state

    # Let rabbitmqctl time out on its own, 2 seconds before su_rabbit_cmd
    # would kill it, so it can still print useful diagnostics.
    su_rabbit_cmd "${OCF_RESKEY_ctl} node_health_check -t $((TIMEOUT_ARG - 2))"
    probe_rc=$?

    check_timeouts $probe_rc "rabbit_node_health_check_timeouts" "node_health_check"
    timeout_state=$?

    case "$timeout_state" in
        2)
            master_score 0
            ocf_log info "${LH} node_health_check timed out, retry limit reached"
            return $OCF_ERR_GENERIC
            ;;
        1)
            ocf_log info "${LH} node_health_check timed out, going to retry"
            return $OCF_SUCCESS
            ;;
    esac

    if [ "$probe_rc" -eq 0 ]; then
        return $OCF_SUCCESS
    fi
    ocf_log err "${LH} rabbitmqctl node_health_check exited with errors."
    return $OCF_ERR_GENERIC
}
+
# Legacy control-plane probe: exercise several rabbitmqctl commands
# (list_channels, get_alarms via eval, cluster_status, list_queues) and
# report OCF_ERR_GENERIC if any of them fails or a memory alarm is
# raised for this node.  Timeout accounting is delegated to
# check_timeouts; reaching a retry limit zeroes the master score and
# fails immediately.
#
# Returns OCF_SUCCESS when every probe passes.
node_health_check_legacy() {
    # Bug fix: rc used to be an uninitialized global here, so the
    # all-healthy path returned whatever value the caller had left in rc
    # (for example OCF_RUNNING_MASTER from get_monitor, which is
    # non-zero and would wrongly mark a healthy master as failed).
    local rc=$OCF_SUCCESS
    local rc_alive
    local timeout_alive
    su_rabbit_cmd "${OCF_RESKEY_ctl} list_channels > /dev/null 2>&1"
    rc_alive=$?
    # 124/137 are timeout(1)'s TERM/KILL exit codes; dump the per-node
    # channel listing for diagnostics before accounting the timeout.
    [ $rc_alive -eq 137 -o $rc_alive -eq 124 ] && ocf_log err "${LH} 'rabbitmqctl list_channels' timed out, per-node explanation: $(enhanced_list_channels)"
    check_timeouts $rc_alive "rabbit_list_channels_timeouts" "list_channels"
    timeout_alive=$?

    if [ $timeout_alive -eq 2 ]; then
        # Too many timeouts in a row - demote and fail.
        master_score 0
        return $OCF_ERR_GENERIC
    elif [ $timeout_alive -eq 0 ]; then
        if [ $rc_alive -ne 0 ]; then
            ocf_log err "${LH} rabbitmqctl list_channels exited with errors."
            rc=$OCF_ERR_GENERIC
        fi
    fi

    # Check for memory alarms for this Master or Slave node.
    # If alert found, reset the alarm
    # and restart the resource as it likely means a dead end situation
    # when rabbitmq cluster is running with blocked publishing due
    # to high memory watermark exceeded.
    local alarms
    local rc_alarms
    local timeout_alarms
    alarms=`su_rabbit_cmd "${OCF_RESKEY_ctl} -q eval 'rabbit_alarm:get_alarms().'"`
    rc_alarms=$?
    check_timeouts $rc_alarms "rabbit_get_alarms_timeouts" "get_alarms"
    timeout_alarms=$?

    if [ $timeout_alarms -eq 2 ]; then
        master_score 0
        return $OCF_ERR_GENERIC

    elif [ $timeout_alarms -eq 0 ]; then
        if [ $rc_alarms -ne 0 ]; then
            ocf_log err "${LH} rabbitmqctl get_alarms exited with errors."
            rc=$OCF_ERR_GENERIC

        elif [ -n "${alarms}" ]; then
            for node in ${alarms}; do
                # Extract the node name out of a {memory,'node'} alarm term.
                name=`echo ${node} | perl -n -e "m/memory,'(?<n>\S+)+'/ && print \"$+{n}\n\""`
                if [ "${name}" = "${RABBITMQ_NODENAME}" ] ; then
                    ocf_log err "${LH} Found raised memory alarm. Erasing the alarm and restarting."
                    # Raise the watermark so the alarm clears, then force a
                    # restart by reporting an error.
                    su_rabbit_cmd "${OCF_RESKEY_ctl} set_vm_memory_high_watermark 10 > /dev/null 2>&1"
                    rc=$OCF_ERR_GENERIC
                    break
                fi
            done
        fi
    fi

    if ! is_cluster_status_ok ; then
        rc=$OCF_ERR_GENERIC
    fi

    # Check if the list of all queues is available,
    # Also report some queues stats and total virtual memory.
    local queues
    local rc_queues
    local timeout_queues
    queues=`su_rabbit_cmd "${OCF_RESKEY_ctl} -q list_queues memory messages consumer_utilisation"`
    rc_queues=$?
    check_timeouts $rc_queues "rabbit_list_queues_timeouts" "list_queues"
    timeout_queues=$?

    if [ $timeout_queues -eq 2 ]; then
        master_score 0
        return $OCF_ERR_GENERIC

    elif [ $timeout_queues -eq 0 ]; then
        if [ $rc_queues -ne 0 ]; then
            ocf_log err "${LH} rabbitmqctl list_queues exited with errors."
            rc=$OCF_ERR_GENERIC

        elif [ -n "${queues}" ]; then
            local q_c
            q_c=`printf %b "${queues}\n" | wc -l`
            local mem
            mem=`printf %b "${queues}\n" | awk -v sum=0 '{sum+=$1} END {print (sum/1048576)}'`
            local mes
            mes=`printf %b "${queues}\n" | awk -v sum=0 '{sum+=$2} END {print sum}'`
            local c_u
            c_u=`printf %b "${queues}\n" | awk -v sum=0 -v cnt=${q_c} '{sum+=$3} END {print (sum+1)/(cnt+1)}'`
            local status
            status=`echo $(su_rabbit_cmd "${OCF_RESKEY_ctl} -q status")`
            ocf_log info "${LH} RabbitMQ is running ${q_c} queues consuming ${mem}m of ${TOTALVMEM}m total, with ${mes} queued messages, average consumer utilization ${c_u}"
            ocf_log info "${LH} RabbitMQ status: ${status}"
        fi
    fi

    return $rc
}
+
# Read a private (attrd-held) pacemaker attribute via attrd_updater.
# Prints the attribute value, or the supplied default when the query
# fails or the attribute carries no value.  Node defaults to the local
# pacemaker node.
ocf_get_private_attr() {
    local name="${1:?}"
    local default_value="${2:?}"
    local target="${3:-$THIS_PCMK_NODE}"
    local reply
    if ! reply=$(attrd_updater -p --name "$name" --node "$target" --query); then
        echo $default_value
    else
        # attrd_updater prints: name="..." host="..." value="..."; pick
        # the value field, falling back to the default when it is empty.
        echo "$reply" | awk -vdef_val="$default_value" '{ gsub(/"/, "", $3); split($3, vals, "="); if (vals[2] != "") print vals[2]; else print def_val }'
    fi
}
+
# Write a private (attrd-held) pacemaker attribute via attrd_updater,
# defaulting to the local pacemaker node; logged through ocf_run.
ocf_update_private_attr() {
    local name="${1:?}"
    local value="${2:?}"
    local target="${3:-$THIS_PCMK_NODE}"
    ocf_run attrd_updater -p --name "$name" --node "$target" --update "$value"
}
+
# Run a rabbitmqctl sub-command and fold repeated-timeout accounting
# into the exit status: the command's own status when no timeout
# occurred, success while timeouts stay below the retry limit, failure
# once the limit is reached.
rabbitmqctl_with_timeout_check() {
    local subcommand="${1:?}"
    local timeout_attr="${2:?}"
    local cmd_rc
    local timeout_state

    su_rabbit_cmd "${OCF_RESKEY_ctl} $subcommand"
    cmd_rc=$?

    check_timeouts $cmd_rc $timeout_attr "$subcommand"
    timeout_state=$?

    # An unexpected state falls through the esac and yields success,
    # mirroring the historical behavior.
    case "$timeout_state" in
        0) return $cmd_rc ;;
        1) return 0 ;;
        2) return 1 ;;
    esac
}
+
# Succeeds when `rabbitmqctl cluster_status` completes cleanly; output
# is discarded and timeout bookkeeping is delegated to the wrapper.
is_cluster_status_ok() {
    local LH="${LH}: is_cluster_status_ok:"
    rabbitmqctl_with_timeout_check cluster_status rabbit_cluster_status_timeouts \
        > /dev/null 2>&1
}
+
# OCF 'monitor' entry point: optionally dumps debug state to /tmp log
# files, then reports the node status computed by get_monitor.
action_monitor() {
    local LH="${LL} monitor:"
    local rc=$OCF_ERR_GENERIC

    ocf_log debug "${LH} action start."

    if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
        d=$(date '+%Y%m%d %H:%M:%S')
        {
            echo "$d"
            env
        } >> /tmp/rmq-monitor.log
        echo "$d  [monitor] start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
    fi

    get_monitor
    rc=$?

    ocf_log debug "${LH} role: ${OCF_RESKEY_CRM_meta_role}"
    ocf_log debug "${LH} result: $rc"
    ocf_log debug "${LH} action end."
    return $rc
}
+
+
# OCF 'start' entry point.  Starts the RMQ runtime (beam) only; the
# rabbit application itself is brought up later by the notify/promote
# handlers.  Resets per-node bookkeeping attributes before starting.
action_start() {
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} start:"
    local nowtime

    if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
        d=`date '+%Y%m%d %H:%M:%S'`
        echo $d >> /tmp/rmq-start.log
        env >> /tmp/rmq-start.log
        echo "$d  [start]  start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
    fi

    ocf_log info "${LH} action begin."

    # Idempotence: nothing to do when beam is already up.
    get_status
    rc=$?
    if [ $rc -eq $OCF_SUCCESS ] ; then
        ocf_log warn "${LH} RMQ-runtime (beam) already started."
        return $OCF_SUCCESS
    fi

    # Zero the per-command timeout counters accumulated by check_timeouts
    # during the previous incarnation of this resource.
    local attrs_to_zero="rabbit_list_channels_timeouts rabbit_get_alarms_timeouts rabbit_list_queues_timeouts rabbit_cluster_status_timeouts rabbit_node_health_check_timeouts"
    local attr_name_to_reset
    for attr_name_to_reset in $attrs_to_zero; do
        ocf_update_private_attr $attr_name_to_reset 0
    done

    # Record when phase 1 (beam start) began, and drop the attributes
    # that mark a fully-started / master node until they are re-earned.
    nowtime=$(now)
    ocf_log info "${LH} Setting phase 1 one start time to $nowtime"
    ocf_update_private_attr 'rabbit-start-phase-1-time' "$nowtime"
    ocf_log info "${LH} Deleting start time attribute"
    ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-start-time' --delete
    ocf_log info "${LH} Deleting master attribute"
    ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --delete

    ocf_log info "${LH} RMQ going to start."
    start_rmq_server_app
    rc=$?
    if [ $rc -eq $OCF_SUCCESS ] ; then
        ocf_log info "${LH} RMQ prepared for start succesfully."
    fi

    ocf_log info "${LH} action end."
    return $rc
}
+
+
# OCF 'stop' entry point.  Clears master bookkeeping, waits up to half
# of stop_time for queue sync, then stops the beam process.  Exits
# (rather than returns) OCF_ERR_GENERIC when beam cannot be stopped,
# since the resource then becomes unmanaged.
action_stop() {
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} stop:"

    if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
        d=$(date '+%Y%m%d %H:%M:%S')
        echo $d >> /tmp/rmq-stop.log
        env >> /tmp/rmq-stop.log
        echo "$d  [stop]  start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
    fi

    ocf_log info "${LH} action begin."

    ocf_log info "${LH} Deleting master attribute"
    ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --delete
    master_score 0
    ocf_log info "${LH} Deleting start time attribute"
    ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-start-time' --delete

    # Wait for synced state first
    ocf_log info "${LH} waiting $((OCF_RESKEY_stop_time/2)) to sync"
    wait_sync $((OCF_RESKEY_stop_time/2))

    ocf_log info "${LH} RMQ-runtime (beam) going to down."
    stop_server_process

    if [ $? -ne $OCF_SUCCESS ] ; then
        # Beam survived stop_server_process: pacemaker will flag the
        # resource unmanaged, so operator intervention is required.
        ocf_log err "RMQ-runtime (beam) couldn't be stopped and will likely became unmanaged. Take care of it manually!"
        ocf_log info "${LH} action end."
        exit $OCF_ERR_GENERIC
    fi

    ocf_log info "${LH} RMQ-runtime (beam) not running."
    ocf_log info "${LH} action end."
    return $OCF_SUCCESS
}
+
+#######################################################################
+# Enhanced list_channels:
+# - nodes are processed in parallel
+# - report contains information about which nodes timed out
+#
# 'list_channels' is used as a health-check for the current node, but it
# actually checks the overall health of every node in the cluster. And there were
+# some bugs where only one (non-local) channel became stuck, but OCF
+# script was wrongfully killing local node.
+#
+# Hopefully all such bugs are fixed, but if not - it will allow to
+# detect such conditions.
+#
+# Somewhat strange implementation is due to the following reasons:
+# - ability to support older versions of RabbitMQ which have reached
+#   end-of-life with single version of the script
+# - zero dependencies - for older versions this functionality could be
+#   implemented as a plugin, but it'll require this plugin installation
enhanced_list_channels() {
    # One second less than timeout of su_rabbit_cmd
    local timeout=$((${TIMEOUT_ARG:-5} - 1))

    # The Erlang below spawns one process per running cluster node, each
    # listing that node's channels over rpc with a shrinking deadline;
    # the parent then collects per-node results and marks nodes that
    # produced no reply as no_data_collected.  xargs -0 turns the whole
    # heredoc into a single `rabbitmqctl eval` argument.  The heredoc is
    # program input and is kept verbatim.
    su_rabbit_cmd "xargs -0 ${OCF_RESKEY_ctl} eval" <<EOF
SecondsToCompletion = $timeout,

%% Milliseconds since unix epoch
Now = fun() ->
              {Mega, Secs, Micro} = os:timestamp(),
              Mili = Micro div 1000,
              Mili + 1000 * (Secs + 1000000 * Mega)
      end,

%% We shouldn't continue execution past this time
ShouldEndAt = Now() + SecondsToCompletion * 1000,

%% How many milliseconds we still have
Timeout = fun() ->
                  case ShouldEndAt - Now() of
                      Past when Past =< 0 ->
                          0;
                      Timeout ->
                          Timeout
                  end
          end,

%% Lambda combinator - for defining anonymous recursive functions
Y = fun(F) ->
            (fun (X) -> F(fun(Y) -> (X(X))(Y) end) end)(
              fun (X) -> F(fun(Y) -> (X(X))(Y) end) end)
    end,

Parent = self(),

ListChannels = Y(fun(Rec) ->
                         fun (({Node, [], OkChannelsCount})) ->
                                 Parent ! {Node, ok, OkChannelsCount};
                             ({Node, [Chan|Rest], OkChannelsCount}) ->
                                 case catch rpc:call(Node, rabbit_channel, info, [Chan], Timeout()) of
                                     Infos when is_list(Infos) ->
                                         Rec({Node, Rest, OkChannelsCount + 1});
                                     {badrpc, {'EXIT', {noproc, _}}} ->
                                         %% Channel became dead before we could request it's status, don't care
                                         Rec({Node, Rest, OkChannelsCount});
                                     Err ->
                                         Parent ! {Node, Err, OkChannelsCount}
                                 end
                         end
                 end),

SingleNodeListing = fun(Node) ->
                            case catch rpc:call(Node, pg_local, get_members, [rabbit_channels], Timeout()) of
                                LocalChannels when is_list(LocalChannels) ->
                                    ListChannels({Node, LocalChannels, 0});
                                Err ->
                                    Parent ! {Node, Err, 0}
                            end
                    end,

AllNodes = rabbit_mnesia:cluster_nodes(running),
[ spawn(fun() -> SingleNodeListing(Node) end) || Node <- AllNodes ],

WaitForNodes = Y(fun(Rec) ->
                  fun ({[], Acc}) ->
                          Acc;
                      ({RemainingNodes, Acc}) ->
                          receive
                              {Node, _Status, _ChannelCount} = Smth ->
                                  RemainingNodes1 = lists:delete(Node, RemainingNodes),
                                  Rec({RemainingNodes1, [Smth|Acc]})
                              after Timeout() + 100 ->
                                      Acc
                              end
                  end
          end),

Result = WaitForNodes({AllNodes, []}),

ExpandedResult = [ case lists:keysearch(Node, 1, Result) of
                       {value, NodeResult} ->
                           NodeResult;
                       false ->
                           {Node, no_data_collected, 0}
                   end || Node <- AllNodes ],

ExpandedResult.
EOF
}
+
#######################################################################
# Join the cluster through the given node; return OCF_SUCCESS if joined.
# When the destination is empty or is this very node, my_host's status
# is passed through unchanged (the original header documents this case
# as returning 10 - the value actually comes from my_host).
# Returns OCF_ERR_GENERIC when the join fails; mnesia is reset in that
# case so the next attempt starts from a clean slate.
jjj_join () {
    local target="$1"
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} jjj_join:"

    my_host ${target}
    rc=$?
    ocf_log debug "${LH} node='${target}' rc='${rc}'"

    # Join only when the destination is a different, non-empty node;
    # otherwise hand back my_host's verdict untouched.
    if [ $rc -ne 0 -a "${target}" ] ; then
        ocf_log info "${LH} Joining to cluster by node '${target}'"
        join_to_cluster "${target}"
        rc=$?
        if [ $rc -ne $OCF_SUCCESS ] ; then
            ocf_log err "${LH} Failed to join the cluster. The mnesia will be reset."
            reset_mnesia
            rc=$OCF_ERR_GENERIC
        fi
    fi

    return $rc
}
+
# OCF 'notify' entry point.  Only 'post' notifications are handled:
#  - post-promote: join the newly promoted master (or just start the
#    rabbit app when already clustered with it);
#  - post-start:   join the reported master, then optionally import
#    definitions from a dump file via the management HTTP API;
#  - post-stop:    survivors forget the stopped nodes, while the node
#    being stopped resets its own master score.
action_notify() {
    local rc_join=$OCF_SUCCESS
    local rc=$OCF_ERR_GENERIC
    local rc2=$OCF_ERR_GENERIC
    local LH="${LL} notify:"
    local nodelist

    if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
        d=`date '+%Y%m%d %H:%M:%S'`
        echo $d >> /tmp/rmq-notify.log
        env >> /tmp/rmq-notify.log
        echo "$d  [notify]  ${OCF_RESKEY_CRM_meta_notify_type}-${OCF_RESKEY_CRM_meta_notify_operation} promote='${OCF_RESKEY_CRM_meta_notify_promote_uname}' demote='${OCF_RESKEY_CRM_meta_notify_demote_uname}' master='${OCF_RESKEY_CRM_meta_notify_master_uname}' slave='${OCF_RESKEY_CRM_meta_notify_slave_uname}' start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
    fi

    if [ "${OCF_RESKEY_CRM_meta_notify_type}" = 'post' ] ; then
        # POST- anything notify section
        case "$OCF_RESKEY_CRM_meta_notify_operation" in
            promote)
                ocf_log info "${LH} post-promote begin."

                rc=$OCF_SUCCESS

                # Do nothing, if the list of nodes being promoted reported empty.
                # Delegate recovery, if needed, to the "running out of the cluster" monitor's logic
                if [ -z "${OCF_RESKEY_CRM_meta_notify_promote_uname}" ] ; then
                    ocf_log warn "${LH} there are no nodes to join to reported on post-promote. Nothing to do."

                elif my_host "${OCF_RESKEY_CRM_meta_notify_promote_uname}"; then
                    ocf_log info "${LH} ignoring post-promote of self"

                elif is_clustered_with "${OCF_RESKEY_CRM_meta_notify_promote_uname}"; then
                    if get_status rabbit; then
                        ocf_log info "${LH} we are already clustered with master - ${OCF_RESKEY_CRM_meta_notify_promote_uname}. Nothing to do."
                    else
                        ocf_log info "${LH} we are already clustered with master - ${OCF_RESKEY_CRM_meta_notify_promote_uname}. We only need to start the app."

                        try_to_start_rmq_app
                        rc2=$?
                        update_rabbit_start_time_if_rc $rc2
                    fi

                else
                    # Note, this should fail when the mnesia is inconsistent.
                    # For example, when the "old" master is processing the promotion of the new one.
                    # Later this ex-master node will rejoin the cluster at post-start.
                    jjj_join "${OCF_RESKEY_CRM_meta_notify_promote_uname}"
                    rc=$?
                    if [ $rc -eq $OCF_ERR_GENERIC ] ; then
                        ocf_log err "${LH} Failed to join the cluster on post-promote. The resource will be restarted."
                    fi
                fi

                ocf_log info "${LH} post-promote end."
                return $rc
                ;;
            start)
                ocf_log info "${LH} post-start begin."
                local nodes_list="${OCF_RESKEY_CRM_meta_notify_start_uname} ${OCF_RESKEY_CRM_meta_notify_active_uname}"
                # Do nothing, if the list of nodes being started or running reported empty
                # Delegate recovery, if needed, to the "running out of the cluster" monitor's logic
                # NOTE(review): with both uname lists empty, nodes_list still
                # holds a single space, so this -z branch never fires - verify intent.
                if [ -z "${nodes_list}" ] ; then
                  ocf_log warn "${LH} I'm a last man standing and I must survive!"
                  ocf_log info "${LH} post-start end."
                  return $OCF_SUCCESS
                fi
                # check did this event from this host
                my_host "${nodes_list}"
                rc=$?
                # Do nothing, if there is no master reported
                # Delegate recovery, if needed, to the "running out of the cluster" monitor's logic
                if [ -z "${OCF_RESKEY_CRM_meta_notify_master_uname}" ] ; then
                  ocf_log warn "${LH} there are no nodes to join to reported on post-start. Nothing to do."
                  ocf_log info "${LH} post-start end."
                  return $OCF_SUCCESS
                fi
                if [ $rc -eq $OCF_SUCCESS ] ; then
                    # Now we need to:
                    # a. join to the cluster if we are not joined yet
                    # b. start the RabbitMQ application, which is always
                    #    stopped after start action finishes
                    check_need_join_to ${OCF_RESKEY_CRM_meta_notify_master_uname}
                    rc_join=$?
                    if [ $rc_join -eq $OCF_SUCCESS ]; then
                      ocf_log warn "${LH} Going to join node ${OCF_RESKEY_CRM_meta_notify_master_uname}"
                      jjj_join "${OCF_RESKEY_CRM_meta_notify_master_uname}"
                      rc2=$?
                    else
                      ocf_log warn "${LH} We are already clustered with node ${OCF_RESKEY_CRM_meta_notify_master_uname}"

                      try_to_start_rmq_app
                      rc2=$?
                      update_rabbit_start_time_if_rc $rc2
                    fi
                    ocf_log info "${LH} post-start end."
                    # Import definitions (users, vhosts, policies, ...)
                    # through the management HTTP API when a non-empty
                    # dump file is configured.
                    if [ -s "${OCF_RESKEY_definitions_dump_file}" ] ; then
                        ocf_log info "File ${OCF_RESKEY_definitions_dump_file} exists"
                        ocf_run curl --silent --show-error --request POST --user $OCF_RESKEY_admin_user:$OCF_RESKEY_admin_password $OCF_RESKEY_host_ip:15672/api/definitions --header "Content-Type:application/json" --data @$OCF_RESKEY_definitions_dump_file
                        rc=$?
                        if [ $rc -eq $OCF_SUCCESS ] ; then
                            ocf_log info "RMQ definitions have imported succesfully."
                        else
                            ocf_log err "RMQ definitions have not imported."
                        fi
                    fi
                    if [ $rc2 -eq $OCF_ERR_GENERIC ] ; then
                        ocf_log warn "${LH} Failed to join the cluster on post-start. The resource will be restarted."
                        ocf_log info "${LH} post-start end."
                        return $OCF_ERR_GENERIC
                    fi
                fi
                ;;
            stop)
                # if rabbitmq-server stops on any another node, we should remove it from cluster (as ordinary operation)
                ocf_log info "${LH} post-stop begin."
                # Report not running, if there are no nodes being stopped reported
                if [ -z "${OCF_RESKEY_CRM_meta_notify_stop_uname}" ] ; then
                  ocf_log warn "${LH} there are no nodes being stopped reported on post-stop. The resource will be restarted."
                  ocf_log info "${LH} post-stop end."
                  return $OCF_ERR_GENERIC
                fi
                my_host "${OCF_RESKEY_CRM_meta_notify_stop_uname}"
                rc=$?
                if [ $rc -ne $OCF_SUCCESS ] ; then
                    # Wait for synced state first
                    ocf_log info "${LH} waiting $((OCF_RESKEY_stop_time/2)) to sync"
                    wait_sync $((OCF_RESKEY_stop_time/2))
                    # On other nodes processing the post-stop, make sure the stopped node will be forgotten
                    unjoin_nodes_from_cluster "${OCF_RESKEY_CRM_meta_notify_stop_uname}"
                else
                    # On the nodes being stopped, reset the master score
                    ocf_log info "${LH} resetting the master score."
                    master_score 0
                fi
                # always returns OCF_SUCCESS
                ocf_log info "${LH} post-stop end."
                ;;
            *)  ;;
        esac
    fi

    return $OCF_SUCCESS
}
+
+
# OCF 'promote' entry point.  Verifies current state via get_monitor and
# turns a healthy slave into a master: sets the 'rabbit-master' node
# attribute, starts beam and the rabbit app if needed, sources the
# optional policy file, and re-checks that the node now reports
# OCF_RUNNING_MASTER.  Exits OCF_FAILED_MASTER when the master cannot
# be brought up.
action_promote() {
    local rc=$OCF_ERR_GENERIC
    local LH="${LL} promote:"

    if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
        d=$(date '+%Y%m%d %H:%M:%S')
        echo $d >> /tmp/rmq-promote.log
        env >> /tmp/rmq-promote.log
        echo "$d  [promote]  start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
    fi

    ocf_log info "${LH} action begin."

    get_monitor
    rc=$?
    ocf_log info "${LH} get_monitor returns ${rc}"
    case "$rc" in
        "$OCF_SUCCESS")
            # Running as slave. Normal, expected behavior.
            ocf_log info "${LH} Resource is currently running as Slave"
            # rabbitmqctl start_app if need
            get_status rabbit
            rc=$?
            ocf_log info "${LH} Updating cluster master attribute"
            ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --update 'true'
            if [ $rc -ne $OCF_SUCCESS ] ; then
                ocf_log info "${LH} RMQ app is not started. Starting..."
                start_rmq_server_app
                rc=$?
                if [ $rc -eq 0 ] ; then
                    try_to_start_rmq_app
                    rc=$?
                    if [ $rc -ne 0 ] ; then
                        ocf_log err "${LH} Can't start RMQ app. Master resource is failed."
                        ocf_log info "${LH} action end."
                        exit $OCF_FAILED_MASTER
                    fi

                    # Source the optional policy hook file, if present.
                    [ -f "${OCF_RESKEY_policy_file}" ] && . "${OCF_RESKEY_policy_file}"

                    update_rabbit_start_time_if_rc $rc

                    # Confirm this node now actually reports master.
                    ocf_log info "${LH} Checking master status"
                    get_monitor
                    rc=$?
                    ocf_log info "${LH} Master status is $rc"
                    if [ $rc = $OCF_RUNNING_MASTER ]
                    then
                       rc=$OCF_SUCCESS
                    else
                       ocf_log err "${LH} Master resource is failed."
                       ocf_log info "${LH} action end."
                       exit $OCF_FAILED_MASTER
                    fi
                else
                    ocf_log err "${LH} Can't start RMQ-runtime."
                    rc=$OCF_ERR_GENERIC
                fi
            fi
            return $rc
            ;;
        "$OCF_RUNNING_MASTER")
            # Already a master. Unexpected, but not a problem.
            ocf_log warn "${LH} Resource is already running as Master"
            rc=$OCF_SUCCESS
            ;;

        "$OCF_FAILED_MASTER")
            # Master failed.
            ocf_log err "${LH} Master resource is failed and not running"
            ocf_log info "${LH} action end."
            exit $OCF_FAILED_MASTER
            ;;

        "$OCF_NOT_RUNNING")
            # Currently not running.
            ocf_log err "${LH} Resource is currently not running"
            rc=$OCF_NOT_RUNNING
            ;;
        *)
            # Failed resource. Let the cluster manager recover.
            ocf_log err "${LH} Unexpected error, cannot promote"
            ocf_log info "${LH} action end."
            exit $rc
            ;;
    esac

    # transform slave RMQ-server to master

    ocf_log info "${LH} action end."
    return $rc
}
+
+
# OCF 'demote' entry point: drop the 'rabbit-master' node attribute.
# Always succeeds.
action_demote() {
    local LH="${LL} demote:"
    ocf_log info "${LH} action begin."
    ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --delete
    ocf_log info "${LH} action end."
    return $OCF_SUCCESS
}
+#######################################################################
+
rmq_setup_env

# meta-data and usage/help are answerable without a validated environment
case "$1" in
  meta-data)    meta_data
                exit $OCF_SUCCESS;;
  usage|help)   usage
                exit $OCF_SUCCESS;;
esac

# Anything except meta-data and help must pass validation
action_validate || exit $?

# What kind of method was invoked?
case "$1" in
  start)        action_start;;
  stop)         action_stop;;
  status)       action_status;;
  monitor)      action_monitor;;
  validate)     action_validate;;
  promote)      action_promote;;
  demote)       action_demote;;
  notify)       action_notify;;
  validate-all) action_validate;;
  *)            usage;;
esac
###
similarity index 98%
rename from debian/ocf/rabbitmq-server
rename to scripts/rabbitmq-server.ocf
index c9279710ea00fa1d0cf907373cba25807ed2c66d..804e65423d03ce057893fb077e507900704239d7 100755 (executable)
@@ -12,7 +12,7 @@
 ##  The Original Code is RabbitMQ.
 ##
 ##  The Initial Developer of the Original Code is GoPivotal, Inc.
-##  Copyright (c) 2007-2014 GoPivotal, Inc.  All rights reserved.
+##  Copyright (c) 2007-2015 Pivotal Software, Inc.  All rights reserved.
 ##
 
 ##
@@ -299,7 +299,7 @@ rabbit_stop() {
         return $OCF_SUCCESS
     fi
 
-    $RABBITMQ_CTL stop
+    $RABBITMQ_CTL stop ${RABBITMQ_PID_FILE}
     rc=$?
 
     if [ "$rc" != 0 ]; then
diff --git a/scripts/travis_test_ocf_ra.sh b/scripts/travis_test_ocf_ra.sh
new file mode 100644 (file)
index 0000000..e8f9a74
--- /dev/null
@@ -0,0 +1,30 @@
+#!/bin/sh -eux
+# Prepare and run a smoke test against the RabbitMQ OCF RA only if
+# the scripts/rabbitmq-server-ha.ocf has changes
+if ! git diff HEAD~ --name-only | grep -q scripts/rabbitmq-server-ha.ocf
+then
+  exit 0
+fi
+
+export VAGRANT_VERSION=1.8.1
+export DOCKER_IMAGE=bogdando/rabbitmq-cluster-ocf-wily
+export UPLOAD_METHOD=none
+export DOCKER_MOUNTS="$(pwd)/scripts/rabbitmq-server-ha.ocf:/tmp/rabbitmq-server-ha"
+
+# Install vagrant and requirements
+sudo apt-get install -qq git wget
+wget --no-verbose https://releases.hashicorp.com/vagrant/${VAGRANT_VERSION}/vagrant_${VAGRANT_VERSION}_x86_64.deb
+sudo dpkg -i --force-all ./vagrant_${VAGRANT_VERSION}_x86_64.deb
+vagrant plugin install vagrant-triggers
+
+# Update docker and prepare images
+sudo apt-get update
+sudo DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install --only-upgrade docker-engine
+sudo service docker restart
+docker pull $DOCKER_IMAGE
+
+# Prepare and run a smoke test for a rabbitmq cluster by the OCF RA
+git clone https://github.com/bogdando/rabbitmq-cluster-ocf-vagrant.git
+cd ./rabbitmq-cluster-ocf-vagrant
+vagrant up --provider docker
+docker exec -it n1 /bin/bash /vagrant/vagrant_script/test_rabbitcluster.sh rabbit@n1 rabbit@n2
diff --git a/upgrade/Makefile b/upgrade/Makefile
new file mode 100644 (file)
index 0000000..f7cc0a6
--- /dev/null
@@ -0,0 +1,109 @@
+
+UPGRADE_FROM = 3.6.5
+
+# UPGRADE_FROM_SCRIPT specifies a script name to use.
+# it should not be changed for every UPGRADE_FROM version
+UPGRADE_FROM_SCRIPT = 3.5
+UPGRADE_TO_SCRIPT = 3.6
+
+RELEASE_FOR_UPGRADE = $(CURDIR)/rabbitmq_r_$(UPGRADE_FROM)
+RELEASE_ARCHIVE = rabbitmq-server-generic-unix-$(UPGRADE_FROM).tar.xz
+RELEASE_FOR_UPGRADE_URL = "http://www.rabbitmq.com/releases/rabbitmq-server/v$(UPGRADE_FROM)/$(RELEASE_ARCHIVE)"
+RELEASE_UPGRADE_DATA = $(CURDIR)/rabbitmq_server_upgrade_data/rabbitmq-test-instances
+UPGRADE_CONFIG_DIR = $(CURDIR)/config
+UPGRADE_SCRIPTS_DIR = $(CURDIR)/scripts
+
+QUEUES_COUNT_TRANSIENT ?= 1
+QUEUES_COUNT_DURABLE ?= 1
+MSGS_COUNT_NON_PERSISTENT ?= 10
+MSGS_COUNT_PERSISTENT_INDEX ?= 10
+MSGS_COUNT_PERSISTENT_STORE ?= 10
+INDEX_MSG_SIZE ?= 50
+STORE_MSG_SIZE ?= 150
+
+# Clean up erlang.mk exported variables
+ifneq (0,${MAKELEVEL})
+export ERL_LIBS=
+export RABBITMQ_SCRIPTS_DIR=
+endif
+
+all: verify-and-cleanup
+
+$(RELEASE_ARCHIVE):
+       wget $(RELEASE_FOR_UPGRADE_URL) -O $(RELEASE_ARCHIVE)
+
+$(RELEASE_FOR_UPGRADE): $(RELEASE_ARCHIVE)
+       mkdir $(RELEASE_FOR_UPGRADE)
+       tar -xvf $(RELEASE_ARCHIVE)
+       mv rabbitmq_server-*/* $(RELEASE_FOR_UPGRADE)
+
+run-release-for-upgrade: $(RELEASE_FOR_UPGRADE)
+       rm -rf $(RELEASE_UPGRADE_DATA)
+       mkdir -p $(RELEASE_UPGRADE_DATA)
+       cd $(RELEASE_FOR_UPGRADE) && \
+       RABBITMQ_HOME=$(RELEASE_FOR_UPGRADE) \
+       RABBITMQ_CONFIG_FILE=$(UPGRADE_CONFIG_DIR)/rabbitmq \
+       RABBITMQ_NODENAME=rabbit_upgrade \
+       RABBITMQ_ENABLED_PLUGINS_FILE=$(UPGRADE_CONFIG_DIR)/enabled_plugins \
+       RABBITMQ_LOG_BASE=$(RELEASE_UPGRADE_DATA)/rabbit_upgrade/log \
+       RABBITMQ_MNESIA_BASE=$(RELEASE_UPGRADE_DATA)/rabbit_upgrade/mnesia \
+       RABBITMQ_MNESIA_DIR=$(RELEASE_UPGRADE_DATA)/rabbit_upgrade/mnesia/rabbit_upgrade \
+       RABBITMQ_SCHEMA_DIR=$(RELEASE_UPGRADE_DATA)/rabbit_upgrade/schema \
+       RABBITMQ_PLUGINS_EXPAND_DIR=$(RELEASE_UPGRADE_DATA)/rabbit_upgrade/plugins \
+       RABBITMQ_PLUGINS_DIR=$(RELEASE_FOR_UPGRADE)/plugins \
+       ./sbin/rabbitmq-server -detached
+       sleep 10
+
+stop-release-for-upgrade:
+       cd $(RELEASE_FOR_UPGRADE) && \
+       RABBITMQ_HOME=$(RELEASE_FOR_UPGRADE) \
+       ./sbin/rabbitmqctl -n rabbit_upgrade stop
+
+setup-release-for-upgrade: run-release-for-upgrade
+       RABBITMQCTL="$(RELEASE_FOR_UPGRADE)/sbin/rabbitmqctl -n rabbit_upgrade" \
+       UPGRADE_FROM_SCRIPT=$(UPGRADE_FROM_SCRIPT) \
+       QUEUES_COUNT_TRANSIENT=$(QUEUES_COUNT_TRANSIENT) \
+       QUEUES_COUNT_DURABLE=$(QUEUES_COUNT_DURABLE) \
+       MSGS_COUNT_NON_PERSISTENT=$(MSGS_COUNT_NON_PERSISTENT) \
+       MSGS_COUNT_PERSISTENT_INDEX=$(MSGS_COUNT_PERSISTENT_INDEX) \
+       MSGS_COUNT_PERSISTENT_STORE=$(MSGS_COUNT_PERSISTENT_STORE) \
+       INDEX_MSG_SIZE=$(INDEX_MSG_SIZE) \
+       STORE_MSG_SIZE=$(STORE_MSG_SIZE) \
+       bash $(UPGRADE_SCRIPTS_DIR)/upgrade-from.sh
+
+prepare-release-for-upgrade: setup-release-for-upgrade stop-release-for-upgrade
+
+run-broker-upgrade:
+       $(MAKE) -C ../ run-background-broker \
+       RABBITMQ_CONFIG_FILE=$(UPGRADE_CONFIG_DIR)/rabbitmq \
+       RABBITMQ_NODENAME=rabbit_upgrade \
+       TMPDIR=`dirname $(RELEASE_UPGRADE_DATA)` \
+       PLUGINS=rabbitmq_management
+       sleep 10
+
+verify-upgrade: prepare-release-for-upgrade run-broker-upgrade
+       RABBITMQCTL="$(RELEASE_FOR_UPGRADE)/sbin/rabbitmqctl -n rabbit_upgrade" \
+       UPGRADE_TO_SCRIPT=$(UPGRADE_TO_SCRIPT) \
+       QUEUES_COUNT_TRANSIENT=$(QUEUES_COUNT_TRANSIENT) \
+       QUEUES_COUNT_DURABLE=$(QUEUES_COUNT_DURABLE) \
+       MSGS_COUNT_NON_PERSISTENT=$(MSGS_COUNT_NON_PERSISTENT) \
+       MSGS_COUNT_PERSISTENT_INDEX=$(MSGS_COUNT_PERSISTENT_INDEX) \
+       MSGS_COUNT_PERSISTENT_STORE=$(MSGS_COUNT_PERSISTENT_STORE) \
+       INDEX_MSG_SIZE=$(INDEX_MSG_SIZE) \
+       STORE_MSG_SIZE=$(STORE_MSG_SIZE) \
+       $(UPGRADE_SCRIPTS_DIR)/upgrade-to.sh
+
+verify-and-stop: verify-upgrade
+       -$(MAKE) stop-release-for-upgrade
+
+verify-and-cleanup: verify-and-stop
+       $(MAKE) clean
+
+clean: 
+       -$(MAKE) stop-release-for-upgrade
+       rm -rf rabbitmq_server*
+       rm -rf rabbitmq_r*
+       rm -rf rabbitmqadmin
+
+distclean: clean
+       rm -rf rabbitmq-server-generic-unix-*
\ No newline at end of file
diff --git a/upgrade/README.md b/upgrade/README.md
new file mode 100644 (file)
index 0000000..a283fbe
--- /dev/null
@@ -0,0 +1,64 @@
+## Testing RabbitMQ upgrades.
+
+This tool can be used to test upgrade of RabbitMQ to a branch version.
+`rabbitmq-server` dependency should be checked out to the tested branch.
+
+### How it works:
+
+This tool uses GNU Make to run the following steps:
+
+ - install the generic Unix release of the specified version into `rabbitmq_r_<version>`
+ - start a server with configuration from `config` directory and data directory in `rabbitmq_server_upgrade_data`
+ - set up vhost, user, policies, exchange (fanout), queues (transient and durable)
+ - publish messages to queues (persistent to queue index, persistent to storage, not persistent)
+ - stop the server
+ - start local branch server using `run-background-broker`
+ - verify everything is in place (transient queues and messages are expected to be lost)
+ - stop local server
+
+By default it will also clean up the test data.
+
+### Custom targets and configuration
+
+Make targets:
+
+ - `run-release-for-upgrade` - download release and run it in `rabbitmq_r_<version>` with data in `rabbitmq_server_upgrade_data`
+ - `setup-release-for-upgrade` - run previous step and set up test data
+ - `prepare-release-for-upgrade` - run previous step and stop the server (can be used to build "pre upgrade" state)
+ - `run-broker-upgrade` - run previous step and run a current branch server (can be used to observe "after upgrade" state for manual verification)
+ - `verify-upgrade` - run previous step and verification script (that will consume published messages and delete some vhosts)
+ - `verify-and-stop` - run previous step and stop the server
+ - `verify-and-cleanup` - run previous step and delete test data **this is the default step**
+
+Additional targets:
+
+ - `clean` - stop server (if running) and delete temporary data
+ - `distclean` - same as `clean`, but also removes downloaded release package
+
+Environment:
+
+Following environment parameters can be used to configure upgrade validation:
+
+| parameter                   | default | description                                                    |
+|-----------------------------|---------|----------------------------------------------------------------|
+| UPGRADE_FROM                | 3.6.5   | Release version to upgrade from                                |
+| QUEUES_COUNT_TRANSIENT      | 1       | Number of transient queues                                     |
+| QUEUES_COUNT_DURABLE        | 1       | Number of durable queues                                       |
+| MSGS_COUNT_NON_PERSISTENT   | 10      | Number of transient messages to publish                        |
+| MSGS_COUNT_PERSISTENT_INDEX | 10      | Number of persistent messages to publish to queue index        |
+| MSGS_COUNT_PERSISTENT_STORE | 10      | Number of persistent messages to publish to message store      |
+| INDEX_MSG_SIZE              | 50      | Message size to fit queue index (depends on configuration)     |
+| STORE_MSG_SIZE              | 150     | Message size to not fit queue index (depends on configuration) |
+
+`INDEX_MSG_SIZE` and `STORE_MSG_SIZE` should be set to be more and less than `queue_index_embed_msgs_below` setting in `config/rabbitmq.config` file respectively.
+
+Unsafe. Do not change without need:
+
+| parameter | default | description |
+|-----------|---------|-------------|
+| UPGRADE_FROM_SCRIPT | 3.5   | Script to use for data setup. Currently only `3.5` exists; it is used for both `3.5.x` and `3.6.x` releases |
+| UPGRADE_TO_SCRIPT   | 3.6   | Script to use for verification. Should correspond with branch version  |
+| RELEASE_ARCHIVE     | rabbitmq-server-generic-unix-$(UPGRADE_FROM).tar.xz | Filename for release archive |
+| RELEASE_FOR_UPGRADE_URL | http://www.rabbitmq.com/releases/rabbitmq-server/v$(UPGRADE_FROM)/$(RELEASE_ARCHIVE) | URL to load $RELEASE_ARCHIVE from. Should point to directly accessible (via wget) generic unix archive |
+
+
diff --git a/upgrade/config/enabled_plugins b/upgrade/config/enabled_plugins
new file mode 100644 (file)
index 0000000..352dfc4
--- /dev/null
@@ -0,0 +1 @@
+[rabbitmq_management].
diff --git a/upgrade/config/rabbitmq.config b/upgrade/config/rabbitmq.config
new file mode 100644 (file)
index 0000000..6a0278d
--- /dev/null
@@ -0,0 +1 @@
+[{rabbit, [{queue_index_embed_msgs_below, 100}]}].
diff --git a/upgrade/scripts/upgrade-from-3.5-helpers.sh b/upgrade/scripts/upgrade-from-3.5-helpers.sh
new file mode 100644 (file)
index 0000000..a8adbd3
--- /dev/null
@@ -0,0 +1,80 @@
+setup_steps() {
+    local test_vhost='test'
+    local test_user='test_user'
+
+    ${RABBITMQCTL} add_user $test_user $test_user
+    ${RABBITMQCTL} set_user_tags $test_user policymaker
+    ${RABBITMQCTL} add_vhost $test_vhost
+    ${RABBITMQCTL} set_permissions -p $test_vhost $test_user '.*' '.*' '.*'
+    ${RABBITMQCTL} set_policy -p $test_vhost my_policy_name "policy.*" '{"max-length":300}'
+
+    # Create a fanout exchange, transient and durable queues, then publish messages
+    local exchange_name="test_exchange"
+    local queue_name_base="test_queue"
+
+    prepare_rabbitmqadmin
+
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost declare exchange name=$exchange_name type=fanout
+    for i in `seq 1 $QUEUES_COUNT_TRANSIENT`
+    do
+        local queue_name="${queue_name_base}_trans_${i}"
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost declare queue name=$queue_name durable=false
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost declare binding source=$exchange_name destination=$queue_name
+    done
+
+    for i in `seq 1 $QUEUES_COUNT_DURABLE`
+    do
+        local queue_name="${queue_name_base}_dur_${i}"
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost declare queue name=$queue_name durable=true
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost declare binding source=$exchange_name destination=$queue_name
+    done
+
+    local sequence_index=`seq 1 $INDEX_MSG_SIZE`
+    local msg_payload_index=`printf '=%.0s' $sequence_index`
+    local sequence_store=`seq 1 $STORE_MSG_SIZE`
+    local msg_payload_store=`printf '+%.0s' $sequence_store`
+    for i in `seq 1 $MSGS_COUNT_NON_PERSISTENT`
+    do
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost publish routing_key=any exchange=$exchange_name payload=$msg_payload_index
+    done
+
+    for i in `seq 1 $MSGS_COUNT_PERSISTENT_INDEX`
+    do
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost publish routing_key=any exchange=$exchange_name payload=$msg_payload_index properties='{"delivery_mode":2}'
+    done
+
+    for i in `seq 1 $MSGS_COUNT_PERSISTENT_STORE`
+    do
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost publish routing_key=any exchange=$exchange_name payload=$msg_payload_store properties='{"delivery_mode":2}'
+    done
+
+    # Second vhost to test data isolation
+
+    local test_vhost1="test_vhost_1"
+    local msg_payload_index_1=`printf '_%.0s' $sequence_index`
+    local msg_payload_store_1=`printf '0%.0s' $sequence_store`
+
+    ${RABBITMQCTL} add_vhost $test_vhost1
+    ${RABBITMQCTL} set_permissions -p $test_vhost1 $test_user '.*' '.*' '.*'
+
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 declare exchange name=$exchange_name type=fanout
+    for i in `seq 1 $QUEUES_COUNT_DURABLE`
+    do
+        local queue_name="${queue_name_base}_dur_vhost1_${i}"
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 declare queue name=$queue_name durable=true
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 declare binding source=$exchange_name destination=$queue_name
+    done
+    for i in `seq 1 $MSGS_COUNT_PERSISTENT_INDEX`
+    do
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 publish routing_key=any exchange=$exchange_name payload=$msg_payload_index_1 properties='{"delivery_mode":2}'
+    done
+
+    for i in `seq 1 $MSGS_COUNT_PERSISTENT_STORE`
+    do
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 publish routing_key=any exchange=$exchange_name payload=$msg_payload_store_1 properties='{"delivery_mode":2}'
+    done
+
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list queues name durable messages
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list exchanges
+}
+
diff --git a/upgrade/scripts/upgrade-from.sh b/upgrade/scripts/upgrade-from.sh
new file mode 100755 (executable)
index 0000000..90ac072
--- /dev/null
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "UPGRADE_FROM ${UPGRADE_FROM}"
+
+. "$(dirname "$0")/upgrade-helpers.sh"
+. "$(dirname "$0")/upgrade-from-${UPGRADE_FROM_SCRIPT}-helpers.sh"
+
+setup_steps
\ No newline at end of file
diff --git a/upgrade/scripts/upgrade-helpers.sh b/upgrade/scripts/upgrade-helpers.sh
new file mode 100644 (file)
index 0000000..ccfc5f9
--- /dev/null
@@ -0,0 +1,5 @@
+prepare_rabbitmqadmin() {
+    rm -rf rabbitmqadmin
+    wget localhost:15672/cli/rabbitmqadmin
+    chmod +x rabbitmqadmin
+}
\ No newline at end of file
diff --git a/upgrade/scripts/upgrade-to-3.6-helpers.sh b/upgrade/scripts/upgrade-to-3.6-helpers.sh
new file mode 100644 (file)
index 0000000..4fd8866
--- /dev/null
@@ -0,0 +1,101 @@
+verify_steps() {
+    local test_vhost='test'
+    local test_user='test_user'
+
+    ${RABBITMQCTL} list_users | grep $test_user
+    ${RABBITMQCTL} list_vhosts | grep $test_vhost
+    ${RABBITMQCTL} list_user_permissions $test_user | grep $test_vhost | grep -F ".*"
+    ${RABBITMQCTL} list_permissions -p $test_vhost | grep $test_user | grep -F ".*"
+    ${RABBITMQCTL} list_policies -p $test_vhost | \
+        grep $test_vhost | \
+        grep my_policy_name | \
+        grep -F "policy.*" | \
+        grep -F '{"max-length":300}'
+
+    local exchange_name="test_exchange"
+    local queue_name_base="test_queue"
+
+    prepare_rabbitmqadmin
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list exchanges | grep $exchange_name
+
+    local sequence_index=`seq 1 $INDEX_MSG_SIZE`
+    local msg_payload_index=`printf '=%.0s' $sequence_index`
+    local sequence_store=`seq 1 $STORE_MSG_SIZE`
+    local msg_payload_store=`printf '+%.0s' $sequence_store`
+
+    # Durable queues survive upgrade
+    for i in `seq 1 $QUEUES_COUNT_DURABLE`
+    do
+        local queue_name="${queue_name_base}_dur_${i}"
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list queues name durable | \
+            grep $queue_name | grep True
+        # Each queue has $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE messages
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list queues name messages | \
+            grep $queue_name | grep `expr $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE`
+
+        # Drain persistent messages from queue index
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_INDEX`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost get queue=$queue_name count=1 requeue=false | \
+                grep $msg_payload_index | grep $INDEX_MSG_SIZE | grep $exchange_name
+        done
+        # Drain persistent messages from message store
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_STORE`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost get queue=$queue_name count=1 requeue=false | \
+                grep $msg_payload_store | grep $STORE_MSG_SIZE | grep $exchange_name
+        done
+        # No more messages - queue must be fully drained now
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost get queue=$queue_name count=1 requeue=false | \
+            grep "No items"
+
+    done
+
+    # Transient queues are deleted
+    (! ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list queues name | grep "${queue_name_base}_trans_") || exit 2
+
+    # First vhost is fully verified - delete it before checking the second one
+    ${RABBITMQCTL} delete_vhost $test_vhost
+
+    # Also delete default vhost
+    ${RABBITMQCTL} delete_vhost /
+
+    local test_vhost1="test_vhost_1"
+    local msg_payload_index_1=`printf '_%.0s' $sequence_index`
+    local msg_payload_store_1=`printf '0%.0s' $sequence_store`
+
+    ${RABBITMQCTL} list_vhosts | grep $test_vhost1
+    ${RABBITMQCTL} list_user_permissions $test_user | grep $test_vhost1 | grep -F ".*"
+    ${RABBITMQCTL} list_permissions -p $test_vhost1 | grep $test_user | grep -F ".*"
+
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 list exchanges | grep $exchange_name
+
+    # Durable queues
+    for i in `seq 1 $QUEUES_COUNT_DURABLE`
+    do
+        local queue_name="${queue_name_base}_dur_vhost1_${i}"
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 list queues name durable | \
+            grep $queue_name | grep True
+        # Each queue has $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE messages
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 list queues name messages | \
+            grep $queue_name | grep `expr $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE`
+
+        # Drain persistent messages from queue index
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_INDEX`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 get queue=$queue_name count=1 requeue=false | \
+                grep $msg_payload_index_1 | grep $INDEX_MSG_SIZE | grep $exchange_name
+        done
+        # Drain persistent messages from message store
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_STORE`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 get queue=$queue_name count=1 requeue=false | \
+                grep $msg_payload_store_1 | grep $STORE_MSG_SIZE | grep $exchange_name
+        done
+        # No more messages - queue must be fully drained now
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 get queue=$queue_name count=1 requeue=false | \
+            grep "No items"
+    done
+
+    echo "Finish verify"
+}
diff --git a/upgrade/scripts/upgrade-to-3.7-helpers.sh b/upgrade/scripts/upgrade-to-3.7-helpers.sh
new file mode 100644 (file)
index 0000000..54032ac
--- /dev/null
@@ -0,0 +1,104 @@
+verify_steps() {
+    local test_vhost='test'
+    local test_user='test_user'
+
+    ${RABBITMQCTL} list_users | grep $test_user
+    ${RABBITMQCTL} list_vhosts | grep $test_vhost
+    ${RABBITMQCTL} list_user_permissions $test_user | grep $test_vhost | grep -F ".*"
+    ${RABBITMQCTL} list_permissions -p $test_vhost | grep $test_user | grep -F ".*"
+    ${RABBITMQCTL} list_policies -p $test_vhost | \
+        grep $test_vhost | \
+        grep my_policy_name | \
+        grep -F "policy.*" | \
+        grep -F '{"max-length":300}'
+
+    local exchange_name="test_exchange"
+    local queue_name_base="test_queue"
+
+    prepare_rabbitmqadmin
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list exchanges | grep $exchange_name
+
+    local sequence_index=`seq 1 $INDEX_MSG_SIZE`
+    local msg_payload_index=`printf '=%.0s' $sequence_index`
+    local sequence_store=`seq 1 $STORE_MSG_SIZE`
+    local msg_payload_store=`printf '+%.0s' $sequence_store`
+
+    # Durable queues survive upgrade
+    for i in `seq 1 $QUEUES_COUNT_DURABLE`
+    do
+        local queue_name="${queue_name_base}_dur_${i}"
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list queues name durable | \
+            grep $queue_name | grep True
+        # Each queue has $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE messages
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list queues name messages | \
+            grep $queue_name | grep `expr $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE`
+
+        # Drain persistent messages from queue index
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_INDEX`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost get queue=$queue_name count=1 ackmode=ack_requeue_false | \
+                grep $msg_payload_index | grep $INDEX_MSG_SIZE | grep $exchange_name
+        done
+        # Drain persistent messages from message store
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_STORE`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost get queue=$queue_name count=1 ackmode=ack_requeue_false | \
+                grep $msg_payload_store | grep $STORE_MSG_SIZE | grep $exchange_name
+        done
+        # No more messages - queue must be fully drained now
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost get queue=$queue_name count=1 ackmode=ack_requeue_false | \
+            grep "No items"
+
+    done
+
+    # Transient queues are deleted
+    (! ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost list queues name | grep "${queue_name_base}_trans_") || exit 2
+
+    # First vhost is fully verified - delete it before checking the second one
+    ${RABBITMQCTL} delete_vhost $test_vhost
+
+    # Also delete default vhost
+    ${RABBITMQCTL} delete_vhost /
+
+    local test_vhost1="test_vhost_1"
+    local msg_payload_index_1=`printf '_%.0s' $sequence_index`
+    local msg_payload_store_1=`printf '0%.0s' $sequence_store`
+
+    ${RABBITMQCTL} list_vhosts | grep $test_vhost1
+    ${RABBITMQCTL} list_user_permissions $test_user | grep $test_vhost1 | grep -F ".*"
+    ${RABBITMQCTL} list_permissions -p $test_vhost1 | grep $test_user | grep -F ".*"
+
+    ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 list exchanges | grep $exchange_name
+
+    # Durable queues
+    for i in `seq 1 $QUEUES_COUNT_DURABLE`
+    do
+        local queue_name="${queue_name_base}_dur_vhost1_${i}"
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 list queues name durable | \
+            grep $queue_name | grep True
+        # Each queue has $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE messages
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 list queues name messages | \
+            grep $queue_name | grep `expr $MSGS_COUNT_PERSISTENT_INDEX + $MSGS_COUNT_PERSISTENT_STORE`
+
+        # Drain persistent messages from queue index
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_INDEX`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 get queue=$queue_name count=1 ackmode=ack_requeue_false | \
+                grep $msg_payload_index_1 | grep $INDEX_MSG_SIZE | grep $exchange_name
+        done
+        # Drain persistent messages from message store
+        for j in `seq 1 $MSGS_COUNT_PERSISTENT_STORE`
+        do
+            ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 get queue=$queue_name count=1 ackmode=ack_requeue_false | \
+                grep $msg_payload_store_1 | grep $STORE_MSG_SIZE | grep $exchange_name
+        done
+        # No more messages - queue must be fully drained now
+        ./rabbitmqadmin -u $test_user -p $test_user -V $test_vhost1 get queue=$queue_name count=1 ackmode=ack_requeue_false | \
+            grep "No items"
+    done
+
+    echo "Finish verify"
+}
+
+
+# NOTE(review): removed stray top-level 'ackmode=ack_requeue_false' assignment - leftover scratch line; ackmode is passed inline to rabbitmqadmin above
\ No newline at end of file
diff --git a/upgrade/scripts/upgrade-to.sh b/upgrade/scripts/upgrade-to.sh
new file mode 100755 (executable)
index 0000000..e58348e
--- /dev/null
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+set -ex
+
+. "$(dirname "$0")/upgrade-helpers.sh"
+. "$(dirname "$0")/upgrade-to-${UPGRADE_TO_SCRIPT}-helpers.sh"
+
+verify_steps